Estimation of the phosphorus sorption capacity of acidic soils in Ireland The test for the degree of phosphorus (P) saturation (DPS) of soils is used in northwest Europe to estimate the potential for P loss from soil to water. It expresses the historic sorption of P by soil as a percentage of the soil's P sorption capacity (PSC), which is taken to be α(Alox + Feox), where Alox and Feox are the amounts of aluminium and iron extracted by a single oxalate extraction. All quantities are measured in mmol kg−1 soil, and a value of 0.5 is commonly used for the scaling factor α in this equation. Historic, or previously sorbed, P is taken to be the quantity of P extracted by oxalate (Pox), so that DPS (%) = 100 × Pox/PSC.
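A minimal sketch of the DPS calculation described above, in Python. The function name and the example numbers are illustrative assumptions, not data from the study; the 0.5 default for alpha is the commonly used value mentioned in the abstract.

def degree_of_p_saturation(p_ox, al_ox, fe_ox, alpha=0.5):
    """Degree of P saturation (%); all inputs in mmol per kg of soil."""
    psc = alpha * (al_ox + fe_ox)   # P sorption capacity (PSC)
    return 100.0 * p_ox / psc       # previously sorbed P as % of PSC

# Illustrative values only, not taken from the paper:
print(degree_of_p_saturation(p_ox=8.0, al_ox=45.0, fe_ox=45.0))  # ~17.8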
//
//  Generated by the J2ObjC translator.  DO NOT EDIT!
//  source: src-delomboked/com/sponberg/fluid/layout/ModalView.java
//

#ifndef _FFTModalView_H_
#define _FFTModalView_H_

@class JavaUtilArrayList;
@protocol FFTModalView_ModalActionListener;

#import "JreEmulation.h"

@interface FFTModalView : NSObject {
 @public
  JavaUtilArrayList *actionListeners_;
  NSString *systemId_;
  id userData_;
  id fluidData_;
  id userSelection_;
  BOOL userCancelable_;
  NSString *tag_;
}

- (id)initWithNSString:(NSString *)systemId;
- (void)addActionListenerWithFFTModalView_ModalActionListener:(id<FFTModalView_ModalActionListener>)al;
- (void)modalCanceled;
- (void)modalCompleteWithId:(id)userDataObject;
- (NSString *)description;
- (JavaUtilArrayList *)getActionListeners;
- (NSString *)getSystemId;
- (id)getUserData;
- (id)getFluidData;
- (id)getUserSelection;
- (BOOL)isUserCancelable;
- (NSString *)getTag;
- (void)setActionListenersWithJavaUtilArrayList:(JavaUtilArrayList *)actionListeners;
- (void)setUserDataWithId:(id)userData;
- (void)setFluidDataWithId:(id)fluidData;
- (void)setUserSelectionWithId:(id)userSelection;
- (void)setUserCancelableWithBoolean:(BOOL)userCancelable;
- (void)setTagWithNSString:(NSString *)tag;
- (BOOL)isEqual:(id)o;
- (BOOL)canEqualWithId:(id)other;
- (NSUInteger)hash;
- (void)copyAllFieldsTo:(FFTModalView *)other;
@end

__attribute__((always_inline)) inline void FFTModalView_init() {}

J2OBJC_FIELD_SETTER(FFTModalView, actionListeners_, JavaUtilArrayList *)
J2OBJC_FIELD_SETTER(FFTModalView, systemId_, NSString *)
J2OBJC_FIELD_SETTER(FFTModalView, userData_, id)
J2OBJC_FIELD_SETTER(FFTModalView, fluidData_, id)
J2OBJC_FIELD_SETTER(FFTModalView, userSelection_, id)
J2OBJC_FIELD_SETTER(FFTModalView, tag_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_FluidLayout_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, FluidLayout_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_FluidLayoutFullScreen_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, FluidLayoutFullScreen_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_ImagePicker_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, ImagePicker_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_Confirmation_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, Confirmation_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_WaitingDialog_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, WaitingDialog_, NSString *)

FOUNDATION_EXPORT NSString *FFTModalView_Custom_;
J2OBJC_STATIC_FIELD_GETTER(FFTModalView, Custom_, NSString *)

typedef FFTModalView ComSponbergFluidLayoutModalView;

@protocol FFTModalView_ModalActionListener < NSObject, JavaObject >
- (void)modalCompleteWithId:(id)userData;
- (void)modalCanceled;
@end

__attribute__((always_inline)) inline void FFTModalView_ModalActionListener_init() {}

@interface FFTModalView_ModalActionListenerAdapter : NSObject < FFTModalView_ModalActionListener > {
}

- (void)modalCompleteWithId:(id)userData;
- (void)modalCanceled;
- (id)init;
@end

__attribute__((always_inline)) inline void FFTModalView_ModalActionListenerAdapter_init() {}

@interface FFTModalView_ModalViewConfirmation : NSObject {
 @public
  NSString *title_;
  NSString *message_;
  NSString *ok_;
  NSString *cancel_;
}

- (NSString *)description;
- (NSString *)getTitle;
- (NSString *)getMessage;
- (NSString *)getOk;
- (NSString *)getCancel;
- (void)setTitleWithNSString:(NSString *)title;
- (void)setMessageWithNSString:(NSString *)message;
- (void)setOkWithNSString:(NSString *)ok;
- (void)setCancelWithNSString:(NSString *)cancel;
- (id)init;
- (void)copyAllFieldsTo:(FFTModalView_ModalViewConfirmation *)other;
@end

__attribute__((always_inline)) inline void FFTModalView_ModalViewConfirmation_init() {}

J2OBJC_FIELD_SETTER(FFTModalView_ModalViewConfirmation, title_, NSString *)
J2OBJC_FIELD_SETTER(FFTModalView_ModalViewConfirmation, message_, NSString *)
J2OBJC_FIELD_SETTER(FFTModalView_ModalViewConfirmation, ok_, NSString *)
J2OBJC_FIELD_SETTER(FFTModalView_ModalViewConfirmation, cancel_, NSString *)

@interface FFTModalView_ModalViewWaitingDialog : NSObject {
 @public
  NSString *title_;
  NSString *message_;
}

- (NSString *)description;
- (NSString *)getTitle;
- (NSString *)getMessage;
- (void)setTitleWithNSString:(NSString *)title;
- (void)setMessageWithNSString:(NSString *)message;
- (id)init;
- (void)copyAllFieldsTo:(FFTModalView_ModalViewWaitingDialog *)other;
@end

__attribute__((always_inline)) inline void FFTModalView_ModalViewWaitingDialog_init() {}

J2OBJC_FIELD_SETTER(FFTModalView_ModalViewWaitingDialog, title_, NSString *)
J2OBJC_FIELD_SETTER(FFTModalView_ModalViewWaitingDialog, message_, NSString *)

#endif // _FFTModalView_H_
// MakeResource creates a wrapper for a resource.
func MakeResource(name corev1.ResourceName) *ResourceWrapper {
	return &ResourceWrapper{kueue.Resource{
		Name: name,
	}}
}
package ru.vegax.xavier.a3test.data_loader;

public interface LoaderCallback {
    void notifyDataLoaded();

    void notifyError(String e);
}
def degree(self):
    """Return the common degree of all keys, or None if the degrees differ.

    The degree of a key is the sum of the dimensions of its simplices.
    """
    degs = {sum(spx.dimension for spx in k) for k in self.keys()}
    if len(degs) != 1:
        return None
    return degs.pop()
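A minimal sketch showing the method above in action. `Simplex` and the dict-backed container are hypothetical stand-ins for the library's real types; only a `.dimension` attribute and a `.keys()` method are needed.

from collections import namedtuple

Simplex = namedtuple("Simplex", ["dimension"])  # hypothetical simplex type

class _DemoElement(dict):
    degree = degree  # reuse the function above as a method

# Every key has total dimension 3, so the element is homogeneous of degree 3.
elem = _DemoElement({(Simplex(1), Simplex(2)): 1, (Simplex(3),): -1})
print(elem.degree())   # 3

# Keys of different total dimension give no well-defined degree.
mixed = _DemoElement({(Simplex(1),): 1, (Simplex(2),): 1})
print(mixed.degree())  # None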
package org.openmrs.module.nemrapps.fragment;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openmrs.ui.framework.fragment.FragmentRequest;
import org.openmrs.ui.framework.fragment.FragmentRequestMapper;
import org.springframework.stereotype.Component;

@Component
public class NemrFragmentRequestMapper implements FragmentRequestMapper {

    protected final Log log = LogFactory.getLog(getClass());

    /**
     * Implementations should call {@link FragmentRequest#setProviderNameOverride(String)} and
     * {@link FragmentRequest#setFragmentIdOverride(String)}, and return true if they want to remap
     * a request, or return false if they didn't remap it.
     *
     * @param request may have its providerNameOverride and fragmentNameOverride set
     * @return true if this page was mapped (by overriding the provider and/or fragment), false
     *         otherwise
     */
    public boolean mapRequest(FragmentRequest request) {
        log.info("Incoming: " + request);
        if (request.getProviderName().equals("coreapps")) {
            if (request.getFragmentId().equals("contactInfo")) {
                request.setProviderNameOverride("referenceapplication.registrationapp.nigeria");
                log.info("Mapped To: " + request);
                return true;
            }
            if (request.getFragmentId().equals("patientdashboard/contactInfoInline")) {
                request.setProviderNameOverride("nemrapps");
                log.info("Mapped To: " + request);
                return true;
            }
            if (request.getFragmentId().equals("patientHeader")) {
                request.setProviderNameOverride("nemrapps");
                log.info("Mapped To: " + request);
                return true;
            }
        }
        return false;
    }
}
/**
 * @author David Insley
 */
public class HerbloreAction extends Action<Player> {

    private final HerbloreRecipe recipe;
    private final int amount;
    private boolean started;
    private int count;

    public HerbloreAction(Player player, HerbloreRecipe recipe, int amount) {
        super(player, recipe.getDelay(), true);
        this.recipe = recipe;
        this.amount = amount;
    }

    @Override
    public void execute() {
        if (!started) {
            // Stop the player walking when the action first runs.
            mob.getWalkingQueue().reset();
            started = true;
        }
        if (!recipe.getRequirements().hasRequirementsDisplayOne(mob)) {
            stop();
            return;
        }
        recipe.getRequirements().fulfillAll(mob);
        mob.getInventory().add(new Item(recipe.getProduct()));
        mob.getSkillSet().addExperience(Skill.HERBLORE, recipe.getXp());
        mob.sendMessage(recipe.getMessage());
        if (recipe.getAnimation() != null) {
            mob.playAnimation(recipe.getAnimation());
        }
        if (++count == amount) {
            stop();
        }
    }
}
package three;

public class AccountTest {

    public static void main(String[] args) {
        Account account = new Account();
        account.deposit(10000);
        try {
            account.withdraw(30000);
        } catch (BalanceException e) {
            e.printStackTrace();
        }
    }
}
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Component;

// ZabbixApiException is a project-local exception type (import omitted).

/**
 * @author Mamadou Lamine NIANG
 **/
@Component
@ConditionalOnBean(ObjectMapper.class)
public class JsonMapper {

    private final ObjectMapper objectMapper;

    public JsonMapper(ObjectMapper objectMapper) {
        this.objectMapper = objectMapper;
    }

    public <T> T getObject(JsonNode node, Class<T> tClass) throws ZabbixApiException {
        try {
            return objectMapper.treeToValue(node, tClass);
        } catch (JsonProcessingException e) {
            throw new ZabbixApiException("Error converting value to Object", e);
        }
    }

    public <T> List<T> getList(JsonNode node, Class<T> tClass) throws ZabbixApiException {
        JavaType type = objectMapper.getTypeFactory().constructCollectionType(List.class, tClass);
        JsonParser jsonParser = node.traverse();
        try {
            MappingIterator<List<T>> mappingIterator = objectMapper.readValues(jsonParser, type);
            if (mappingIterator.hasNext()) {
                return mappingIterator.next();
            } else {
                return new ArrayList<>();
            }
        } catch (IOException e) {
            throw new ZabbixApiException("Error converting value to List", e);
        }
    }

    public <T> List<T> getList(JsonNode node, String fieldName, Class<T> tClass) throws ZabbixApiException {
        JsonNode innerNode = node.findValue(fieldName);
        if (innerNode == null) {
            throw new ZabbixApiException(String.format("Node '%s' not found", fieldName));
        }
        return getList(innerNode, tClass);
    }
}
from functools import wraps

import flask
from flask import redirect, request, session


def fapi(self, f):
    """Decorator: require an active FAPI session, otherwise redirect to login."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if FAPI_SESSION_KEY in session:
            # Expose the API client on the request context, then run the view.
            flask.g.fapi = self.get_fapi_session()
            return flask.make_response(f(*args, **kwargs))
        else:
            # Remember where to return to after authenticating.
            session[REDIRECT_AFTER_LOGIN_KEY] = request.url
            return redirect(request.root_url + self.login_path)
    return decorated_function
/**
 * Adds the specified columns which will not be serialized.
 *
 * @param columns the columns
 */
public ResultMetadata addNonSerializedColumns(Collection<? extends ColumnSpecification> columns)
{
    names.addAll(columns);
    return this;
}
def translate_path(basedir, uripath):
    """Map a collapsed URI path onto a filesystem path rooted at `basedir`.

    Components that are empty, contain a path separator, or are '.'/'..'
    are dropped, which prevents directory traversal outside `basedir`.
    """
    assert os.path.isabs(basedir)
    path, trailing_slash, _ = url_collapse_path(uripath)
    words = (
        w for w in path.split('/')
        if w and not os.path.dirname(w) and w not in (os.curdir, os.pardir)
    )
    path = os.path.join(basedir, *words)
    if trailing_slash:
        path += os.path.sep
    return path
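A small illustration of the traversal-safety behaviour noted in the docstring above. The real `url_collapse_path` helper is assumed to return the collapsed path plus a trailing-slash flag; the stand-in below fakes just enough of that contract for the demo.

import os
import posixpath

def url_collapse_path(uripath):
    # Stand-in for the assumed helper: collapse '.'/'..' segments and
    # report whether the original path ended with a slash.
    return posixpath.normpath(uripath), uripath.endswith('/'), None

print(translate_path('/srv/www', '/a/../../etc/passwd'))
# -> /srv/www/etc/passwd  (the '..' segments cannot escape the base dir)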
import { assert } from "chai";

import { canToggleState } from "../../../src/common/entity/can_toggle_state";

describe("canToggleState", () => {
  const hass: any = {
    services: {
      light: {
        turn_on: null, // Service keys only need to be present for test
        turn_off: null,
      },
    },
  };

  it("Detects lights toggle", () => {
    const stateObj: any = {
      entity_id: "light.bla",
      state: "on",
    };
    assert.isTrue(canToggleState(hass, stateObj));
  });

  it("Detects group with toggle", () => {
    const stateObj: any = {
      entity_id: "group.bla",
      state: "on",
    };
    assert.isTrue(canToggleState(hass, stateObj));
  });

  it("Detects group without toggle", () => {
    const stateObj: any = {
      entity_id: "group.devices",
      state: "home",
    };
    assert.isFalse(canToggleState(hass, stateObj));
  });

  it("Detects climate with toggle", () => {
    const stateObj: any = {
      entity_id: "climate.bla",
      attributes: {
        supported_features: 4096,
      },
    };
    assert.isTrue(canToggleState(hass, stateObj));
  });

  it("Detects climate without toggle", () => {
    const stateObj: any = {
      entity_id: "climate.bla",
      attributes: {
        supported_features: 0,
      },
    };
    assert.isFalse(canToggleState(hass, stateObj));
  });
});
Expectant parents' emotions evoked by pregnancy: A longitudinal dyadic analysis of couples in the Swedish Pregnancy Panel. RATIONALE Holistic antenatal care requires knowledge of individuals' emotional responses to pregnancy. Little is known about how a pregnant woman and her partner influence each other emotionally during a pregnancy. OBJECTIVE This study examines six discrete emotions that expectant couples experience during pregnancy, how these emotions change from mid- to late pregnancy, and whether the partners' emotional responses influence each other. METHODS A longitudinal dyadic study in which pregnant women and their partners (1432 couples) rated the extent to which the pregnancy evoked joy, strength, security, worry, shame, and anger at pregnancy weeks 12-19, 22-24, and 36. Latent curve models with structured residuals identified levels of, and change in, these emotions over time, while accounting for between- and within-couple variance. RESULTS Pregnancy evoked mainly joy, strength, security, and worry, and lower levels of anger and shame. Pregnant women and partners felt similar levels of joy, strength, and security, but pregnant women felt more worry, shame, and anger. There was little to no mean-level change in any of the six measured emotions evoked by pregnancy (between-couple change), and no reciprocal effects between the partners (within-couple change). CONCLUSIONS Emotions felt in mid-pregnancy were also felt in late pregnancy. Furthermore, the pregnant woman and her partner have individual emotional trajectories. The results can help healthcare professionals and researchers target interventions to expectant mothers and partners, specifically by understanding the emotional response to pregnancy as a stable confound and by not approaching the couple as one emotional unit.
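For readers unfamiliar with the model class named in METHODS, a generic sketch of a dyadic latent curve model with structured residuals (LCM-SR) follows; the notation is the standard textbook form, not the authors' exact specification. The observed emotion y of partner p in couple i at wave t splits into a between-couple growth component and a within-couple structured residual; "no reciprocal effects" corresponds to cross-lagged coefficients c_1 = c_2 = 0.

y^{(p)}_{ti} = \alpha^{(p)}_i + \beta^{(p)}_i \lambda_t + r^{(p)}_{ti}
r^{(1)}_{ti} = a_1\, r^{(1)}_{t-1,i} + c_1\, r^{(2)}_{t-1,i} + \epsilon^{(1)}_{ti}
r^{(2)}_{ti} = a_2\, r^{(2)}_{t-1,i} + c_2\, r^{(1)}_{t-1,i} + \epsilon^{(2)}_{ti}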
/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "frameworks/core/components/svg/flutter_render_svg_filter.h"

#include "frameworks/core/components/svg/flutter_render_svg_fe_colormatrix.h"
#include "frameworks/core/components/svg/flutter_render_svg_fe_composite.h"
#include "frameworks/core/components/svg/flutter_render_svg_fe_gaussianblur.h"
#include "frameworks/core/components/svg/flutter_render_svg_fe_offset.h"
#include "third_party/skia/include/effects/SkColorFilterImageFilter.h"
#include "third_party/skia/include/effects/SkColorMatrix.h"

namespace OHOS::Ace {

RefPtr<RenderNode> RenderSvgFilter::Create()
{
    return AceType::MakeRefPtr<FlutterRenderSvgFilter>();
}

void FlutterRenderSvgFilter::Paint(RenderContext& context, const Offset& offset)
{
    return;
}

const SkPaint FlutterRenderSvgFilter::OnAsPaint()
{
    SkPaint skPaint;
    skPaint.setAntiAlias(true);
    sk_sp<SkImageFilter> imageFilter = nullptr;
    ColorInterpolationType currentColor = ColorInterpolationType::SRGB;
    for (const auto& item : GetChildren()) {
        GetImageFilter(AceType::DynamicCast<RenderSvgFe>(item), imageFilter, currentColor);
    }
    ConverImageFilterColor(imageFilter, currentColor, ColorInterpolationType::SRGB);
    skPaint.setImageFilter(imageFilter);
    return skPaint;
}

void FlutterRenderSvgFilter::GetImageFilter(
    const RefPtr<RenderSvgFe>& fe, sk_sp<SkImageFilter>& imageFilter, ColorInterpolationType& currentColor)
{
    if (!fe) {
        return;
    }
    ColorInterpolationType srcColor = currentColor;
    InitFilterColor(fe, currentColor);
    auto feComposite = AceType::DynamicCast<FlutterRenderSvgFeComposite>(fe);
    if (feComposite) {
        auto foreImageFilter = MakeImageFilter(feComposite->GetInType(), imageFilter);
        auto backImageFilter = MakeImageFilter(feComposite->GetIn2Type(), imageFilter);
        ConverImageFilterColor(foreImageFilter, srcColor, currentColor);
        ConverImageFilterColor(backImageFilter, srcColor, currentColor);
        feComposite->OnAsImageFilter(backImageFilter, foreImageFilter, imageFilter);
        ConverImageFilterColor(imageFilter, srcColor, currentColor);
        return;
    }
    imageFilter = MakeImageFilter(fe->GetInType(), imageFilter);
    auto feOffset = AceType::DynamicCast<FlutterRenderSvgFeOffset>(fe);
    if (feOffset) {
        feOffset->OnAsImageFilter(imageFilter);
        ConverImageFilterColor(imageFilter, srcColor, currentColor);
        return;
    }
    auto feColorMatrix = AceType::DynamicCast<FlutterRenderSvgFeColorMatrix>(fe);
    if (feColorMatrix) {
        feColorMatrix->OnAsImageFilter(imageFilter);
        ConverImageFilterColor(imageFilter, srcColor, currentColor);
        return;
    }
    auto feGaussianBlur = AceType::DynamicCast<FlutterRenderSvgFeGaussianBlur>(fe);
    if (feGaussianBlur) {
        feGaussianBlur->OnAsImageFilter(imageFilter);
        ConverImageFilterColor(imageFilter, srcColor, currentColor);
        return;
    }
    currentColor = srcColor;
}

void FlutterRenderSvgFilter::InitFilterColor(const RefPtr<RenderSvgFe>& fe, ColorInterpolationType& currentColor)
{
    if (!fe) {
        return;
    }
    if (fe->GetInType() == FeInType::SOURCE_GRAPHIC) {
        currentColor = ColorInterpolationType::SRGB;
    } else {
        currentColor = fe->GetColorType();
    }
}

sk_sp<SkImageFilter> FlutterRenderSvgFilter::MakeImageFilter(const FeInType& in, sk_sp<SkImageFilter>& imageFilter)
{
    switch (in) {
        case FeInType::SOURCE_GRAPHIC:
            return nullptr;
        case FeInType::SOURCE_ALPHA: {
            // Braces scope the local matrix so the declaration does not cross
            // other case labels.
            SkColorMatrix m;
            m.setScale(0, 0, 0, 1.0f);
#ifdef USE_SYSTEM_SKIA
            return SkColorFilterImageFilter::Make(SkColorFilter::MakeMatrixFilterRowMajor255(m.fMat), nullptr);
#else
            return SkColorFilterImageFilter::Make(SkColorFilters::Matrix(m), nullptr);
#endif
        }
        case FeInType::BACKGROUND_IMAGE:
            break;
        case FeInType::BACKGROUND_ALPHA:
            break;
        case FeInType::FILL_PAINT:
            break;
        case FeInType::STROKE_PAINT:
            break;
        case FeInType::PRIMITIVE:
            break;
        default:
            break;
    }
    return imageFilter;
}

void FlutterRenderSvgFilter::ConverImageFilterColor(
    sk_sp<SkImageFilter>& imageFilter, const ColorInterpolationType& src, const ColorInterpolationType& dst)
{
    if (dst == ColorInterpolationType::LINEAR_RGB && src == ColorInterpolationType::SRGB) {
#ifdef USE_SYSTEM_SKIA
        imageFilter = SkColorFilterImageFilter::Make(SkColorFilter::MakeSRGBToLinearGamma(), imageFilter);
#else
        imageFilter = SkColorFilterImageFilter::Make(SkColorFilters::SRGBToLinearGamma(), imageFilter);
#endif
    } else if (dst == ColorInterpolationType::SRGB && src == ColorInterpolationType::LINEAR_RGB) {
#ifdef USE_SYSTEM_SKIA
        imageFilter = SkColorFilterImageFilter::Make(SkColorFilter::MakeLinearToSRGBGamma(), imageFilter);
#else
        imageFilter = SkColorFilterImageFilter::Make(SkColorFilters::LinearToSRGBGamma(), imageFilter);
#endif
    }
}

} // namespace OHOS::Ace
# Find x wins, y draws, z losses with x*w + y*d == p and x + y + z == n.
n, p, w, d = map(int, input().split())
found = False
for y in range(100000):
    if y * d > p:
        break
    if (p - y * d) % w == 0:
        x = (p - y * d) // w  # integer division keeps x a whole number
        if x + y <= n:
            found = True
            print("%d %d %d" % (x, y, n - x - y))
            break
if not found:
    print("-1")
import threading
from datetime import date, timedelta

import numpy as np
import pandas as pd

from time_series_transform.stock_transform.base import *
from time_series_transform.stock_transform.stock_engine._investing import investing
from time_series_transform.stock_transform.stock_engine._yahoo_stock import yahoo_stock


class Stock_Extractor(object):
    def __init__(self, symbol, engine, *args, **kwargs):
        """
        Stock_Extractor extracts data of the given symbol using the selected engine

        For investing engine: country is required.
        for example, Stock_Extractor('aapl','investing', country = 'united states')

        Parameters
        ----------
        symbol : str
            symbol of the stock
        engine : str
            engine used for data extraction
        """
        self.client = self._get_extractor(engine)(symbol, *args, **kwargs)
        self.symbol = symbol
        self.stock = None

    def _get_extractor(self, engine):
        engineDict = {
            'yahoo': yahoo_stock,
            'investing': investing
        }
        return engineDict[engine]

    def get_period(self, period):
        """
        get_period extracts the stock data of the selected period

        Parameters
        ----------
        period : str
            period of the data
            for example, 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max

        Returns
        -------
        stock data
            The stock data of selected period
        """
        data = self.client.getHistoricalByPeriod(period)
        data = pd.DataFrame(data.to_records())
        data['Date'] = data.Date.astype(str)
        additionalInfo = self.client.getAdditionalInfo()
        self.stock = Stock(
            data,
            time_index='Date',
            symbol=self.symbol
        )
        return self.stock

    def get_date(self, start_date, end_date):
        """
        get_date extracts the stock data of the selected date range

        Parameters
        ----------
        start_date : str
            start of the data
            format: "%Y-%m-%d", eg "2020-02-20"
        end_date : str
            end of the data

        Returns
        -------
        stock data
            The stock data of selected period
        """
        data = self.client.getHistoricalByRange(start_date, end_date)
        data = pd.DataFrame(data.to_records())
        data['Date'] = data.Date.astype(str)
        additionalInfo = self.client.getAdditionalInfo()
        self.stock = Stock(
            data,
            time_index='Date',
            symbol=self.symbol
        )
        return self.stock

    def get_intra_day(self, start_date, end_date, interval='1m'):
        """
        get_intra_day extracts the intraday stock data of the selected period

        Parameters
        ----------
        start_date : str
            start of the data
            format: "%Y-%m-%d", eg "2020-02-20"
        end_date : str
            end of the data
        interval : str
            interval of the data
            Valid intervals: [1m, 2m, 5m, 15m, 30m, 60m, 90m, 1h]

        Returns
        -------
        stock data
            The stock data of selected period
        """
        data = self.client.getIntraDayData(start_date, end_date, interval)
        data = pd.DataFrame(data.to_records())
        data['Datetime'] = data.Datetime.astype(str)
        self.stock = Stock(
            data,
            time_index='Datetime',
            symbol=self.symbol
        )
        return self.stock


class Portfolio_Extractor(object):
    def __init__(self, symbolList, engine, *args, **kwargs):
        """
        Portfolio_Extractor extracts data of the given symbolList using the selected engine

        Parameters
        ----------
        symbolList : list
            list of symbol
        engine : str
            engine used for data extraction
        """
        self.engine = engine
        self.symbolList = symbolList
        self.portfolio = None
        self.args = args
        self.kwargs = kwargs

    def get_period(self, period, n_threads=8):
        """
        get_period extracts the list of stock by the given period

        Parameters
        ----------
        period : str
            period of the data
            for example, 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
        n_threads : int
            number of threads for multi-thread processing

        Returns
        -------
        portfolio
            portfolio data of the given stock list
        """
        stockList = self._get_stock_list_multi(n_threads, 'get_period', [period])
        self.portfolio = Portfolio(
            stockList,
            time_index='Date',
            symbolIx='symbol'
        )
        return self.portfolio

    def get_date(self, start_date, end_date, n_threads=8):
        """
        get_date extracts the list of stock by the date period

        Parameters
        ----------
        start_date : str
            start of the data
            format: "%Y-%m-%d", eg "2020-02-20"
        end_date : str
            end of the data
        n_threads : int
            number of threads for multi-thread processing

        Returns
        -------
        portfolio
            portfolio data of the given stock list
        """
        stockList = self._get_stock_list_multi(n_threads, 'get_date', [start_date, end_date])
        self.portfolio = Portfolio(
            stockList,
            time_index='Date',
            symbolIx='symbol'
        )
        return self.portfolio

    def get_intra_day(self, start_date, end_date, interval='1m', n_threads=8):
        """
        get_intra_day extracts the intraday data of the list of stock data by the date period

        Parameters
        ----------
        start_date : str
            start of the data
            format: "%Y-%m-%d", eg "2020-02-20"
        end_date : str
            end of the data
        interval : str
            interval of the data
            Valid intervals: [1m, 2m, 5m, 15m, 30m, 60m, 90m, 1h]
        n_threads : int
            number of threads for multi-thread processing

        Returns
        -------
        portfolio
            portfolio data of the given stock list
        """
        stockList = self._get_stock_list_multi(n_threads, 'get_intra_day', [start_date, end_date, interval])
        self.portfolio = Portfolio(
            stockList,
            time_index='Datetime',
            symbolIx='symbol'
        )
        return self.portfolio

    def _get_stock_list_multi(self, n_threads, func, time_val):
        stockList = []
        tasks = []
        if len(self.symbolList) < n_threads:
            n_threads = len(self.symbolList)
        bins = np.array_split(self.symbolList, n_threads)
        for bn in bins:
            thread = threading.Thread(target=self._get_stock_data, args=[stockList, bn, func, time_val])
            tasks.append(thread)
            thread.start()
        for task in tasks:
            task.join()
        stockDict = {}
        for i in stockList:
            stockDict.update(i)
        return stockDict

    def _get_stock_data(self, stockList, symbolList, func, time_val, *args, **kwargs):
        for i in range(len(symbolList)):
            symbol = symbolList[i]
            if self.engine == "investing":
                if 'country' not in self.kwargs:
                    raise ValueError("Country must be included while using the investing engine")
                country = self.kwargs['country'][i]
                stock_data = Stock_Extractor(symbol, self.engine, *self.args, country=country)
            else:
                stock_data = Stock_Extractor(symbol, self.engine, *self.args, **self.kwargs)
            extract_func = getattr(stock_data, func)
            stock_data = extract_func(*time_val)
            stockList.append({symbol: stock_data})
        return stockList
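A quick usage sketch for the extractor classes above, assuming the 'yahoo' engine is available in the installed package; the symbols, dates, and period string are arbitrary examples.

# Single symbol: one year of daily data through the yahoo engine.
apple = Stock_Extractor('aapl', 'yahoo').get_period('1y')

# Several symbols at once; extraction is split across worker threads.
portfolio = Portfolio_Extractor(['aapl', 'msft', 'googl'], 'yahoo').get_date(
    '2020-01-01', '2020-06-30', n_threads=4)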
/* Copyright 2017-2019 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "lullaby/tools/anim_pipeline/animation.h" #include <cmath> #include <queue> #include "lullaby/util/fixed_string.h" #include "lullaby/util/logging.h" #include "lullaby/util/math.h" #include "lullaby/tools/common/file_utils.h" namespace lull { namespace tool { namespace { // Extracts the quaternion rotation from the transform matrix `m` using `scale` // as the pre-computed scale component of the `m`. mathfu::quat ExtractQuaternion(const mathfu::mat4 m, const mathfu::vec3& scale) { // The math here must undo the math in mathfu::mat4::Transform(). const mathfu::vec3 inv_scale = mathfu::kOnes3f / scale; const mathfu::mat3 rot( m(0, 0) * inv_scale.x, m(1, 0) * inv_scale.x, m(2, 0) * inv_scale.x, m(0, 1) * inv_scale.y, m(1, 1) * inv_scale.y, m(2, 1) * inv_scale.y, m(0, 2) * inv_scale.z, m(1, 2) * inv_scale.z, m(2, 2) * inv_scale.z); return mathfu::quat::FromMatrix(rot).Normalized(); } } // namespace using LogString = FixedString<512>; // Use these bitfields to find situations where scale x, y, and z occur, in // any order, in a row. static const uint32_t kScaleXBitfield = 1 << motive::kScaleX; static const uint32_t kScaleYBitfield = 1 << motive::kScaleY; static const uint32_t kScaleZBitfield = 1 << motive::kScaleZ; static const uint32_t kScaleXyzBitfield = kScaleXBitfield | kScaleYBitfield | kScaleZBitfield; Animation::Animation(std::string name, const Tolerances& tolerances, bool sqt_anims) : name_(std::move(name)), tolerances_(tolerances), cur_bone_index_(-1), sqt_anims_(sqt_anims) {} int Animation::RegisterBone(const char* bone_name, int parent_bone_index) { const int bone_index = static_cast<int>(bones_.size()); bones_.emplace_back(bone_name, parent_bone_index); return bone_index; } FlatChannelId Animation::AllocChannel(int bone_index, motive::MatrixOperationType op, motive::MatrixOpId id) { assert(bone_index < bones_.size()); cur_bone_index_ = bone_index; Channels& channels = CurChannels(); channels.emplace_back(id, op); return static_cast<FlatChannelId>(channels.size() - 1); } void Animation::AddConstant(FlatChannelId channel_id, float const_val) { Channels& channels = CurChannels(); Nodes& n = channels[channel_id].nodes; n.resize(0); n.emplace_back(0, const_val, 0.0f); } void Animation::AddCurve(FlatChannelId channel_id, const AnimCurve& curve) { AddCurve(channel_id, curve.times.data(), curve.values.data(), curve.derivatives.data(), curve.times.size()); } void Animation::AddCurve(FlatChannelId channel_id, const float* times, const float* vals, const float* derivatives, size_t count) { // Break the curve down into segments and process them depth-first so that the // resulting nodes are in chronological order. std::vector<CurveSegment> segments; segments.emplace_back(times, vals, derivatives, count); while (!segments.empty()) { CurveSegment s = segments.back(); segments.pop_back(); // Create cubic that covers the entire range from time_start ~ time_end. 
// The cubic `c` is shifted to the left, to start at 0 instead of // time_start. // This is to maintain floating-point precision. const float time_start = s.times[0]; const float time_end = s.times[s.count - 1]; const float time_width = static_cast<float>(time_end - time_start); const motive::CubicCurve c( motive::CubicInit(s.vals[0], s.derivatives[0], s.vals[s.count - 1], s.derivatives[s.count - 1], time_width)); // Find the worst intermediate val in for this cubic. // That is, the index into `s.vals` where the cubic evaluation is most // inaccurate. float worst_diff = 0.0f; size_t worst_idx = 0; for (size_t i = 1; i < s.count - 1; ++i) { const float cubic_val = c.Evaluate(s.times[i] - time_start); const float curve_val = s.vals[i]; const float diff_val = fabs(cubic_val - curve_val); if (diff_val > worst_diff) { worst_idx = i; worst_diff = diff_val; } } // If the cubic is off by a lot, divide the curve into two curves at the // worst time. Note that the recursion will end, at worst, when // s.count ==> 2. const float tolerance = Tolerance(channel_id); if (worst_idx > 0 && worst_diff > tolerance) { // Push the "end" segment on first so that the "start" segment is // processed first, resulting in a depth-first search. segments.emplace_back(&s.times[worst_idx], &s.vals[worst_idx], &s.derivatives[worst_idx], s.count - worst_idx); segments.emplace_back(s.times, s.vals, s.derivatives, worst_idx + 1); continue; } // Otherwise, the generated cubic is good enough, so record it. const SplineNode start_node(time_start, s.vals[0], s.derivatives[0]); const SplineNode end_node(time_end, s.vals[s.count - 1], s.derivatives[s.count - 1]); // Only push the start node if it differs from the previously pushed end // node. Most of the time it will be the same. Channels& channels = CurChannels(); Nodes& n = channels[channel_id].nodes; const bool start_matches_prev = !n.empty() && n.back() == start_node; if (!start_matches_prev) { n.push_back(start_node); } n.push_back(end_node); } } size_t Animation::NumNodes(FlatChannelId channel_id) const { const Channels& channels = CurChannels(); const Nodes& n = channels[channel_id].nodes; return n.size(); } void Animation::PruneNodes(FlatChannelId channel_id) { const float tolerance = Tolerance(channel_id); // For every node try to prune as many redunant nodes that come after it. // A node is redundant if the spline evaluates to the same value even if // it doesn't exists (note: here "same value" means within `tolerances_`). Channels& channels = CurChannels(); Nodes& n = channels[channel_id].nodes; std::vector<bool> prune(n.size(), false); for (size_t i = 0; i < n.size();) { size_t next_i = i + 1; for (size_t j = i + 2; j < n.size(); ++j) { const bool redundant = IntermediateNodesRedundant(&n[i], j - i + 1, tolerance); if (redundant) { prune[j - 1] = true; next_i = j; } } i = next_i; } // Compact to remove all pruned nodes. size_t write = 0; for (size_t read = 0; read < n.size(); ++read) { if (prune[read]) continue; if (write < read) { n[write] = n[read]; } write++; } n.resize(write); // If value is constant for the entire time, remove the second node so that // we know to output a constant value in `OutputFlatBuffer()`. 
const bool is_const = n.size() == 2 && fabs(n[0].val - n[1].val) < tolerance && fabs(DerivativeAngle(n[0].derivative)) < tolerances_.derivative_angle && fabs(DerivativeAngle(n[1].derivative)) < tolerances_.derivative_angle; if (is_const) { n.resize(1); } } void Animation::PruneChannels(bool no_uniform_scale) { for (auto bone = bones_.begin(); bone != bones_.end(); ++bone) { // Iterate from the end to minimize the cost of the erase operations. Channels& channels = bone->channels; for (FlatChannelId ch = static_cast<FlatChannelId>(channels.size() - 1); ch >= 0; ch--) { // Collapse kScaleX,Y,Z into kScaleUniformly. const bool uniform_scale = (!no_uniform_scale && UniformScaleChannels(channels, ch)); if (uniform_scale) { // log_.Log(fplutil::kLogVerbose, // " Collapsing scale x, y, z channels %d~%d into" // " one scale-uniformly channel\n", // ch, ch + 2); // Ids values are in consecutive order // scale-X id, scale-Y id, scale-Z id, scale-uniformly id // the same as op values are in consecutive order // kScaleX, kScaleY, kScaleZ, kScaleUniformly // but with a different initial value. // // So to convert from scale-? id to scale-uniformly id, we add on // the difference kScaleUniformly - kScale?. channels[ch].id += motive::kScaleUniformly - static_cast<motive::MatrixOpId>(channels[ch].op); channels[ch].op = motive::kScaleUniformly; channels.erase(channels.begin() + (ch + 1), channels.begin() + (ch + 3)); } // Sum together channels that are adjacent, or separated only by // independent ops. const FlatChannelId summable_ch = SummableChannel(channels, ch); if (summable_ch >= 0) { // log_.Log(fplutil::kLogVerbose, " Summing %s channels %d and %d\n", // motive::MatrixOpName(channels[ch].op), ch, summable_ch); SumChannels(channels, ch, summable_ch); channels.erase(channels.begin() + summable_ch); } // Remove constant channels that have the default value. // Most of the time these won't be created, but it's possible that // of the collapse operations above (especially summing) will create // this situation. if (channels[ch].nodes.size() == 1 && IsDefaultValue(channels[ch].op, channels[ch].nodes[0].val)) { // log_.Log(fplutil::kLogVerbose, " Omitting constant %s channel %d\n", // motive::MatrixOpName(channels[ch].op), ch); channels.erase(channels.begin() + ch); } } // Ensure that the channels remain in accending order of id. std::sort(channels.begin(), channels.end(), [](const AnimChannel& lhs, const AnimChannel& rhs) { return lhs.id < rhs.id; }); } } void Animation::ShiftTime(int time_offset) { if (time_offset == 0) return; // log_.Log(fplutil::kLogImportant, "Shifting animation by %d ticks.\n", // time_offset); for (auto bone = bones_.begin(); bone != bones_.end(); ++bone) { for (auto ch = bone->channels.begin(); ch != bone->channels.end(); ++ch) { for (auto n = ch->nodes.begin(); n != ch->nodes.end(); ++n) { n->time += time_offset; } } } } void Animation::ExtendChannelsToTime(int end_time) { for (auto bone = bones_.begin(); bone != bones_.end(); ++bone) { Channels& channels = bone->channels; for (auto ch = channels.begin(); ch != channels.end(); ++ch) { Nodes& n = ch->nodes; // Ignore empty or constant channels. if (n.size() <= 1) continue; // Ignore channels that are already long enough. const SplineNode back = n.back(); if (back.time >= end_time) continue; // Append a point with 0 derivative at the back, if required. // This ensures that the extra segment is a flat line. 
if (back.derivative != 0) { n.push_back(SplineNode(back.time, back.val, 0.0f)); } // Append a point at the end time, also with 0 derivative. n.push_back(SplineNode(end_time, back.val, 0.0f)); } } } void Animation::BakeSqtAnimations() { if (cur_bone_index_ == -1) { return; } Channels& channels = CurChannels(); if (channels.empty()) { return; } // Compute start and end times for this bone. const int start_time = bones_[cur_bone_index_].MinAnimatedTime(); const int end_time = bones_[cur_bone_index_].MaxAnimatedTime(); // Determine the sample rate and required number of samples. float sample_rate = 0.f; int num_samples = 1; // Length 0 animations only need a single sample. if (start_time != end_time) { // Otherwise, compute the duration and determine the number of samples // required at 120hz. const float duration = static_cast<float>(end_time - start_time); const float ideal_sample_rate = 1000.f / 120.f; num_samples = std::ceil(duration / ideal_sample_rate); // Ensure the actual sample rate will place the last node exactly on // `end_time`. Increment the number of samples so `end_time` is included. sample_rate = duration / num_samples; ++num_samples; } // Store over-sampled translation, rotation, and scale curves. AnimCurve curves[10] = {AnimCurve(motive::kTranslateX, num_samples), AnimCurve(motive::kTranslateY, num_samples), AnimCurve(motive::kTranslateZ, num_samples), AnimCurve(motive::kQuaternionW, num_samples), AnimCurve(motive::kQuaternionX, num_samples), AnimCurve(motive::kQuaternionY, num_samples), AnimCurve(motive::kQuaternionZ, num_samples), AnimCurve(motive::kScaleX, num_samples), AnimCurve(motive::kScaleY, num_samples), AnimCurve(motive::kScaleZ, num_samples)}; // Track the previous quaternion used to that neighboring quaternions lie in // the same 4-dimensional hemisphere since both q and -q represent the same // orientation. mathfu::quat last_rotation = mathfu::quat::identity; // Take the designated number of curve samples. float time = start_time; for (int i = 0; i < num_samples; ++i) { // Get a list of matrix operations to apply at this time. std::vector<motive::MatrixOperation> ops; for (auto ch = channels.begin(); ch != channels.end(); ++ch) { // If the time is outside the curve, insert an operation with the first // or last value. const SplineNode& front = ch->nodes.front(); if (time <= static_cast<float>(front.time)) { ops.emplace_back(motive::MatrixOperationInit(ch->id, ch->op, front.val), nullptr); continue; } const SplineNode& back = ch->nodes.back(); if (time >= static_cast<float>(back.time)) { ops.emplace_back(motive::MatrixOperationInit(ch->id, ch->op, back.val), nullptr); continue; } // Otherwise find where the time is within the curve. for (int j = 0; j < ch->nodes.size() - 1; ++j) { const SplineNode& start_node = ch->nodes[j]; // Insert nearly-exact matches. if (AreNearlyEqual(start_node.time, time, 1e-5f)) { ops.emplace_back( motive::MatrixOperationInit(ch->id, ch->op, start_node.val), nullptr); break; } else if (static_cast<float>(start_node.time) < time) { // Ensure that `start_node` is the closest node before `time`. const SplineNode& end_node = ch->nodes[j + 1]; if (static_cast<float>(end_node.time) < time) { continue; } // Create a cubic that covers the range between `start_node` and // `end_node`. The cubic `c` is shifted to the left, to start at 0 // instead of `start_node.time`, to maintain floating-point // precision. 
const float time_width = static_cast<float>(end_node.time - start_node.time); const motive::CubicCurve c( motive::CubicInit(start_node.val, start_node.derivative, end_node.val, end_node.derivative, time_width)); // Shift the time of this sample into the cubic above, but cast it to // an integer since curve samples aren't floating point values and we // want the samples to be as accurate as possible const int shifted_time = time - static_cast<float>(start_node.time); ops.emplace_back(motive::MatrixOperationInit( ch->id, ch->op, c.Evaluate(shifted_time)), nullptr); break; } } } // Evaluate the transform matrix using Motive's function to maintain // consistency with how this frame *would* be computed at runtime. mathfu::vec3 scale; const mathfu::mat4 value = motive::MatrixOperation::CalculateResultMatrix( ops.data(), ops.size(), &scale); const mathfu::vec3 translation = value.TranslationVector3D(); mathfu::quat rotation = ExtractQuaternion(value, scale); // Flip the quaternion if it lies in the opposite 4-dimensional hemisphere // as the previous quaternion to avoid huge changes in the individual // in subsequent component nodes. float dotprod = mathfu::quat::DotProduct(last_rotation, rotation); if (dotprod < 0.f) { rotation.set_scalar(-rotation.scalar()); rotation.set_vector(-rotation.vector()); } last_rotation = rotation; // Add new nodes to the sampled curves. curves[0].AddNode(time, translation.x); curves[1].AddNode(time, translation.y); curves[2].AddNode(time, translation.z); const mathfu::vec3 rotation_vector = rotation.vector(); curves[3].AddNode(time, rotation.scalar()); curves[4].AddNode(time, rotation_vector[0]); curves[5].AddNode(time, rotation_vector[1]); curves[6].AddNode(time, rotation_vector[2]); curves[7].AddNode(time, scale.x); curves[8].AddNode(time, scale.y); curves[9].AddNode(time, scale.z); // Update the sample time. If on the last sample, use exactly the end time. time += sample_rate; if (i == num_samples - 1) { time = end_time; } } // Re-allocate channels now that all the new curve data is prepared. channels.clear(); for (int i = 0; i < 10; ++i) { curves[i].GenerateDerivatives(); FlatChannelId id = AllocChannel(cur_bone_index_, curves[i].type, i); AddCurve(id, curves[i]); PruneNodes(id); } } float Animation::ToleranceForOp(motive::MatrixOperationType op) const { if (motive::RotateOp(op)) { return tolerances_.rotate; } else if (motive::TranslateOp(op)) { return tolerances_.translate; } else if (motive::ScaleOp(op)) { return tolerances_.scale; } else if (motive::QuaternionOp(op)) { return tolerances_.quaternion; } return 0.1f; } bool Animation::IsDefaultValue(motive::MatrixOperationType op, float value) const { return fabs(value - DefaultOpValue(op)) < ToleranceForOp(op); } int Animation::MaxAnimatedTime() const { int max_time = std::numeric_limits<int>::min(); for (auto bone = bones_.begin(); bone != bones_.end(); ++bone) { for (auto ch = bone->channels.begin(); ch != bone->channels.end(); ++ch) { // Only consider channels with more than one keyframe (non-constant). if (ch->nodes.size() > 1) { max_time = std::max(max_time, ch->nodes.back().time); } } } return max_time == std::numeric_limits<int>::min() ? 0 : max_time; } int Animation::MinAnimatedTime() const { int min_time = std::numeric_limits<int>::max(); for (auto bone = bones_.begin(); bone != bones_.end(); ++bone) { for (auto ch = bone->channels.begin(); ch != bone->channels.end(); ++ch) { // Only consider channels with more than one keyframe (non-constant). 
if (ch->nodes.size() > 1) { min_time = std::min(min_time, ch->nodes[0].time); } } } return min_time == std::numeric_limits<int>::max() ? 0 : min_time; } Animation::Channels& Animation::CurChannels() { assert(static_cast<unsigned int>(cur_bone_index_) < bones_.size()); return bones_[cur_bone_index_].channels; } const Animation::Channels& Animation::CurChannels() const { assert(static_cast<unsigned int>(cur_bone_index_) < bones_.size()); return bones_[cur_bone_index_].channels; } float Animation::Tolerance(FlatChannelId channel_id) const { const Channels& channels = CurChannels(); return ToleranceForOp(channels[channel_id].op); } motive::BoneIndex Animation::FirstNonRepeatingBone( FlatChannelId* first_channel_id) const { for (motive::BoneIndex bone_idx = 0; bone_idx < bones_.size(); ++bone_idx) { const AnimBone& bone = bones_[bone_idx]; const Channels& channels = bone.channels; for (FlatChannelId channel_id = 0; channel_id < static_cast<FlatChannelId>(channels.size()); ++channel_id) { const AnimChannel& channel = channels[channel_id]; // Get deltas for the start and end of the channel. const SplineNode& start = channel.nodes.front(); const SplineNode& end = channel.nodes.back(); const float diff_val = fabs(start.val - end.val); const float diff_derivative_angle = fabs(DerivativeAngle(start.derivative - end.derivative)); // Return false unless the start and end of the channel are the same. const float tolerance = ToleranceForOp(channel.op); const bool same = diff_val < tolerance && diff_derivative_angle < tolerances_.repeat_derivative_angle; if (!same) { *first_channel_id = channel_id; return bone_idx; } } } return motive::kInvalidBoneIdx; } bool Animation::Repeat(RepeatPreference repeat_preference) const { if (repeat_preference == kNeverRepeat) return false; // Check to see if the animation is repeatable. FlatChannelId channel_id = 0; const motive::BoneIndex bone_idx = FirstNonRepeatingBone(&channel_id); const bool repeat = repeat_preference == kAlwaysRepeat || (repeat_preference == kRepeatIfRepeatable && bone_idx == motive::kInvalidBoneIdx); // Log repeat information. if (repeat_preference == kAlwaysRepeat) { if (bone_idx != motive::kInvalidBoneIdx) { // const Bone& bone = bones_[bone_idx]; // const Channel& channel = bone.channels[channel_id]; // log_.Log(fplutil::kLogWarning, // "Animation marked as repeating (as requested)," // " but it does not repeat on bone %s's" // " `%s` channel\n", // bone.name.c_str(), motive::MatrixOpName(channel.op)); } } else if (repeat_preference == kRepeatIfRepeatable) { // log_.Log(fplutil::kLogVerbose, // repeat ? "Animation repeats.\n" : "Animation does not // repeat.\n"); } return repeat; } bool Animation::UniformScaleChannels(const Channels& channels, FlatChannelId channel_id) const { if (channel_id + 2 >= static_cast<FlatChannelId>(channels.size())) return false; // Consider the three channels starting at `channel_id`. const AnimChannel& c0 = channels[channel_id]; const AnimChannel& c1 = channels[channel_id + 1]; const AnimChannel& c2 = channels[channel_id + 2]; // The order is not important, but we need kScaleX, Y, and Z. const uint32_t op_bits = (1 << c0.op) | (1 << c1.op) | (1 << c2.op); if (op_bits != kScaleXyzBitfield) return false; // The sequence of values must also be identical. const Nodes& n0 = c0.nodes; const Nodes& n1 = c1.nodes; const Nodes& n2 = c2.nodes; const bool same_length = n0.size() == n1.size() && n0.size() == n2.size() && n1.size() == n2.size(); if (!same_length) return false; // The splines must be equal. 
const float tolerance = tolerances_.scale; for (size_t i = 0; i < n0.size(); ++i) { const SplineNode v0 = n0[i]; const SplineNode v1 = n1[i]; const SplineNode v2 = n2[i]; const bool are_equal = EqualNodes(v0, v1, tolerance, tolerances_.derivative_angle) && EqualNodes(v0, v2, tolerance, tolerances_.derivative_angle) && EqualNodes(v1, v2, tolerance, tolerances_.derivative_angle); if (!are_equal) return false; } return true; } FlatChannelId Animation::SummableChannel(const Channels& channels, FlatChannelId ch) const { const motive::MatrixOperationType ch_op = channels[ch].op; for (FlatChannelId id = ch + 1; id < static_cast<FlatChannelId>(channels.size()); ++id) { const motive::MatrixOperationType id_op = channels[id].op; // If we're adjacent to a similar op, we can combine by summing. if (id_op == ch_op) return id; // Rotate ops cannot have other ops inbetween them and still be combined. if (RotateOp(ch_op)) return -1; // Translate and scale ops can only have, respectively, other translate // and scale ops in between them. if (TranslateOp(ch_op) && !TranslateOp(id_op)) return -1; if (ScaleOp(ch_op) && !ScaleOp(id_op)) return -1; } return -1; } float Animation::EvaluateNodes(const Nodes& nodes, int time, float* derivative) { assert(!nodes.empty()); // Handle before and after curve cases. *derivative = 0.0f; if (time < nodes.front().time) return nodes.front().val; if (time >= nodes.back().time) return nodes.back().val; // Find first node after `time`. size_t i = 1; for (;; ++i) { assert(i < nodes.size()); if (nodes[i].time >= time) break; } const SplineNode& pre = nodes[i - 1]; const SplineNode& post = nodes[i]; assert(pre.time <= time && time <= post.time); // Create a cubic from before time to after time, and interpolate values // with it. const float cubic_total_time = static_cast<float>(post.time - pre.time); const float cubic_time = static_cast<float>(time - pre.time); const motive::CubicCurve cubic(motive::CubicInit( pre.val, pre.derivative, post.val, post.derivative, cubic_total_time)); *derivative = cubic.Derivative(cubic_time); return cubic.Evaluate(cubic_time); } bool Animation::GetValueAtTime(const Nodes& nodes, const Nodes::const_iterator& node, int time, float* value, float* derivative) const { if (node != nodes.end() && node->time == time) { *value = node->val; *derivative = node->derivative; return true; } else { *value = EvaluateNodes(nodes, time, derivative); return false; } } void Animation::SumChannels(Channels& channels, FlatChannelId ch_a, FlatChannelId ch_b) const { const Nodes& nodes_a = channels[ch_a].nodes; const Nodes& nodes_b = channels[ch_b].nodes; Nodes sum; // TODO: The following assumes that the key on constant channels // is not significant to its evaluation. With pre/post infinities, single // key curves might not necessarily be "constant" curves. We should validate // if elsewhere that assumption is also made. // // If there is only one key, we ignore it because we can sample the curve // at any time, and don't want its key time to affect the resulting curve. auto node_iter_a = (nodes_a.size() == 1) ? nodes_a.end() : nodes_a.begin(); auto node_iter_b = (nodes_b.size() == 1) ? nodes_b.end() : nodes_b.begin(); // If both channels are constant, the curve should just contain a single // key with the sum. Time and derivative are ignored in constant channels. 
if (nodes_a.size() == 1 && nodes_b.size() == 1) { sum.push_back(SplineNode(0, nodes_a[0].val + nodes_b[0].val, 0.0f)); } while (node_iter_a != nodes_a.end() || node_iter_b != nodes_b.end()) { int time = std::numeric_limits<int>::max(); if (node_iter_a != nodes_a.end()) { time = node_iter_a->time; } if (node_iter_b != nodes_b.end()) { time = std::min(time, node_iter_b->time); } float a, b; float da, db; if (GetValueAtTime(nodes_a, node_iter_a, time, &a, &da)) { ++node_iter_a; } if (GetValueAtTime(nodes_b, node_iter_b, time, &b, &db)) { ++node_iter_b; } sum.push_back(SplineNode(time, a + b, da + db)); } channels[ch_a].nodes = sum; } motive::BoneIndex Animation::BoneParent(int bone_idx) const { const int parent_bone_index = bones_[bone_idx].parent_bone_index; return parent_bone_index < 0 ? motive::kInvalidBoneIdx : static_cast<motive::BoneIndex>(parent_bone_index); } bool Animation::IntermediateNodesRedundant(const SplineNode* n, size_t len, float tolerance) const { // If the start and end nodes occur at the same time and are equal, // then ignore everything inbetween them. const SplineNode& start = n[0]; const SplineNode& end = n[len - 1]; if (EqualNodes(start, end, tolerance, tolerances_.derivative_angle)) return true; // Construct cubic curve `c` that skips all the intermediate nodes. const float cubic_width = static_cast<float>(end.time - start.time); const motive::CubicCurve c(motive::CubicInit( start.val, start.derivative, end.val, end.derivative, cubic_width)); // For each intermediate node, check if the cubic `c` is close. for (size_t i = 1; i < len - 1; ++i) { // Evaluate `c` at the time of `mid`. const SplineNode& mid = n[i]; const float mid_time = static_cast<float>(mid.time - start.time); const float mid_val = c.Evaluate(mid_time); const float mid_derivative = c.Derivative(mid_time); // If the mid point is on the curve, it's redundant. const float derivative_angle_error = DerivativeAngle(mid_derivative - mid.derivative); const bool mid_on_c = fabs(mid_val - mid.val) < tolerance && fabs(derivative_angle_error) < tolerances_.derivative_angle; if (!mid_on_c) return false; } // All mid points are redundant. return true; } bool Animation::EqualNodes(const SplineNode& a, const SplineNode& b, float tolerance, float derivative_tolerance) { return a.time == b.time && fabs(a.val - b.val) < tolerance && fabs(DerivativeAngle(a.derivative - b.derivative)) < derivative_tolerance; } float Animation::DefaultOpValue(motive::MatrixOperationType op) { return motive::OperationDefaultValue(op); } void Animation::LogChannel(FlatChannelId channel_id) const { const Channels& channels = CurChannels(); const Nodes& n = channels[channel_id].nodes; for (size_t i = 0; i < n.size(); ++i) { const SplineNode& node = n[i]; LOG(INFO) << " flat, " << i << ", " << node.time << ", " << node.val << ", " << node.derivative; } } void Animation::LogAllChannels() const { LogString log; log.format(" %30s %16s %9s %s\n", "bone name", "operation", "time range", "values"); LOG(INFO) << log; for (motive::BoneIndex bone_idx = 0; bone_idx < bones_.size(); ++bone_idx) { const AnimBone& bone = bones_[bone_idx]; const Channels& channels = bone.channels; if (channels.empty()) { continue; } for (auto c = channels.begin(); c != channels.end(); ++c) { LogString tmp; log.clear(); tmp.format(" %30s %16s ", bone.name.c_str(), MatrixOpName(c->op)); log.append(tmp); const char* format = motive::RotateOp(c->op) ? "%.0f " : motive::TranslateOp(c->op) ? "%.1f " : "%.2f "; const float factor = motive::RotateOp(c->op) ? 
motive::kRadiansToDegrees : 1.0f; const Nodes& n = c->nodes; if (n.size() <= 1) { tmp.format(" constant "); } else { tmp.format("%4d~%4d ", n[0].time, n[n.size() - 1].time); } log.append(tmp); for (size_t i = 0; i < n.size(); ++i) { tmp.format(format, factor * n[i].val); log.append(tmp); } LOG(INFO) << log; } } } bool Animation::GnuplotAllChannels(const std::string& gplot_dir) const { if (!CreateFolder(gplot_dir.c_str())) { return false; } for (motive::BoneIndex bone_idx = 0; bone_idx < NumBones(); ++bone_idx) { // Loop through bones; save data file for each, in turn. const AnimBone& bone = GetBone(bone_idx); const Channels& channels = bone.channels; if (channels.empty()) { continue; } std::string out_fullpath = gplot_dir + "/" + bone.name + ".dat"; std::ostringstream os; std::string bone_name = bone.name; std::replace(bone_name.begin(), bone_name.end(), '_', '-'); os << "# Run shell cmd below to visualize this file:\n#\n" << "# gnuplot -persist -e \"" << "d = '" << out_fullpath.c_str() << "'; " << "set title '" << bone_name << "' " << "font '14' textcolor rgbcolor 'royalblue'; " << "set linetype 1; set pointsize 1; " << "plot "; std::set<int> key_times; for (auto it = channels.begin(); it != channels.end(); ++it) { // Build union set of all keytimes over all channels std::string c_name = MatrixOpName(it->op); std::replace(c_name.begin(), c_name.end(), ' ', '-'); os << "d using 1:" << 2 + static_cast<int>(it - channels.begin()) << " " << "title '" << c_name << "' " << "with linespoints pointtype 7 pointsize .7, "; const Nodes& n = it->nodes; for (size_t i = 0; i < n.size(); ++i) { key_times.emplace(n[i].time); } } os << "\"\n#\n"; for (auto it = key_times.begin(); it != key_times.end(); ++it) { // Each row of data is keytime + values for all channels int key_time = *it; float value, deriv; os << key_time << " "; for (auto c = channels.begin(); c != channels.end(); ++c) { const float factor = motive::RotateOp(c->op) ? motive::kRadiansToDegrees : 1.0f; value = EvaluateNodes(c->nodes, key_time, &deriv); os << factor * value << " "; } os << "\n"; } const std::string& data = os.str(); if (!SaveFile(data.c_str(), data.size(), out_fullpath.c_str(), false)) { return false; } } return true; } } // namespace tool } // namespace lull
Switching terahertz wave with grating-coupled Kretschmann configuration. We present a terahertz wave switch based on a Kretschmann configuration consisting of a high-refractive-index prism, a liquid-crystal layer, and a periodically grooved metal grating. The switching mechanism relies on spoof surface plasmon polariton (SSPP) excitation in the attenuated-total-reflection regime, controlled by changing the refractive index of the liquid crystal. The results demonstrate the feasibility of tuning the attenuated-total-reflection terahertz wave intensity with an externally applied bias voltage. The extinction ratio of the terahertz switch reaches 31.48 dB. The switch offers good controllability and flexibility, and can be used in potential free-space terahertz device systems.
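For context on the headline figure: the extinction ratio of an on/off switch is conventionally defined in decibels as below; this definition is standard rather than taken from the paper, and the linear ratio is back-computed from the quoted 31.48 dB.

\mathrm{ER} = 10\log_{10}\!\left(\frac{P_{\mathrm{on}}}{P_{\mathrm{off}}}\right),
\qquad 31.48\ \mathrm{dB} \;\Leftrightarrow\; P_{\mathrm{on}}/P_{\mathrm{off}} \approx 1.4\times 10^{3}.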
// storeList stores the list in the cache, returning false if an entry
// for the same key is already present.
func (r *rebuild) storeList(list *List) bool {
	key := string(list.key)
	if _, ok := r.cache[key]; ok {
		return false
	}
	r.cache[key] = list
	return true
}
import { Component, OnInit } from '@angular/core';
import { Quote } from '../quote';

@Component({
  selector: 'app-quote',
  templateUrl: './quote.component.html',
  styleUrls: ['./quote.component.css']
})
export class QuoteComponent implements OnInit {
  quote: Quote[] = [
    new Quote(1, 'I think the people in this country have had enough of experts with organisations from acronyms saying that they know what is best and getting it consistently wrong.', 'Benitha', 'Janet', new Date(2021, 3, 14), 0, 0),
    new Quote(2, 'Always end your book with <NAME> saying something about rainbows or renaissances. Because you care.', 'Ange', 'Naomy', new Date(2021, 5, 9), 0, 0),
    new Quote(3, 'How many legs does a dog have if you call his tail a leg? Four. Saying that a tail is a leg doesnt make it a leg.', 'Diana', 'Tina', new Date(2020, 4, 12), 0, 0),
    new Quote(4, 'Having a positive mental attitude is asking how something can be done rather than saying it cant be done.', 'Angelic', 'Tharcissie', new Date(2021, 3, 18), 0, 0),
    new Quote(5, 'Stop saying these negative things about yourself. Look in the mirror and find something about yourself thats positive and celebrate that!', 'Ganza', 'Lyse', new Date(2021, 2, 14), 0, 0),
    new Quote(6, 'They invented hugs to let people know you love them without saying anything.', 'Adeline', 'Divine', new Date(2021, 1, 14), 0, 0),
  ];

  toggleDetails(index: any) {
    this.quote[index].showDescription = !this.quote[index].showDescription;
  }

  deleteQuote(isComplete: boolean, index: any) {
    if (isComplete) {
      let toDelete = confirm(`Are you sure you want to delete ${this.quote[index].quote}?`);
      if (toDelete) {
        this.quote.splice(index, 1);
      }
    }
  }

  addNewQuote(quote: any) {
    let quoteLength = this.quote.length;
    quote.id = quoteLength + 1;
    quote.completeDate = new Date(quote.completeDate);
    this.quote.push(quote);
  }

  constructor() { }

  ngOnInit(): void {
  }
}
<gh_stars>0
from common import game_functions as game

"""Main file for executing the program"""


def main() -> None:
    player_list = []
    print("WELCOME TO PIG\n")
    print("-------------------\n")
    print("HOW MANY PLAYERS ARE PLAYING?\n")
    # input() returns a string; convert before comparing or iterating
    num_of_players = int(input(">> "))
    if num_of_players < 2:
        player_list.append(game.add_bot())
    # range(1, n + 1) so every human player is registered, not n - 1 of them
    for num in range(1, num_of_players + 1):
        print("\nPLAYER {} NAME\n".format(num))
        nickname = input(">> ")
        player_list.append(game.adding_players(nickname))
    target = int(input("WHAT IS THE WINNING POINT?\n>> "))
    winner = game.play(player_list, target)


if __name__ == '__main__':
    main()
The Making of Modern Belize: Politics, Society and British Colonialism in Central America. By C. H. Grant. (London: Cambridge University Press, Cambridge Commonwealth Series, 1976. pp. xvi, 400. Notes, Biblio., Index, Tables, Illus.) In the final section of this piece, Ulloa studies how the order modified its stand on strict observance. With time, the secularization of society, and the passing of the golden age of missionary activity, the Dominicans gradually relaxed their rule. While the order continued to enforce its basic vows strictly, a greater liberality of interpretation crept in. This section is meticulously researched in the capitular acts of the Province of Santiago de Mexico. For this reason, among many, Ulloa's work stands as an important contribution. His use of the acts of these official meetings, which set out provincial policy, represents an important step in the study of the religious orders. Unfortunately, Ulloa did not consult many materials held in the Archivo General de Indias. Several of the reports and memoriales housed there could have helped to flesh out the official picture which he painted of the order. Aside from the capitular acts and some Roman archival material, the book is based primarily on printed sources. Yet those archival sources which he did use are of the utmost importance.
#include <iostream>
#include <vector>
#include <queue>
using namespace std;

#define fr(i,n) for(int i = 0; i < (n); i++)
#define vi vector< int >

// dist doubles as the 2-coloring: -1 = unvisited, 0/1 = the two colors
int dist[100000];
vi con[100000]; // sized to match dist; the grid has up to a*b cells (was 10000, an overflow bug)

// BFS from cell o, alternating colors along edges
void color(int o) {
    dist[o] = 0;
    queue< int > q;
    q.push(o);
    while(!q.empty()){
        int u = q.front(); q.pop();
        fr(i, (int)con[u].size()) {
            int v = con[u][i];
            if (dist[v] == -1){
                dist[v] = (dist[u] == 0) ? 1 : 0;
                q.push(v);
            }
        }
    }
}

int main() {
    int a, b;
    cin >> a >> b;
    vector< vector<bool> > c(a, vector<bool>(b));
    fr(i,a){
        fr(j,b){
            char aux;
            cin >> aux;
            c[i][j] = (aux=='.');
        }
    }
    // connect each open cell to its open 4-neighbours; both directions are
    // covered because every ordered pair of cells is visited once
    fr(i,a){
        fr(j,b){
            if (c[i][j]){
                if(j-1 >= 0 && c[i][j-1]) con[i*b + (j-1)].push_back(i*b + j);
                if(j+1 <  b && c[i][j+1]) con[i*b + (j+1)].push_back(i*b + j);
                if(i-1 >= 0 && c[i-1][j]) con[(i-1)*b + j].push_back(i*b + j);
                if(i+1 <  a && c[i+1][j]) con[(i+1)*b + j].push_back(i*b + j);
            }
        }
    }
    fr(i, a*b) dist[i] = -1;
    fr(i, a*b) if(dist[i] == -1) color(i);
    // print the 2-coloring: W/B for open cells, '-' for blocked ones
    fr(i, a) {
        fr(j, b){
            if (c[i][j]) cout << (dist[i*b + j] ? 'W' : 'B');
            else cout << '-';
        }
        cout << endl;
    }
}
package main import ( "fmt" "github.com/alexpfx/go_menus/fzf" ) func main() { testFzfBuilder() testFzfBuilder2() } func testFzfBuilder() { const input1 = "1 janeiro sexta true\n2 fevereiro quarta true\n2 março terça false" m := fzf.New("Selecione", false, "", "") run, err := m.Run(input1) if err != nil { fmt.Println(err) } fmt.Println(run) } func testFzfBuilder2() { const input2 = "1;gremio fbpa\n2;palmeiras fc\n3;figueira fc" m := fzf.NewIndexed("Selecione") run, err := m.Run(input2) if err != nil { fmt.Println(err) } fmt.Println(run) }
<reponame>coderling/astc_preview
#include "decode_wrap.h"

#ifdef DEBUG_PRINT_DIAGNOSTICS
#include <stdio.h>
#endif

// for test use: trivial exports to verify the native binding works
int Add(int a, int b)
{
    return a + b; // was MIN(a, b), which did not add
}

unsigned char* GetUnsignedCharArr()
{
    unsigned char* output = new unsigned char[10];
    for (size_t i = 0; i < 10; i++)
    {
        output[i] = (unsigned char)i;
    }
    return output;
}

astc_codec_image* GetCustomStruct()
{
    astc_codec_image* img = allocate_image(8, 1024, 720, 1, 0);
    int x, y, z;
    int exsize = img->xsize + 2 * img->padding;
    int eysize = img->ysize + 2 * img->padding;
    int ezsize = (img->zsize == 1) ? 1 : img->zsize + 2 * img->padding;
    if (img->imagedata8)
    {
        for (z = 0; z < ezsize; z++)
            for (y = 0; y < eysize; y++)
                for (x = 0; x < exsize; x++)
                {
                    img->imagedata8[z][y][4 * x] = 0xFF;
                    img->imagedata8[z][y][4 * x + 1] = 0;
                    img->imagedata8[z][y][4 * x + 2] = 0;
                    img->imagedata8[z][y][4 * x + 3] = 0xFF;
                }
    }
    return img;
}

static UnityError logFunc = nullptr;

void SetUnityError(UnityError error)
{
    logFunc = error;
}

enum ECompressMode
{
    BLOCK_4x4 = 1,
    BLOCK_5x5 = 2,
    BLOCK_6x6 = 3,
    BLOCK_8x8 = 4,
    BLOCK_10x10 = 5,
    BLOCK_12x12 = 6,
};

enum ECompressQuality
{
    QUALITY_VERYFAST = 0,
    QUALITY_FAST,
    QUALITY_MEDIUM,
    QUALITY_THOROUGH,
    QUALITY_EXHAUSTIVE,
};

astc_codec_image* pack_and_unpack_image(const char* file_path, int compress_mode, int decode_mode, int sw_alpha, int quality)
{
    prepare_angular_tables();
    build_quantization_mode_table();
    int result;
    astc_codec_image* input_image = astc_codec_load_image(file_path, 0, &result);
    if (result < 0)
    {
        return nullptr;
    }
    int xdim_2d = 0, ydim_2d = 0, zdim_2d = 1;
    ECompressMode mode = (ECompressMode)compress_mode;
    switch (mode)
    {
    case BLOCK_4x4: xdim_2d = ydim_2d = 4; break;
    case BLOCK_5x5: xdim_2d = ydim_2d = 5; break; // break was missing here, so 5x5 fell through to 6x6
    case BLOCK_6x6: xdim_2d = ydim_2d = 6; break;
    case BLOCK_8x8: xdim_2d = ydim_2d = 8; break;
    case BLOCK_10x10: xdim_2d = ydim_2d = 10; break;
    case BLOCK_12x12: xdim_2d = ydim_2d = 12; break;
    default: return nullptr;
    }
    astc_decode_mode inner_decode_mode = (astc_decode_mode)decode_mode;
    // default weight params
    error_weighting_params ewp;
    ewp.rgb_power = 1.0f;
    ewp.alpha_power = 1.0f;
    ewp.rgb_base_weight = 1.0f;
    ewp.alpha_base_weight = 1.0f;
    ewp.rgb_mean_weight = 0.0f;
    ewp.rgb_stdev_weight = 0.0f;
    ewp.alpha_mean_weight = 0.0f;
    ewp.alpha_stdev_weight = 0.0f;
    ewp.rgb_mean_and_stdev_mixing = 0.0f;
    ewp.mean_stdev_radius = 0;
    ewp.enable_rgb_scale_with_alpha = 0;
    ewp.alpha_radius = 0;
    ewp.block_artifact_suppression = 0.0f;
    ewp.rgba_weights[0] = 1.0f;
    ewp.rgba_weights[1] = 1.0f;
    ewp.rgba_weights[2] = 1.0f;
    ewp.rgba_weights[3] = 1.0f;
    ewp.ra_normal_angular_scale = 0;
    ewp.max_refinement_iters = 0;
    ewp.block_mode_cutoff = 0;
    ewp.texel_avg_error_limit = 0;
    int padding = MAX(ewp.mean_stdev_radius, ewp.alpha_radius);
    swizzlepattern swz_encode = { 0, 1, 2, 3 };
    // sw_alpha may only be 4 or 5, indicating whether alpha is enabled; enabled by default
    if (sw_alpha != 4 && sw_alpha != 5) // was ||, which is always true
    {
        sw_alpha = 5;
    }
    swz_encode.a = (uint8_t)sw_alpha;
    swizzlepattern swz_decode = { 0, 1, 2, 3 };
    float oplimit = 0.0f;
    float mincorrel = 0.0f;
    int bmc = 0;
    int maxiters = 0;
    int plimit = 0;
    int pcdiv = 0;
    ECompressQuality e_quality = (ECompressQuality)quality;
    switch (e_quality)
    {
    case QUALITY_VERYFAST:
        plimit = 2; oplimit = 1.0f; bmc = 25; mincorrel = 0.5; maxiters = 1;
        switch (ydim_2d)
        {
        case 4: pcdiv = 240; break;
        case 5: pcdiv = 56; break;
        case 6: pcdiv = 64; break;
        case 8: pcdiv = 47; break;
        case 10: pcdiv = 36; break;
        case 12: pcdiv = 30; break;
        default: pcdiv = 30; break;
        }
        break;
    case QUALITY_FAST:
        plimit = 4; oplimit = 1.0f; bmc = 50; mincorrel = 0.5; maxiters = 1;
        switch (ydim_2d)
        {
        case 4: pcdiv = 60; break;
        case 5: pcdiv = 27; break;
        case 6: pcdiv = 30; break;
        case 8: pcdiv = 24; break;
        case 10: pcdiv = 16; break;
        case 12: pcdiv = 20; break;
        default: pcdiv = 20; break;
        }
        break;
    case QUALITY_MEDIUM:
        plimit = 25; oplimit = 1.2f; bmc = 75; mincorrel = 0.75; maxiters = 2;
        switch (ydim_2d)
        {
        case 4: pcdiv = 25; break;
        case 5: pcdiv = 15; break;
        case 6: pcdiv = 15; break;
        case 8: pcdiv = 10; break;
        case 10: pcdiv = 8; break;
        case 12: pcdiv = 6; break;
        default: pcdiv = 6; break;
        }
        break;
    case QUALITY_THOROUGH:
        plimit = 100; oplimit = 2.5f; bmc = 95; mincorrel = 0.95; maxiters = 4;
        switch (ydim_2d)
        {
        case 4: pcdiv = 12; break;
        case 5: pcdiv = 7; break;
        case 6: pcdiv = 7; break;
        case 8: pcdiv = 5; break;
        case 10: pcdiv = 4; break;
        case 12: pcdiv = 3; break;
        default: pcdiv = 3; break;
        }
        break;
    case QUALITY_EXHAUSTIVE:
        plimit = PARTITION_COUNT; oplimit = 1000.0f; bmc = 100; mincorrel = 0.99; maxiters = 4;
        switch (ydim_2d)
        {
        case 4: pcdiv = 3; break;
        default: pcdiv = 1; break; // 5, 6, 8, 10 and 12 all use 1
        }
        break;
    default:
        printf("ERROR: invalid quality\n");
        break;
    }
    if (plimit < 1)
    {
        plimit = 1;
    }
    else if (plimit > PARTITION_COUNT)
    {
        plimit = PARTITION_COUNT;
    }
    ewp.partition_search_limit = plimit;
    ewp.partition_1_to_2_limit = oplimit;
    ewp.lowest_correlation_cutoff = mincorrel;
    ewp.block_mode_cutoff = bmc;
    ewp.max_refinement_iters = maxiters;
    printf("Tips: plimit: %d oplimit %f mincorrel %f bmc %d maxiters %d\n", plimit, oplimit, mincorrel, bmc, maxiters);
    expand_block_artifact_suppression(xdim_2d, ydim_2d, zdim_2d, &ewp);
    if (padding > 0 ||
        ewp.rgb_mean_weight != 0.0f || ewp.rgb_stdev_weight != 0.0f ||
        ewp.alpha_mean_weight != 0.0f || ewp.alpha_stdev_weight != 0.0f)
    {
        compute_averages_and_variances(input_image, ewp.rgb_power, ewp.alpha_power, ewp.mean_stdev_radius, ewp.alpha_radius, swz_encode);
    }
    // only 8-bit output is considered
    int bitness = get_output_filename_enforced_bitness(file_path);
    if (bitness != 8)
    {
        if (inner_decode_mode == DECODE_HDR)
        {
            if (logFunc != nullptr)
                logFunc("failed: HDR decode mode requires a 16-bit output format");
            return nullptr;
        }
        bitness = 8;
    }
    int thread_count = get_number_of_cpus();
    astc_codec_image* output_image = pack_and_unpack_astc_image(input_image, xdim_2d, ydim_2d, zdim_2d, &ewp, inner_decode_mode, swz_encode, swz_decode, bitness, thread_count);
    if (output_image != nullptr)
    {
        if (logFunc != nullptr)
            logFunc("success ....");
        return output_image;
    }
    if (logFunc != nullptr)
        logFunc("failed ...."); // was reported as success on both paths
    return nullptr;
}
Joides Resolution research vessel drilled to find seabed sediment holding climate records up to 5m years old but discovered some dated to 50m years ago

Knowledge of Australia’s climate history has been expanded to the past 50m years, up from the past 500,000 years, via a major international scientific voyage from Fremantle to Darwin. The two-month expedition involved drilling of the seabed off the Western Australian coast for study by the Joides Resolution research vessel – one of the world’s largest floating scientific facilities. The International Ocean Discovery Program-led mission planned to find sediments that would show climate records to 5m years in the past but one section of seabed had a record stretching back to 50m years. Removed cores of sediment will now be analysed by scientists but it has already become clear Australia’s deserts are among the youngest in the world. Evidence in the sediments, from dust and sand blown across the continent, shows the desert regions of Australia are 1.5m years old. Separately, the sediments appear to show the monsoonal seasons of northern Australia are far older than have previously been shown. The $20m mission involved 125 scientists and crew from around the world and managed to extract sediments as far down as 1.1km below the seafloor, before docking in Darwin on Wednesday. “To get a series of layers of sediment that go back to 50 million years is quite extraordinary and very exciting,” expedition co-leader Prof Stephen Gallagher told Guardian Australia. “We have an environmental record that extends in detail to 500,000 years but we can definitely extend that now to five million years and then to 10 million years. The 50 million year record is quite thin, not as complete as the five million year record, but it’s a remarkable snapshot of the climate record no one has seen before.”

[Image: On board the research vessel, Joides Resolution. Photograph: Joides Resolution]

The north-west shelf, the area off the coast covered by the expedition, is a key system in the ocean circulation and monsoonal seasons that influence the climate of northern Australia. Ocean sediments in the area contain dust, pollen and other material blown off the land and deposited in the seabed. Scientists are also able to analyse small fossils to determine the chemistry of the ocean. “These sediments are preserved very well, far better than on continental Australia, where the weather is so harsh that most evidence is destroyed in desert conditions,” said Gallagher. “Ice cores can be examined for the past climate too, but they only go back to a maximum of 800,000 years.” The research expedition will be used to help broaden the knowledge of contemporary climate change. Carbon dioxide levels are now higher than at any recorded point since a period 3-5m years ago, a time in history when the Earth’s ice sheets were much smaller and the climate warmer and wetter than today. The release of vast amounts of CO2 from the burning of fossil fuels is the primary cause for the world warming by around 1C over the past century. “Climate change is happening, we are meddling with the climate system,” Gallagher said. “It’s great to have projections of climate in the future but how will we know how to react if we don’t know how the climate behaved in the past? It’s like history, you ignore history at your peril.”
Pauline Hanson rejects claim she is chief of staff James Ashby's puppet

One Nation senator Pauline Hanson has hit back at fresh criticism about the influence of her chief of staff, James Ashby, from a candidate dumped over a controversial website post. Peter Rogers, who was axed as the party's candidate for the Queensland seat of Mulgrave on Friday night, claimed Ms Hanson and One Nation had fallen under the control of Mr Ashby. "My personal view is Pauline is the puppet and he [Mr Ashby] is the puppeteer," he said on Saturday. Ms Hanson took to social media to dismiss Mr Rogers' claims. "Regardless of what the media says, I'm the leader of Pauline Hanson's One Nation and I have the final say on who represents One Nation," Ms Hanson said on Twitter. Mr Rogers was dumped over a post alleging that the Port Arthur massacre, and the death of a Syrian toddler who drowned at sea which sparked a global outcry last year, were fabrications. He says the post was written without his knowledge by a friend who had control of his website, but that he agreed with the sentiments in relation to Port Arthur. "I've always believed [there] is no way one man can do that much damage," he said, clarifying that he believed the massacre occurred, just that it could not have been the work of a lone gunman. One Nation has lost three candidates since December, when it unveiled a team of 36 to contest the next Queensland election. But the party also scored a major coup with the defection of former Newman government minister Steve Dickson earlier this month, giving it a toehold in State Parliament for the first time since 2009. And its ranks have been bolstered further by the announcement another Newman government figure, former Thuringowa MP Sam Cox, had jumped ship to join the party. Mr Cox, who lost his seat at the 2015 election after one term, will contest the seat of Burdekin at the next election, which is held by the LNP's Dale Last. AAP
<reponame>TeamSPoon/logicmoo_nlu /* * Copyright 2007-2009 TIM/ETI University of Geneva. * All Rights Reserved. Use is subject to license terms. * * File: SocketObserver.hpp * Author: <NAME> <<EMAIL>> * * See the file "license.terms" for information on usage and * redistribution of this file, and for a DISCLAIMER OF ALL * WARRANTIES. */ #ifndef SOCKETOBSERVER_HPP_ #define SOCKETOBSERVER_HPP_ #include <SDL_net.h> #include "TcpSocket.hpp" using namespace std; class SocketObserver { public: virtual ~SocketObserver(){}; // Packet received virtual void PacketReceived(ostringstream& buffer)=0; // New Client arrived virtual void ClientConnected(TcpSocket* socket)=0; }; #endif /*SOCKETOBSERVER_HPP_*/
//=============================================================== // //! TRF79xxA_readContinuous - Read multiple TRF79xxA registers //! //! \param pui8Payload is the address of the first register as //! well as the pointer for buffer where the results will be //! \param ui8Length is the number of registers to read //! //! This function reads a specified number of TRF79xxA registers //! from a specified address. //! //! \return None. // //=============================================================== void TRF79xxA_readContinuous(uint8_t * pui8Payload, uint8_t ui8Length) { *pui8Payload = (0x7f & *pui8Payload); *pui8Payload = (0x60 | *pui8Payload); HAL_GPIO_WritePin(SPIx_SS_GPIO_PORT, SPIx_SS_PIN, GPIO_PIN_RESET); if (HAL_SPI_Transmit(&SpiHandle, pui8Payload, 1, 1000) != HAL_OK) { Error_Handler(); } if (HAL_SPI_Receive(&SpiHandle, pui8Payload, ui8Length, 1000) != HAL_OK) { Error_Handler(); } HAL_GPIO_WritePin(SPIx_SS_GPIO_PORT, SPIx_SS_PIN, GPIO_PIN_SET); }
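The two bit operations at the top of this function build the TRF79xxA SPI command byte: the register address is masked to 7 bits, then the mode bits for a continuous register read are OR'ed in (0x60, per the byte values used in the code above). A quick sanity check of that arithmetic, with a hypothetical register address; the exact meaning of each mode bit is an assumption here and should be confirmed against the TRF79xxA datasheet:

# hypothetical 7-bit register address
addr = 0x09
cmd = (addr & 0x7F) | 0x60
print(hex(cmd))  # 0x69: address preserved in the low bits, read/continuous mode bits set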
package io.thebitspud.astroenvoys.levels; import com.badlogic.gdx.Gdx; import io.thebitspud.astroenvoys.AstroEnvoys; import io.thebitspud.astroenvoys.entities.EntityID; public class Level_Test extends Level { public Level_Test(AstroEnvoys app) { super(app); } @Override public String id() { return "Test"; } @Override public String title() { return "Game Testing"; } @Override public String desc() { return "Dev level for internal testing"; } @Override protected void onClear() { } @Override protected void addEvents() { final int y = Gdx.graphics.getHeight(); final int scrWidth = Gdx.graphics.getWidth(); // screen width game.spawnEnemy(r.nextInt(scrWidth - 100), y, EntityID.AZ_RAIDER); game.spawnEnemy(r.nextInt(scrWidth - 150), y, EntityID.AZ_SNIPER); } }
<filename>src/app/modules/campaign/campaign-landing/campaign-list-routing.module.ts<gh_stars>0 import { NgModule } from '@angular/core'; import { Routes, RouterModule } from '@angular/router'; import { CampaignLandingComponent } from './campaign-landing.component'; import { AllCampaignsComponent } from './tabs/all-campaigns/all-campaigns.component'; import { ActiveCampaignsComponent } from './tabs/active-campaigns/active-campaigns.component'; import { CompletedCampaignsComponent } from './tabs/completed-campaigns/completed-campaigns.component'; import { MyactiveCampaignsComponent } from './tabs/myactive-campaigns/myactive-campaigns/myactive-campaigns.component'; const routes: Routes = [ { path: '', component: CampaignLandingComponent, children: [ { path: '', redirectTo: 'ActiveCampaigns', pathMatch: 'full' }, { path: 'AllCampaigns', component: AllCampaignsComponent }, { path: 'ActiveCampaigns', component: ActiveCampaignsComponent }, { path: 'CompletedCampaigns', component: CompletedCampaignsComponent }, // { // path: 'myactiveCampaigns', // component: MyactiveCampaignsComponent // }, ] } ]; @NgModule({ imports: [RouterModule.forChild(routes)], exports: [RouterModule] }) export class CampaignListRoutingModule { }
Ethics in Accounting and Finance. Ethics forms the cornerstone of business and commerce today. It is the lifeblood of every institution, be it a private or a public enterprise. Organisations have to develop and implement a properly structured ethics policy outlining proper governance within the institution. Accounting and finance services are crucial in managing a company's finances and wealth. For there to be transparency and trust in the accounting profession, ethics must be present. The many scandals and ethical dilemmas faced by organisations throughout the world have made us pause and take stock of what is happening in industry, and have prompted action in law and governance towards ethics and corporate governance. South Africa, too, has had its fair share of scandal, which led its legal fraternity, government, and business sector to develop the King I, King II, King III, and King IV codes that stipulate corporate governance and ethics. Organisations have to be honest, professional, and transparent in their business practices.
def run(self, use_cache=True) -> int: super().run(use_cache) notebook = self.notebook path = Path(notebook) output_nb_dir = self.output_nb_dir / path.parent output_nb_dir.mkdir(parents=True, exist_ok=True) reference_nb_dir = self.reference_nb_dir / path.parent reference_nb_dir.mkdir(parents=True, exist_ok=True) stripped_nb_dir = self.stripped_nb_dir / path.parent stripped_nb_dir.mkdir(parents=True, exist_ok=True) output_nb = output_nb_dir / path.name reference_nb = reference_nb_dir / path.name stripped_nb = stripped_nb_dir / path.name md5 = run_command(f'md5sum {str(self.absolute_notebook_path)}').split()[0] cache_dir = self.cache_dir / path.parent cache_dir.mkdir(parents=True, exist_ok=True) cache_nb_file = cache_dir / f'{md5}.json' to_cache = ['execution_time', 'fidelity', 'diff', 'text_diff', 'todos', 'headers', 'images'] if use_cache and cache_nb_file.exists(): with open(cache_nb_file, 'rb') as f: pickled = pickle.load(f) print(f'Reusing cached results for {self}') for key in to_cache: setattr(self, key, pickled[key]) return 0 notebook_json = self.notebook_json self.images = [ output['data']['image/png'] for cell in notebook_json['cells'] for output in cell.get('outputs', []) if 'data' in output and 'image/png' in output['data'] ] self.headers = [] for cell in notebook_json['cells']: if cell['cell_type'] == 'markdown': for line in cell['source']: if line.startswith('#'): self.headers.append(line) for cell in notebook_json['cells']: for line in cell.get('source', ''): if 'TODO' in line: self.todos.append(line) notebook_stripped = deepcopy(notebook_json) for cell in notebook_json['cells']: cell['outputs'] = [] with open(stripped_nb, 'w') as f: json.dump(notebook_stripped, f) if self.execute: start_time = time.time() status = system(f'papermill {stripped_nb} {output_nb} {self.serialized_arguments}') or 0 self.execution_time = time.time() - start_time else: status = 0 warn(f'Skipping {self} (execute != True)') if self.execute and self.generate_diff: system( f'papermill {self.absolute_notebook_path} {reference_nb} {self.serialized_arguments} --prepare-only' ' --log-level WARNING' ) with NamedTemporaryFile(delete=False) as tf: command = f'nbdiff {reference_nb} {output_nb} --ignore-metadata --ignore-details --out {tf.name}' result = run_command(command) with open(tf.name) as f: try: self.diff = json.load(f) except JSONDecodeError as e: warn(f'Could not load the diff file: {result}, {f.readlines()}') command = f'nbdiff {reference_nb} {output_nb} --ignore-metadata --ignore-details --no-use-diff --no-git' self.text_diff = run_command(command) from ansi2html import Ansi2HTMLConverter conv = Ansi2HTMLConverter() self.text_diff = conv.convert(self.text_diff) changes = len(self.diff[0]['diff']) if self.diff else 0 total_cells = len(notebook_json['cells']) self.fidelity = (total_cells - changes) / total_cells * 100 if status == 0: with open(cache_nb_file, 'wb') as f: pickle.dump({ key: getattr(self, key) for key in to_cache }, f) self.status = status return status
/* ** Append the contents of SrcList p2 to SrcList p1 and return the resulting ** SrcList. Or, if an error occurs, return NULL. In all cases, p1 and p2 ** are deleted by this function. */ SrcList *sqlite3SrcListAppendList(Parse *pParse, SrcList *p1, SrcList *p2){ assert( p1 && p1->nSrc==1 ); if( p2 ){ SrcList *pNew = sqlite3SrcListEnlarge(pParse, p1, p2->nSrc, 1); if( pNew==0 ){ sqlite3SrcListDelete(pParse->db, p2); }else{ p1 = pNew; memcpy(&p1->a[1], p2->a, p2->nSrc*sizeof(SrcItem)); sqlite3DbFree(pParse->db, p2); } } return p1; }
<gh_stars>0
use std;
use std::sync::Arc;

use failure::{Error, ResultExt};
use remoteprocess::{ProcessMemory, Pid, Process};
use serde_derive::Serialize;

use crate::python_interpreters::{InterpreterState, ThreadState, FrameObject, CodeObject, TupleObject};
use crate::python_data_access::{copy_string, copy_bytes};
use crate::config::LineNo;

/// Call stack for a single python thread
#[derive(Debug, Clone, Serialize)]
pub struct StackTrace {
    /// The process id that generated this stack trace
    pub pid: Pid,
    /// The python thread id for this stack trace
    pub thread_id: u64,
    /// The python thread name for this stack trace
    pub thread_name: Option<String>,
    /// The OS thread id for this stack trace
    pub os_thread_id: Option<u64>,
    /// Whether or not the thread was active
    pub active: bool,
    /// Whether or not the thread held the GIL
    pub owns_gil: bool,
    /// The frames
    pub frames: Vec<Frame>,
    /// process commandline / parent process info
    pub process_info: Option<Arc<ProcessInfo>>
}

/// Information about a single function call in a stack trace
#[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize)]
pub struct Frame {
    /// The function name
    pub name: String,
    /// The full filename of the file
    pub filename: String,
    /// The module/shared library the function is from
    pub module: Option<String>,
    /// A short, more readable, representation of the filename
    pub short_filename: Option<String>,
    /// The line number inside the file (or 0 for native frames without line information)
    pub line: i32,
    /// Local Variables associated with the frame
    pub locals: Option<Vec<LocalVariable>>,
}

#[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize)]
pub struct LocalVariable {
    pub name: String,
    pub addr: usize,
    pub arg: bool,
    pub repr: Option<String>,
}

#[derive(Debug, Clone, Serialize)]
pub struct ProcessInfo {
    pub pid: Pid,
    pub command_line: String,
    pub parent: Option<Box<ProcessInfo>>
}

/// Given an InterpreterState, this function returns a vector of stack traces for each thread
pub fn get_stack_traces<I>(interpreter: &I, process: &Process, lineno: LineNo) -> Result<Vec<StackTrace>, Error>
        where I: InterpreterState {
    // TODO: deprecate this method
    let mut ret = Vec::new();
    let mut threads = interpreter.head();
    while !threads.is_null() {
        let thread = process.copy_pointer(threads).context("Failed to copy PyThreadState")?;
        ret.push(get_stack_trace(&thread, process, false, lineno)?);
        // This seems to happen occasionally when scanning BSS addresses for valid interpreters
        if ret.len() > 4096 {
            return Err(format_err!("Max thread recursion depth reached"));
        }
        threads = thread.next();
    }
    Ok(ret)
}

/// Gets a stack trace for an individual thread
pub fn get_stack_trace<T>(thread: &T, process: &Process, copy_locals: bool, lineno: LineNo) -> Result<StackTrace, Error>
        where T: ThreadState {
    // TODO: just return frames here? everything else probably should be returned out of scope
    let mut frames = Vec::new();
    let mut frame_ptr = thread.frame();
    while !frame_ptr.is_null() {
        let frame = process.copy_pointer(frame_ptr).context("Failed to copy PyFrameObject")?;
        let code = process.copy_pointer(frame.code()).context("Failed to copy PyCodeObject")?;
        let filename = copy_string(code.filename(), process).context("Failed to copy filename")?;
        let name = copy_string(code.name(), process).context("Failed to copy function name")?;

        let line = match lineno {
            LineNo::NoLine => 0,
            LineNo::FirstLineNo => code.first_lineno(),
            LineNo::LastInstruction => match get_line_number(&code, frame.lasti(), process) {
                Ok(line) => line,
                Err(e) => {
                    // Failing to get the line number really shouldn't be fatal here, but
                    // can happen in extreme cases (https://github.com/benfred/py-spy/issues/164).
                    // Rather than fail, set the line number to 0. This is used by the native extensions
                    // to indicate that we can't load a line number and it should be handled gracefully
                    warn!("Failed to get line number from {}.{}: {}", filename, name, e);
                    0
                }
            }
        };

        let locals = if copy_locals {
            Some(get_locals(&code, frame_ptr, &frame, process)?)
        } else {
            None
        };

        frames.push(Frame{name, filename, line, short_filename: None, module: None, locals});
        if frames.len() > 4096 {
            return Err(format_err!("Max frame recursion depth reached"));
        }
        frame_ptr = frame.back();
    }

    Ok(StackTrace{pid: process.pid, frames, thread_id: thread.thread_id(),
                  thread_name: None, owns_gil: false, active: true,
                  os_thread_id: None, process_info: None})
}

impl StackTrace {
    pub fn status_str(&self) -> &str {
        match (self.owns_gil, self.active) {
            (_, false) => "idle",
            (true, true) => "active+gil",
            (false, true) => "active",
        }
    }

    pub fn format_threadid(&self) -> String {
        // native threadids in osx are kinda useless, use the pthread id instead
        #[cfg(target_os="macos")]
        return format!("{:#X}", self.thread_id);

        // otherwise use the native threadid if given
        #[cfg(not(target_os="macos"))]
        match self.os_thread_id {
            Some(tid) => format!("{}", tid),
            None => format!("{:#X}", self.thread_id)
        }
    }
}

/// Returns the line number from a PyCodeObject (given the lasti index from a PyFrameObject)
fn get_line_number<C: CodeObject, P: ProcessMemory>(code: &C, lasti: i32, process: &P) -> Result<i32, Error> {
    let table = copy_bytes(code.line_table(), process).context("Failed to copy line number table")?;
    Ok(code.get_line_number(lasti, &table))
}

fn get_locals<C: CodeObject, F: FrameObject, P: ProcessMemory>(code: &C, frameptr: *const F, frame: &F, process: &P) -> Result<Vec<LocalVariable>, Error> {
    let local_count = code.nlocals() as usize;
    let argcount = code.argcount() as usize;
    let varnames = process.copy_pointer(code.varnames())?;

    let ptr_size = std::mem::size_of::<*const i32>();
    let locals_addr = frameptr as usize + std::mem::size_of_val(frame) - ptr_size;

    let mut ret = Vec::new();
    for i in 0..local_count {
        let nameptr: *const C::StringObject = process.copy_struct(varnames.address(code.varnames() as usize, i))?;
        let name = copy_string(nameptr, process)?;
        let addr: usize = process.copy_struct(locals_addr + i * ptr_size)?;
        if addr == 0 {
            continue;
        }
        ret.push(LocalVariable{name, addr, arg: i < argcount, repr: None});
    }
    Ok(ret)
}

impl ProcessInfo {
    pub fn to_frame(&self) -> Frame {
        Frame{name: format!("process {}:\"{}\"", self.pid, self.command_line),
              filename: String::from(""),
              module: None, short_filename: None, line: 0, locals: None}
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use remoteprocess::LocalProcess;
    use
crate::python_bindings::v3_7_0::{PyCodeObject}; use crate::python_data_access::tests::to_byteobject; #[test] fn test_get_line_number() { let mut lnotab = to_byteobject(&[0u8, 1, 10, 1, 8, 1, 4, 1]); let code = PyCodeObject{co_firstlineno: 3, co_lnotab: &mut lnotab.base.ob_base.ob_base, ..Default::default()}; let lineno = get_line_number(&code, 30, &LocalProcess).unwrap(); assert_eq!(lineno, 7); } }
// note: item width covers the getfunc return plus the suffix; the prefix is excluded
// note: set width to 0 for variadic width
baritem items[] = {
    /* function   prefix phrase  item width  suffix phrase */
    { getsong,    "♫[ ",         0,          "] " },
    { getvolm,    "🔈[",         3,          "%] " },
    { getvmst,    "VM [",        8,          "] "},
    { getbatlvl,  "🔋[",         3,          "%] " },
    { getramu,    "内存[",       5,          "GB/" },
    { getramf,    "",            5,          "GB] " },
    { getramp,    "(",           3,          "%) " },
    { getnetu,    "网[",         5,          "上" },
    { getnetd,    " ",           5,          "下] " },
    { gettime,    "时[",         21,         "]" },
};

const char *net_pref = "wlp";
const char *alsa_device = "default"; // if using other than default, it will be something of the form "hw:0"
const char *vmconn = "qemu:///system";
const char *vmname = "win10";
<filename>spring-creed-example/spring-creed-batch/src/main/java/com/creed/handler/ArchiveTasklet.java package com.creed.handler; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.repeat.RepeatStatus; import org.springframework.core.io.Resource; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.stereotype.Component; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; /** * @className: ArchiveTasklet * @author: Ethan * @date: 14/12/2021 **/ @Component @Slf4j public class ArchiveTasklet implements Tasklet { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { PathMatchingResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver(); Resource[] resources = null; try { resources = patternResolver.getResources("classpath*:*.employee.txt"); } catch (IOException e) { e.printStackTrace(); } log.info("resources size:{}", resources.length); if (resources.length > 0) { File destDir = new File("/tmp/archive"); if (!destDir.exists()) { destDir.mkdirs(); } Path destPath = destDir.toPath(); for (Resource resource : resources) { File file = resource.getFile(); String fileName = String.format("%s_%s", file.getName(), LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMdd"))); // Files.copy(Paths.get(file.toURI()), destPath.resolve(fileName), StandardCopyOption.REPLACE_EXISTING); log.info("###Archive file to path {} ###", destPath); // Files.delete(file.toPath()); } } return RepeatStatus.FINISHED; } }
def execute_onnx(model, input_dict): model = si.infer_shapes(model) graph = model.graph execution_context = dict() for vi in graph.input: new_tensor = valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor for vi in graph.output: new_tensor = valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor for vi in graph.value_info: new_tensor = valueinfo_to_tensor(vi) execution_context[vi.name] = new_tensor for t in graph.initializer: execution_context[t.name] = np_helper.to_array(t) for inp_name in input_dict.keys(): if inp_name in execution_context: if execution_context[inp_name].shape == input_dict[inp_name].shape: execution_context[inp_name] = input_dict[inp_name] else: raise Exception( "Shape mismatch for provided input %s: found %s expected %s " % ( inp_name, str(execution_context[inp_name].shape), str(input_dict[inp_name].shape), ) ) else: raise Exception("Provided input not found in graph context: %s" % inp_name) for node in graph.node: execute_node(node, execution_context, graph) output_dict = dict() for out_tensor in graph.output: out_name = out_tensor.name output_dict[out_name] = execution_context[out_name] return output_dict
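A minimal usage sketch for the executor above, assuming a model file on disk and a single static-shaped graph input (the path and dtype are hypothetical; the helpers it relies on — valueinfo_to_tensor, execute_node, and the shape-inference import si — come from the surrounding module):

import numpy as np
import onnx

model = onnx.load("model.onnx")  # hypothetical path
inp = model.graph.input[0]
# derive the input shape from the graph's value info (assumes static dims)
shape = [d.dim_value for d in inp.type.tensor_type.shape.dim]
x = np.random.rand(*shape).astype(np.float32)
outputs = execute_onnx(model, {inp.name: x})
print({name: arr.shape for name, arr in outputs.items()})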
//***************************************************************************** // // GetStringDescriptor() // // hHubDevice - Handle of the hub device containing the port from which the // String Descriptor will be requested. // // ConnectionIndex - Identifies the port on the hub to which a device is // attached from which the String Descriptor will be requested. // // DescriptorIndex - String Descriptor index. // // LanguageID - Language in which the string should be requested. // //***************************************************************************** PSTRING_DESCRIPTOR_NODE GetStringDescriptor ( HANDLE hHubDevice, ULONG ConnectionIndex, UCHAR DescriptorIndex, USHORT LanguageID ) { BOOL success = 0; ULONG nBytes = 0; ULONG nBytesReturned = 0; UCHAR stringDescReqBuf[sizeof(USB_DESCRIPTOR_REQUEST) + MAXIMUM_USB_STRING_LENGTH]; PUSB_DESCRIPTOR_REQUEST stringDescReq = NULL; PUSB_STRING_DESCRIPTOR stringDesc = NULL; PSTRING_DESCRIPTOR_NODE stringDescNode = NULL; nBytes = sizeof(stringDescReqBuf); stringDescReq = (PUSB_DESCRIPTOR_REQUEST)stringDescReqBuf; stringDesc = (PUSB_STRING_DESCRIPTOR)(stringDescReq+1); memset(stringDescReq, 0, nBytes); stringDescReq->ConnectionIndex = ConnectionIndex; stringDescReq->SetupPacket.wValue = (USB_STRING_DESCRIPTOR_TYPE << 8) | DescriptorIndex; stringDescReq->SetupPacket.wIndex = LanguageID; stringDescReq->SetupPacket.wLength = (USHORT)(nBytes - sizeof(USB_DESCRIPTOR_REQUEST)); success = DeviceIoControl(hHubDevice, IOCTL_USB_GET_DESCRIPTOR_FROM_NODE_CONNECTION, stringDescReq, nBytes, stringDescReq, nBytes, &nBytesReturned, NULL); if (!success) { OOPS(); return NULL; } if (nBytesReturned < 2) { OOPS(); return NULL; } if (stringDesc->bDescriptorType != USB_STRING_DESCRIPTOR_TYPE) { OOPS(); return NULL; } if (stringDesc->bLength != nBytesReturned - sizeof(USB_DESCRIPTOR_REQUEST)) { OOPS(); return NULL; } if (stringDesc->bLength % 2 != 0) { OOPS(); return NULL; } stringDescNode = (PSTRING_DESCRIPTOR_NODE)ALLOC(sizeof(STRING_DESCRIPTOR_NODE) + stringDesc->bLength); if (stringDescNode == NULL) { OOPS(); return NULL; } stringDescNode->DescriptorIndex = DescriptorIndex; stringDescNode->LanguageID = LanguageID; memcpy(stringDescNode->StringDescriptor, stringDesc, stringDesc->bLength); return stringDescNode; }
<commit_msg>Add some mandatory and semi-mandatory dependencies <commit_before>from setuptools import setup, find_packages setup( name='couchforms', version='0.0.4', description='Dimagi Couch Forms for Django', author='Dimagi', author_email='[email protected]', url='http://www.dimagi.com/', install_requires = [ "couchexport" ], packages = find_packages(exclude=['*.pyc']), include_package_data=True ) <commit_after>from setuptools import setup, find_packages setup( name='couchforms', version='0.0.4', description='Dimagi Couch Forms for Django', author='Dimagi', author_email='[email protected]', url='http://www.dimagi.com/', install_requires = [ "couchdbkit", "couchexport", "dimagi-utils", "django", "lxml", "restkit", ], tests_require = [ 'coverage', 'django-coverage', ], packages = find_packages(exclude=['*.pyc']), include_package_data=True )
/** * Performs call with given request specifications. * * @param request any valid {@link MS_HttpRequest}. * @return response object. * @throws IOException if the request could not be executed due to cancellation, * a connectivity problem or timeout. Because networks can fail during an exchange, * it is possible that the remote server accepted the request before the failure. */ public static MS_HttpResponse call(MS_HttpRequest request) throws IOException { Objects.requireNonNull(request, "HTTP request must not be null"); Objects.requireNonNull(request.getUrl(), "URL / hostname must be provided in order to perform HTTP call"); Objects.requireNonNull(request.getMethod(), "HTTP method (GET, POST, PUT, DELETE) must be provided in order to perform HTTP call"); Request.Builder reqBuilder = new Request.Builder(); switch (request.getMethod()) { case GET: if (request.getParameters() != null) { HttpUrl.Builder httpBuilder = Objects.requireNonNull(HttpUrl.parse(request.getUrl())).newBuilder(); for (Map.Entry<String, String> parameters : request.getParameters().entrySet()) { httpBuilder.addQueryParameter(parameters.getKey(), parameters.getValue()); } reqBuilder.url(httpBuilder.build()); } else { reqBuilder.url(request.getUrl()); } reqBuilder.get(); break; case POST: reqBuilder.post(formBody(request)).url(request.getUrl()); break; case PUT: reqBuilder.put(formBody(request)).url(request.getUrl()); break; case DELETE: if (request.getBody() != null) { reqBuilder.delete(formBody(request)); } else if (request.getBodyAsString() != null) { reqBuilder.delete(formBody(request)); } else { reqBuilder.delete(); } reqBuilder.url(request.getUrl()); break; default: throw new MS_BadSetupException("Unsupported HTTP request method " + request.getMethod()); } if (request.getHeaders() != null) { for (Map.Entry<String, List<String>> headers : request.getHeaders().entrySet()) { List<String> headerValues = headers.getValue(); for (String value : headerValues) { reqBuilder.addHeader(headers.getKey(), value); } } } OkHttpClient clientConfigurations = request.getClientConfigurations() != null ? request.getClientConfigurations() : MS_HTTPConnectionConfigurations.DEFAULT_HTTP_CONFIG_FOR_CONNECTION.build(); Response response = clientConfigurations.newCall(reqBuilder.build()).execute(); MS_HttpResponse res = new MS_HttpResponse() .withUrl(request.getUrl()) .withMethod(request.getMethod()) .withResponse(response) .withResponseCode(response.code()); res.initJSONBody(Objects.requireNonNull(response.body()).string()); for (Map.Entry<String, List<String>> headers : response.headers().toMultimap().entrySet()) { List<String> headerValues = headers.getValue(); res.withHeader(headers.getKey(), headerValues.get(0)); } return res; }
use serde::{Deserialize}; use crate::utils::{err, ok, Result}; use crate::messages::*; #[derive(Deserialize, Debug, Clone)] pub struct GUIBundle { pub url: String, pub name: String, pub params: Vec<Value>, pub width: i32, pub height: i32, } #[derive(Deserialize, Debug, Clone)] pub struct Info { pub name: String, pub vendor: String, pub presets: u32, pub parameters: u32, pub inputs: i32, pub outputs: i32, pub midi_inputs: u32, pub midi_outputs: u32, pub id: u32, pub version: u32, pub category: String, pub initial_delay: u32, pub preset_chunks: bool, pub f64_precision: bool, pub silent_when_stopped: bool, } #[derive(Deserialize, Debug, Clone)] pub struct Bundle { pub wasm_url: String, pub gui: GUIBundle, pub info: Info, } impl Bundle { pub fn from_json(data: &str) -> Result<Self> { let bundle : serde_json::Result<Bundle> = serde_json::from_str(data); //println!("{:?}", bundle); bundle.map_or(err(), |b| ok(b)) } } #[derive(Deserialize, Debug, Clone)] pub struct Module { pub name: String, pub json_url: String, } #[derive(Deserialize, Debug, Clone)] pub struct Modules { pub default: String, pub modules: Vec<Module>, } impl Modules { pub fn from_json(data: &str) -> Result<Self> { let modules : serde_json::Result<Modules> = serde_json::from_str(data); modules.map_or(err(), |b| ok(b)) } }
<reponame>Martynas-P/pydht<gh_stars>0 import logging import unittest from unittest.mock import MagicMock, patch from pydht import PyDhtApp, IncomingConnectionHandler, RoutingTable logging.disable(logging.CRITICAL) class TestPyDhtApp(unittest.TestCase): def setUp(self): self.app = PyDhtApp() self.app.incoming_connection_handler = MagicMock(spec=IncomingConnectionHandler) self.app.client_id = 'CLIENT_ID' self.app.routing_table = MagicMock(spec=RoutingTable) def test_handle_ping(self): request = { 't': 'transaction', 'a': { 'id': b'REQUESTOR ID' } } self.app.handle_ping(request, ('localhost', 5000)) expected_response = { 't': 'transaction', 'y': 'r', 'r': { 'id': 'CLIENT_ID' } } self.app.incoming_connection_handler.respond.assert_called_once_with(expected_response, ('localhost', 5000)) self.assertEqual(self.app.routing_table.add_node.call_count, 1) def test_handles_malformed_request(self): request = None self.app.handle_ping(request, ('localhost', 5000)) self.assertFalse(self.app.incoming_connection_handler.respond.called)
//! Returns a list of all files that make up an asset which cannot be automatically regenerated. //! This list includes, for example, the meta-data (cryasset) file, source file, and all data files. //! The list does not include thumbnails. //! The list of independent files is used when moving an asset from a temporary folder to the game project folder. static std::vector<string> GetIndependentFiles(const CAsset* pAsset) { assert(0); std::vector<string> files; return files; }
//package com.ankushgrover.problems; import javafx.util.Pair; import java.util.*; /** * Created by Ankush Grover([email protected]) on 16/09/19 * <p> * http://codeforces.com/problemset/problem/431/B */ public class P117CFDiv2BShowerLine { public static void main(String[] args) { P117CFDiv2BShowerLine obj = new P117CFDiv2BShowerLine(); obj.run2(); } private void run2() { Scanner in = new Scanner(System.in); ArrayList<Integer> list = new ArrayList<>(); list.add(0); list.add(1); list.add(2); list.add(3); list.add(4); List<List<Integer>> perms = permutations(list); int[][] arr = new int[5][5]; for (int i = 0; i < 5; i++) { for (int j = 0; j < 5; j++) { arr[i][j] = in.nextInt(); } } int sum; int max = 0; for (List<Integer> l : perms) { sum = 0; for (int i = 0; i < 5; i++) { for (int j = i + 1; j < 5; j += 2) { sum = sum + arr[l.get(j)][l.get(j - 1)] + arr[l.get(j-1)][l.get(j)]; } } if(max < sum){ max = sum; } } System.out.println(max); } private List<List<Integer>> permutations(List<Integer> s) { if (s.size() == 1) { ArrayList<List<Integer>> list = new ArrayList<>(); List<Integer> l = new ArrayList<>(s); list.add(l); return list; } int first = s.get(0); s.remove(0); List<List<Integer>> perms = permutations(s); List<List<Integer>> result = new ArrayList<>(); for (List<Integer> perm : perms) { List<Integer> temp = new ArrayList<>(perm); for (int i = 0; i <= perm.size(); i++) { temp.add(i, first); result.add(new ArrayList<Integer>(temp)); temp.remove(i); } } return result; } private void run1() { Scanner in = new Scanner(System.in); int[][] arr = new int[5][5]; int[][] ref = new int[5][5]; for (int i = 0; i < 5; i++) { for (int j = 0; j < 5; j++) { arr[i][j] = in.nextInt(); ref[i][j] = arr[i][j]; } } int[] indexes = new int[5]; int x = -1, y = -1; // Find max sum of 2 int sum = 0; for (int i = 0; i < 5; i++) { for (int j = i + 1; j < 5; j++) { if (sum < arr[i][j] + arr[j][i]) { sum = arr[i][j] + arr[j][i]; x = i; y = j; } } } indexes[4] = x; indexes[3] = y; arr[x][y] = -1; arr[y][x] = -1; for (int i = 4; i > 2; i--) { Pair<Integer, Integer> p1 = findMax(arr[indexes[i]]); Pair<Integer, Integer> p2 = findMax(arr[indexes[i - 1]]); if (p2.getValue() < p1.getValue()) { int temp = indexes[i]; indexes[i] = indexes[i - 1]; indexes[i - 1] = temp; indexes[i - 2] = p1.getKey(); } else indexes[i - 2] = p2.getKey(); } sum = 0; for (int i = 0; i < 5; i++) { for (int j = i + 1; j < 5; j += 2) { sum = sum + ref[indexes[j]][indexes[j - 1]] + ref[indexes[j - 1]][indexes[j]]; } } System.out.print(sum); } // Pair of index and max value private Pair<Integer, Integer> findMax(int[] arr) { int max = 0; int index = -1; for (int i = 0; i < arr.length; i++) { if (max <= arr[i]) { max = arr[i]; index = i; } } arr[index] = -1; return new Pair<>(index, max); } private void run() { Scanner in = new Scanner(System.in); int[][] arr = new int[5][5]; for (int i = 0; i < 5; i++) { for (int j = 0; j < 5; j++) { arr[i][j] = in.nextInt(); } } TreeSet<Integer> set = new TreeSet<>(); for (int i = 0; i < 5; i++) { for (int j = i + 1; j < 5; j++) { set.add(arr[i][j] + arr[j][i]); } } int[] nums = new int[5]; int index = 4; Iterator<Integer> iterator = set.descendingIterator(); while (iterator.hasNext() && index >= 0) { nums[index--] = iterator.next(); } int sum = 0; for (int i = 0; i < 5; i++) { for (int j = i + 1; j < 5; j += 2) { sum = sum + nums[j] + nums[j - 1]; } } System.out.print(sum); } }
Role of coherent nuclear motion in the ultrafast intersystem crossing of ruthenium complexes. Ultrafast intersystem crossing (ISC) in transition metal complexes produces a long-lived active state in high yield, enabling efficient light-energy conversion. A detailed understanding of the ISC mechanism may enable the rational molecular design of superior transition metal complexes. Coherent nuclear wave packets observed in femtosecond time-resolved spectroscopies provide important information on excited-state dynamics. In particular, analyzing the nuclear wave packets in both the reactant and the product may unveil the molecular dynamics of an ultrafast reaction. In this study, we present experimental evidence identifying the reaction coordinates of the ultrafast ISC of ruthenium(ii) complexes, using coherent vibrational spectroscopy together with a quantum chemical simulation of the coherent vibrational motion. We observe vibrational modes strongly coupled to the ISC, whose vibrational coherences undergo remarkable attenuation after the ISC. The coupled modes contain metal-ligand stretching or symmetry-breaking components, and the faster ISC rates of lower-symmetry ruthenium(ii) complexes support the significance of the latter.
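The coherence attenuation described here is typically quantified by fitting the oscillatory residual of a pump-probe transient to a damped cosine, A·exp(-t/τ)·cos(2πνt + φ), and comparing the dephasing time τ before and after the ISC. A minimal sketch with synthetic data (every number below is illustrative, not taken from the study):

import numpy as np
from scipy.optimize import curve_fit

def damped_cos(t, a, tau, nu, phi):
    # a * exp(-t/tau) * cos(2*pi*nu*t + phi); nu is in THz when t is in ps
    return a * np.exp(-t / tau) * np.cos(2 * np.pi * nu * t + phi)

t = np.linspace(0.0, 2.0, 400)              # delay axis, ps
signal = damped_cos(t, 1.0, 0.5, 3.0, 0.2)  # synthetic wave-packet oscillation
signal += 0.05 * np.random.randn(t.size)    # measurement noise
popt, _ = curve_fit(damped_cos, t, signal, p0=[1.0, 1.0, 3.0, 0.0])
print(f"dephasing time tau = {popt[1]:.2f} ps, frequency = {popt[2]:.2f} THz")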
# Minimal lighting radius: mirror the first and last lamps across the street
# ends so that boundary coverage reduces to ordinary gap coverage.
n, l = map(int, input().split())
arr = sorted(map(float, input().split()))
arr = [-arr[0]] + arr + [l + (l - arr[-1])]
radius = 0.0
for i in range(1, len(arr) - 1):
    # each interior gap must be spanned by its two adjacent lamps
    radius = max(radius, (arr[i] - arr[i - 1]) / 2.0, (arr[i + 1] - arr[i]) / 2.0)
print(radius)
// Copyright (C) 2019 Orange // // This software is distributed under the terms and conditions of the 'Apache License 2.0' // license which can be found in the file 'License.txt' in this package distribution // or at 'http://www.apache.org/licenses/LICENSE-2.0'. package v1 import ( "context" "errors" "fmt" grpc_middleware "optisam-backend/common/optisam/middleware/grpc" "optisam-backend/common/optisam/token/claims" v1 "optisam-backend/license-service/pkg/api/v1" repo "optisam-backend/license-service/pkg/repository/v1" "optisam-backend/license-service/pkg/repository/v1/mock" "testing" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" ) func Test_licenseServiceServer_LicensesForEquipAndMetric(t *testing.T) { coresAttr := &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, Name: "numCores", } cpuAttr := &repo.Attribute{ ID: "1B", Type: repo.DataTypeInt, Name: "numCPU", } coreFactorAttr := &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, Name: "coreFactor", } coresAttrSim := &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 1, Name: "numCores", } cpuAttrSim := &repo.Attribute{ ID: "1B", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 2, IntValOld: 1, Name: "numCPU", } coreFactorAttrSim := &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 0.25, FloatValOld: 1, Name: "coreFactor", } serverEquipment := &repo.EquipmentType{ ID: "2", Type: "Server", ParentID: "3", Attributes: []*repo.Attribute{ coresAttr, cpuAttr, coreFactorAttr, }, } clusterEquipment := &repo.EquipmentType{ ID: "3", Type: "Cluster", ParentID: "4", } eqTypeTree := []*repo.EquipmentType{ &repo.EquipmentType{ ID: "1", Type: "Partition", ParentID: "2", }, serverEquipment, clusterEquipment, &repo.EquipmentType{ ID: "4", Type: "Vcenter", ParentID: "5", }, &repo.EquipmentType{ ID: "5", Type: "Datacenter", }, } ctx := grpc_middleware.AddClaims(context.Background(), &claims.Claims{ UserID: "<EMAIL>", Role: "Admin", Socpes: []string{"Scope1", "Scope2", "Scope3"}, }) var mockCtrl *gomock.Controller var rep repo.License type args struct { ctx context.Context req *v1.LicensesForEquipAndMetricRequest } tests := []struct { name string s *licenseServiceServer args args setup func() want *v1.LicensesForEquipAndMetricResponse wantErr bool }{ {name: "SUCCESS - For OPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: 
"1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(350), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(100), 100.5, nil) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle2", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle3", }, }, }, }, }, {name: "SUCCESS OPS - Atleast one attribute is not simulable", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 6, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: false, DataType: v1.DataTypes_INT, // Val: &v1.Attribute_IntVal{ // IntVal: 1, // }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, 
[]string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(350), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(100), 100.5, nil) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle2", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.processor.standard", OldLicences: int64(350), NewLicenses: int64(351), Delta: int64(1), Product: &v1.Product{ Name: "Oracle3", }, }, }, }, }, {name: "FAILURE - For OPS metric - cannot simulate OPS metric for types other than base type", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Cluster", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := 
mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) }, wantErr: true, }, {name: "FAILURE-cannot fetch OPS metrics", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return(nil, errors.New("Internal")) }, wantErr: true, }, {name: "FAILURE-metric does not exist", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "windows.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) }, wantErr: true, }, {name: "FAILURE-cannot fetch computed metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := 
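// Computed-metric failure: the metric returned below carries StartEqTypeID "2" with BaseEqTypeID "1", inverting the start/base order in the type tree, which presumably makes computing the OPS metric fail.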
mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "2", BaseEqTypeID: "1", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) }, wantErr: true, }, {name: "FAILURE-Simulation not allowed for equipment other than base equipment type", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "3", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) }, wantErr: true, }, { name: "FAILURE-equipment does not exist", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return(nil, repo.ErrNodeNotFound) }, wantErr: true, }, { name: "FAILURE-cannot fetch equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: 
&v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return(nil, errors.New("Internal")) }, wantErr: true, }, {name: "FAILURE-cannot fetch products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(nil, errors.New("Internal")) }, wantErr: true, }, {name: "FAILURE- for OPS metric - no data for products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: 
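// repo.ErrNoData from the OPS product listing is not an error: this case expects an empty LicensesForEquipAndMetricResponse instead of wantErr.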
"numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(nil, repo.ErrNoData) }, want: &v1.LicensesForEquipAndMetricResponse{}, }, {name: "FAILURE-cannot fetch old licenses for OPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, 
NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(0), errors.New("Internal")) }, wantErr: true, }, { name: "FAILURE-cannot fetch new licenses for OPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricOPS(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricOPS{ &repo.MetricOPS{ ID: "1M", Name: "oracle.processor.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(350), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ Name: "oracle.processor.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(0), 
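// New-license failure: the aggregate ("All") computation at cluster level below returns an internal error, so this case expects wantErr.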
float64(0), errors.New("Internal")) }, wantErr: true, }, { name: "FAILURE-Metric is not supported for simulation", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: "NoNameMetric", MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) }, wantErr: true, }, {name: "SUCCESS - For NUP metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: 
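// Note (inferred): for NUP, old licenses appear to be derived via the OPS computation (the MetricOPSComputed passed here carries no Name) and then combined with per-product user counts against the metric's NumberOfUsers of 100.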
cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(2000), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(1000), 1000.5, nil) gomock.InOrder( mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O1", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "1", UserID: "U1", UserCount: int64(100000), }, &repo.User{ ID: "2", UserID: "U2", UserCount: int64(200000), }, }, nil), mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O2", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "3", UserID: "U3", UserCount: int64(100000), }, &repo.User{ ID: "4", UserID: "U4", UserCount: int64(200000), }, }, nil), ) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: int64(400000), NewLicenses: int64(400200), Delta: int64(200), Product: &v1.Product{ Name: "Oracle1", SwidTag: "O1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: int64(400000), NewLicenses: int64(400200), Delta: int64(200), Product: &v1.Product{ Name: "Oracle2", SwidTag: "O2", }, }, }, }, }, {name: "SUCCESS NUP - At least one attribute is non-simulated", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 6, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: false, DataType: v1.DataTypes_INT, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: 
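// Mixed simulation: numCores arrives with Simulated=false, so the MetricNUPComputed expected below keeps IsSimulated: false and only the old value (IntValOld: 1) for that attribute.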
"Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(2000), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(1000), 1000.5, nil) gomock.InOrder( mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O1", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: false, IntValOld: 1, Name: "numCores", }, NumCPUAttr: &repo.Attribute{ ID: "1B", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 6, IntValOld: 1, Name: "numCPU", }, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "1", UserID: "U1", UserCount: int64(100000), }, &repo.User{ ID: "2", UserID: "U2", UserCount: int64(200000), }, }, nil), mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O2", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: false, IntValOld: 1, Name: "numCores", }, NumCPUAttr: &repo.Attribute{ ID: "1B", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 6, IntValOld: 1, Name: "numCPU", }, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "3", UserID: "U3", UserCount: int64(100000), }, &repo.User{ ID: "4", UserID: "U4", UserCount: int64(200000), }, }, nil), ) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: int64(400000), NewLicenses: int64(400200), Delta: int64(200), Product: &v1.Product{ Name: "Oracle1", SwidTag: "O1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: int64(400000), NewLicenses: int64(400200), Delta: int64(200), Product: &v1.Product{ Name: "Oracle2", SwidTag: "O2", }, }, }, }, }, {name: "SUCCESS - For NUP metric product does not have user nodes", args: args{ ctx: ctx, req: 
&v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(2000), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(1000), 1000.5, nil) gomock.InOrder( mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O1", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "1", UserID: "U1", UserCount: int64(100000), }, &repo.User{ ID: "2", UserID: "U2", UserCount: int64(200000), }, }, nil), mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, 
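// gomock.InOrder pins the per-product user lookups: "O1" succeeds, while "O2" returns repo.ErrNoData; the case still reports Oracle2, with its figures in want computed without user nodes.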
"e4ID", "Datacenter", "O2", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return(nil, repo.ErrNoData), ) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: 400000, NewLicenses: 400200, Delta: 200, Product: &v1.Product{ Name: "Oracle1", SwidTag: "O1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "oracle.nup.standard", OldLicences: 200000, NewLicenses: 200100, Delta: 100, Product: &v1.Product{ Name: "Oracle2", SwidTag: "O2", }, }, }, }, }, {name: "Failure - Getting NUP metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return(nil, errors.New("test error")) }, wantErr: true, }, {name: "Failure - For NUP metric not found", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard_xyz", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) }, wantErr: true, }, {name: "Failure - For NUP metric cannot get computed nup metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "10", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) }, wantErr: true, }, {name: "FAILURE - For NUP metric - cannot simulate NUP 
metric for types other than base type", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Cluster", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) }, wantErr: true, }, {name: "Failure - For NUP metric equipment not found", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return(nil, repo.ErrNodeNotFound) }, wantErr: true, }, {name: "Failure - For NUP metric failed to fetch parents", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return(nil, errors.New("test error")) }, wantErr: true, },
{name: "FAILURE - For NUP metric - cannot fetch products", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return(nil, errors.New("Internal")) }, wantErr: true, }, {name: "FAILURE - For NUP metric - no data for products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) 
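// repo.ErrNoData from the NUP product listing below is likewise not an error: this case expects an empty response rather than wantErr.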
mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return(nil, repo.ErrNoData) }, want: &v1.LicensesForEquipAndMetricResponse{}, }, {name: "Failure - For NUP metric failed in getting old licenses", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(0), errors.New("test error")) }, wantErr: true, }, {name: "failure - For NUP metric getting simulated licenses", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: 
v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(2000), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(0), float64(0), errors.New("test error")) }, wantErr: true, }, {name: "Failure - For NUP metric getting user nodes for product", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOracleNUPStandard.String(), MetricName: "oracle.nup.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 1, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, 
[]string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricNUP(ctx, []string{"Scope1"}).Times(1).Return([]*repo.MetricNUPOracle{ &repo.MetricNUPOracle{ ID: "1M", Name: "oracle.nup.standard", NumCoreAttrID: "1A", NumCPUAttrID: "1B", CoreFactorAttrID: "1C", StartEqTypeID: "1", BaseEqTypeID: "2", AggerateLevelEqTypeID: "3", EndEqTypeID: "5", NumberOfUsers: 100, }, }, nil) mockLicense.EXPECT().ParentsHirerachyForEquipment(ctx, "e1ID", "Server", uint8(4), []string{"Scope1"}).Times(1).Return( &repo.Equipment{ Type: "Server", EquipID: "e1ID", Parent: &repo.Equipment{ Type: "Cluster", EquipID: "e2ID", Parent: &repo.Equipment{ Type: "Vcenter", EquipID: "e3ID", Parent: &repo.Equipment{ Type: "Datacenter", EquipID: "e4ID", Parent: nil, }, }, }, }, nil) mockLicense.EXPECT().ProductsForEquipmentForMetricOracleNUPStandard(ctx, "e4ID", "Datacenter", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", Swidtag: "O1", }, &repo.ProductData{ Name: "Oracle2", Swidtag: "O2", }, }, nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandard(ctx, "e4ID", "Datacenter", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(2000), nil) mockLicense.EXPECT().ComputedLicensesForEquipmentForMetricOracleProcessorStandardAll(ctx, "e2ID", "Cluster", &repo.MetricOPSComputed{ EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttr, NumCoresAttr: coresAttr, NumCPUAttr: cpuAttr, }, []string{"Scope1"}).Times(1).Return(int64(3000), float64(0), nil) gomock.InOrder( mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O1", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return([]*repo.User{ &repo.User{ ID: "1", UserID: "U1", UserCount: int64(100000), }, &repo.User{ ID: "2", UserID: "U2", UserCount: int64(200000), }, }, nil), mockLicense.EXPECT().UsersForEquipmentForMetricOracleNUP(ctx, "e4ID", "Datacenter", "O2", uint8(5), &repo.MetricNUPComputed{ Name: "oracle.nup.standard", EqTypeTree: eqTypeTree, BaseType: serverEquipment, AggregateLevel: clusterEquipment, CoreFactorAttr: coreFactorAttrSim, NumCoresAttr: coresAttrSim, NumCPUAttr: cpuAttrSim, NumOfUsers: 100, }, []string{"Scope1"}).Times(1).Return(nil, errors.New("no data")), ) }, wantErr: true, }, {name: "FAILURE-cannot find claims in context", args: args{ ctx: context.Background(), req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ 
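// Authorization guards: this entry calls with context.Background() (no claims) and the next requests "Scope4", outside the user's scopes; both must fail before any repository call, hence their empty setup funcs.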
IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() {}, wantErr: true, }, {name: "FAILURE-requested scopes are outside the scope of user", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope4", }, }, setup: func() {}, wantErr: true, }, {name: "FAILURE-cannot fetch equipment types", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(nil, errors.New("Internal")) }, wantErr: true, }, {name: "FAILURE - equipment type does not exist", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server1", EquipId: "e1ID", MetricType: repo.MetricOPSOracleProcessorStandard.String(), MetricName: "oracle.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 0.25, }, }, &v1.Attribute{ ID: "1B", Name: "numCPU", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 2, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) }, wantErr: true, }, {name: "SUCCESS - For IPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl 
= gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricIPSStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricIPSComputed{ Name: "ibm.pvu.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil).Times(1) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "ibm.pvu.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "ibm.pvu.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle2", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "ibm.pvu.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle3", }, }, }, }, }, {name: "FAILURE - For IPS metric - cannot fetch IPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return(nil, errors.New("Internal")).Times(1) }, wantErr: true, }, {name: "FAILURE - For IPS metric - metric does not exist", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "abc", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, 
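// Metric-name mismatch: the request asks for "abc" while the repository below only lists "ibm.pvu.standard", so the metric lookup should fail.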
[]string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For IPS metric -cannot compute IPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "6", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For IPS metric - cannot simulate IPS metric for types other than base type", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Cluster", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For IPS metric - cannot fetch products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", 
NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricIPSStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricIPSComputed{ Name: "ibm.pvu.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return(nil, errors.New("Internal")).Times(1) }, wantErr: true, }, {name: "FAILURE - For IPS metric - no data for products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricIPSIbmPvuStandard.String(), MetricName: "ibm.pvu.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricIPS(ctx, []string{"Scope1"}).Return([]*repo.MetricIPS{ &repo.MetricIPS{ ID: "1M", Name: "ibm.pvu.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricIPSStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricIPSComputed{ Name: "ibm.pvu.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return(nil, repo.ErrNoData).Times(1) }, want: &v1.LicensesForEquipAndMetricResponse{}, }, {name: "SUCCESS - For SPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricSAGStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricSPSComputed{ Name: 
"sag.processor.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return([]*repo.ProductData{ &repo.ProductData{ Name: "Oracle1", }, &repo.ProductData{ Name: "Oracle2", }, &repo.ProductData{ Name: "Oracle3", }, }, nil).Times(1) }, want: &v1.LicensesForEquipAndMetricResponse{ Licenses: []*v1.ProductLicenseForEquipAndMetric{ &v1.ProductLicenseForEquipAndMetric{ MetricName: "sag.processor.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle1", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "sag.processor.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle2", }, }, &v1.ProductLicenseForEquipAndMetric{ MetricName: "sag.processor.standard", OldLicences: 7, NewLicenses: 3, Delta: -4, Product: &v1.Product{ Name: "Oracle3", }, }, }, }, }, {name: "FAILURE - For SPS metric - cannot fetch SPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return(nil, errors.New("Internal")).Times(1) }, wantErr: true, }, {name: "FAILURE - For SPS metric - metric does not exist", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "abc", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For SPS metric -cannot compute SPS metric", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: 
[]*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "6", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For SPS metric - cannot simulate SPS metric for types other than base type", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Cluster", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) }, wantErr: true, }, {name: "FAILURE - For SPS metric - cannot fetch products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricSAGStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricSPSComputed{ Name: "sag.processor.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: 
&repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return(nil, errors.New("Internal")).Times(1) }, wantErr: true, }, {name: "FAILURE - For SPS metric - no data for products for equipment", args: args{ ctx: ctx, req: &v1.LicensesForEquipAndMetricRequest{ EquipType: "Server", EquipId: "e1ID", MetricType: repo.MetricSPSSagProcessorStandard.String(), MetricName: "sag.processor.standard", Attributes: []*v1.Attribute{ &v1.Attribute{ ID: "1C", Name: "coreFactor", Simulated: true, DataType: v1.DataTypes_FLOAT, Val: &v1.Attribute_FloatVal{ FloatVal: 1.25, }, OldVal: &v1.Attribute_FloatValOld{ FloatValOld: 1.5, }, }, &v1.Attribute{ ID: "1A", Name: "numCores", Simulated: true, DataType: v1.DataTypes_INT, Val: &v1.Attribute_IntVal{ IntVal: 3, }, OldVal: &v1.Attribute_IntValOld{ IntValOld: 5, }, }, }, Scope: "Scope1", }, }, setup: func() { mockCtrl = gomock.NewController(t) mockLicense := mock.NewMockLicense(mockCtrl) rep = mockLicense mockLicense.EXPECT().EquipmentTypes(ctx, []string{"Scope1"}).Times(1).Return(eqTypeTree, nil) mockLicense.EXPECT().ListMetricSPS(ctx, []string{"Scope1"}).Return([]*repo.MetricSPS{ &repo.MetricSPS{ ID: "1M", Name: "sag.processor.standard", NumCoreAttrID: "1A", CoreFactorAttrID: "1C", BaseEqTypeID: "2", }, }, nil).Times(1) mockLicense.EXPECT().ProductsForEquipmentForMetricSAGStandard(ctx, "e1ID", "Server", uint8(1), &repo.MetricSPSComputed{ Name: "sag.processor.standard", BaseType: serverEquipment, CoreFactorAttr: &repo.Attribute{ ID: "1C", Type: repo.DataTypeFloat, IsSimulated: true, FloatVal: 1.25, FloatValOld: 1.5, Name: "coreFactor", }, NumCoresAttr: &repo.Attribute{ ID: "1A", Type: repo.DataTypeInt, IsSimulated: true, IntVal: 3, IntValOld: 5, Name: "numCores", }, }, []string{"Scope1"}).Return(nil, repo.ErrNoData).Times(1) }, want: &v1.LicensesForEquipAndMetricResponse{}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tt.setup() s := NewLicenseServiceServer(rep) got, err := s.LicensesForEquipAndMetric(tt.args.ctx, tt.args.req) if (err != nil) != tt.wantErr { t.Errorf("licenseServiceServer.LicensesForEquipAndMetric() error = %v, wantErr %v", err, tt.wantErr) return } if !tt.wantErr { compareLicensesForEquipAndMetricResponse(t, "LicensesForEquipAndMetric", tt.want, got) } }) } } func compareLicensesForEquipAndMetricResponse(t *testing.T, name string, exp, act *v1.LicensesForEquipAndMetricResponse) { if exp == nil && act == nil { return } if exp == nil { assert.Nil(t, act, "attribute is expected to be nil") } for i := range exp.Licenses { compareLicensesForEquipAndMetric(t, fmt.Sprintf("%s Licenses[%d]", name, i), exp.Licenses[i], act.Licenses[i]) } } func compareLicensesForEquipAndMetric(t *testing.T, name string, exp, act *v1.ProductLicenseForEquipAndMetric) { assert.Equalf(t, exp.MetricName, act.MetricName, "%s.MetricName are not same", name) assert.Equalf(t, exp.OldLicences, act.OldLicences, "%s.OldLicences are not same", name) assert.Equalf(t, exp.NewLicenses, act.NewLicenses, "%s.NewLicenses are not same", name) assert.Equalf(t, exp.Delta, act.Delta, "%s.Delta are not same", name) assert.Equalf(t, exp.Product, act.Product, "%s.Product are not same", name) }
import { api } from './index'

export default api('Security', {
  SecItemCopyMatching: ['pointer', ['pointer', 'pointer']],
  SecItemDelete: ['pointer', ['pointer']],
  SecAccessControlGetConstraints: ['pointer', ['pointer']],
})
module minerva {
    export interface IBrush {
        isTransparent(): boolean;
        setupBrush(ctx: CanvasRenderingContext2D, region: Rect): any;
        toHtml5Object(): any;
    }

    /*
    function isBrushTransparent (brush: IBrush) {
        if (!brush)
            return true;
        if (brush instanceof Media.SolidColorBrush)
            return (<Media.SolidColorBrush>brush).Color.A < 1.0;
        if (brush instanceof Media.LinearGradientBrush) {
            var enumerator = (<Media.LinearGradientBrush>brush).GradientStops.getEnumerator();
            while (enumerator.moveNext()) {
                if (enumerator.current.Color.A < 1.0)
                    return true;
            }
            return false;
        }
        return true;
    }
    */

    // FakeBrush wraps a raw html5 fill/stroke style (e.g. a color string or
    // canvas gradient) so it can be passed wherever an IBrush is expected.
    export class FakeBrush implements IBrush {
        constructor (public raw: any) {
        }

        isTransparent (): boolean {
            return false;
        }

        setupBrush (ctx: CanvasRenderingContext2D, region: Rect): any {
        }

        toHtml5Object (): any {
            return this.raw;
        }
    }
}
Right Paraduodenal Hernia: A Rare Cause of Small Bowel Strangulation

Internal hernias are a rare cause of intestinal obstruction. Among the internal hernias, left paraduodenal hernia is the most common type, followed by right paraduodenal hernia. A clinical diagnosis of internal hernia cannot be made with confidence, as there are no specific symptoms or physical signs. A high degree of suspicion is required, and an accurate diagnosis can be made using cross-sectional imaging of the abdomen, such as a Computed Tomography (CT) scan or Magnetic Resonance Imaging (MRI) scan. In this case report, we present our experience in managing a patient who had a right paraduodenal hernia with small bowel strangulation. We present this case report to highlight the importance of considering internal hernias such as right paraduodenal hernia in the differential diagnosis of intestinal obstruction.

Introduction

Internal hernia is an abnormal protrusion of the abdominal viscera through a congenital or acquired defect in the peritoneum or mesentery. Internal hernias are a rare cause of intestinal obstruction. Acquired internal hernias usually occur following surgeries such as Roux-en-Y gastric bypass (RYGB). In the congenital type, the herniation may occur either through normal foramina or recesses, such as the foramen of Winslow, or through abnormal defects that arise from disordered bowel rotation and fixation. Types of internal hernia include: left paraduodenal hernia; right paraduodenal hernia; transmesenteric hernia; foramen of Winslow hernia; pericaecal hernia; transomental hernia; sigmoid mesocolon-related hernia; and supravesical hernia.

Hernia through the left paraduodenal fossa (fossa of Landzert) is the most common type, followed by hernia through the right paraduodenal fossa (fossa of Waldeyer). Right paraduodenal hernia occurs due to an abnormality in the second stage of embryonic midgut rotation. Although the condition is congenital, most patients are entirely asymptomatic, and it is often diagnosed incidentally during abdominal surgery or abdominal imaging. The clinical presentation in symptomatic patients can vary from recurrent episodes of abdominal pain to acute intestinal obstruction or strangulation. A clinical diagnosis of right paraduodenal hernia is not feasible, as there are no specific symptoms or signs. Small bowel contrast studies may be helpful, but a CT scan with oral and intravenous contrast is the best modality to establish the diagnosis. Although right paraduodenal hernia accounts for only 0.2 to 0.9% of all intestinal obstruction cases, morbidity and mortality are high in patients with an acute presentation. In this case report, we present our experience in managing a patient with a right paraduodenal hernia complicated by small bowel strangulation.

Case Presentation

A 29-year-old male patient presented to our casualty with sudden-onset diffuse abdominal pain, abdominal distension, and bilious vomiting. He had no history of previous abdominal surgery. He had had a few episodes of abdominal pain and vomiting in the past, which resolved with conservative management. He had no known comorbid illnesses. He underwent an ultrasound scan of the abdomen during one of those episodes, which was inconclusive. At presentation, he was dehydrated and tachycardic but not hypotensive. Abdominal examination revealed diffuse tenderness and guarding. Blood investigations revealed leukocytosis. The serum amylase, lipase, renal and liver function tests were within normal limits.
A CT scan of the abdomen with intravenous contrast was done, which showed a cluster of small bowel loops to the right side of the duodenum, with no contrast enhancement in some of the loops (Figure 1: CT scan of the abdomen).

A diagnosis of right paraduodenal hernia with small bowel strangulation was made, and an immediate laparotomy was performed. During surgery, we noted that the small bowel loops were ischemic. The proximal jejunum was seen entering a hernia sac located posterior to the ascending and transverse colon. The caecum and ascending colon were mobilized, the sac was opened on its lateral aspect near the ascending colon, and the small bowel loops were reduced. The small and large bowel were returned to their normal anatomical positions. After reducing the bowel and applying warm laparotomy pads, the bowel loops became viable, and no bowel resection was required. He had an uneventful postoperative recovery. At a review visit in the outpatient department four weeks after surgery, he had no complications.

Discussion

Paraduodenal hernias are the most common type of internal hernia, accounting for approximately 53% of internal hernia cases, with the left paraduodenal hernia being the more common of the two. Right paraduodenal hernia is a congenital disorder that occurs due to an abnormality in midgut rotation and fixation. The primitive midgut, which lies outside the abdominal cavity, is divided into pre-arterial and post-arterial segments, with the superior mesenteric artery as its axis. The pre-arterial segment forms the distal duodenum, jejunum and proximal ileum. The distal ileum, caecum, ascending colon, and proximal transverse colon develop from the post-arterial segment. As the midgut loops return into the abdominal cavity, they undergo a 270° counterclockwise rotation. The pre-arterial segment returns first, followed by the post-arterial segment. In right paraduodenal hernia, the pre-arterial segment arrests in the second stage of rotation and remains on the right side of the abdominal cavity. The post-arterial segment rotates normally, and its mesentery fuses to the posterior abdominal wall, entrapping the pre-arterial bowel loops in a peritoneal sac. The superior mesenteric artery, middle colic artery, and ileocolic artery lie on the sac's anterior surface, along its medial aspect.

Right paraduodenal hernia is more common in males and mostly presents in the fourth to sixth decades of life. The earliest age of presentation documented in the published literature is in a 1-week-old neonate. These hernias are mostly asymptomatic and may be diagnosed incidentally during surgery. Symptoms can vary from recurrent episodes of abdominal pain and vomiting to intestinal obstruction. A CT scan is the best method to establish the diagnosis. A high degree of suspicion is required while interpreting the CT images. In a case of right paraduodenal hernia, the CT scan will show a cluster of small bowel loops to the right of the duodenum, behind the superior mesenteric vessels. Sometimes the trapped bowel loops can be mistaken for a mass lesion, prompting a biopsy, which can cause catastrophic complications. Hence it is essential to perform the CT scan after administering oral contrast, especially in patients who do not have clinical findings suggestive of intestinal obstruction.
Surgical intervention is required once the diagnosis is established conclusively, even in asymptomatic patients, because of the high risk of intestinal obstruction and strangulation; there is a 50% risk of bowel incarceration in an untreated paraduodenal hernia. Both open and laparoscopic approaches have been described. Care should be taken while opening the neck of the sac to avoid injury to the superior mesenteric vessels and their branches. Morbidity and mortality are high in patients who require bowel resection. With the increased availability of CT scanning, a greater number of asymptomatic patients may be diagnosed incidentally.

Conclusions

A CT scan should be considered when evaluating a patient presenting with recurrent episodes of abdominal pain, and there should be a high degree of suspicion for an internal hernia while interpreting the CT images of such patients. Elective surgery, preferably by the laparoscopic approach, is recommended to prevent complications, even in incidentally diagnosed patients.

Additional Information

Disclosures

Human subjects: Consent was obtained by all participants in this study. Conflicts of interest: In compliance with the ICMJE uniform disclosure form, all authors declare the following: Payment/services info: All authors have declared that no financial support was received from any organization for the submitted work. Financial relationships: All authors have declared that they have no financial relationships at present or within the previous three years with any organizations that might have an interest in the submitted work. Other relationships: All authors have declared that there are no other relationships or activities that could appear to have influenced the submitted work.
/**
 * Reveal every position in the masked word matching the letter.
 *
 * @param c the letter
 *
 * @return the number of matches
 */
public int matches(char c) {
    int matches = 0;
    for (int i = 0; i < word.length; i++) {
        if (word[i] == c) {
            mask[i] = c;
            matches++;
        }
    }
    return matches;
}
from urllib.parse import parse_qs, urlsplit


def parameters(*keys):
    """Build a function that extracts the given query parameters from a URL
    and joins each key with its values using dashes."""
    def wrapper(url):
        query = parse_qs(urlsplit(url).query)
        return '-'.join(s for key in keys for value in query[key] for s in [key, value])
    return wrapper
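# A brief usage sketch (the URL and keys below are illustrative assumptions,
# not from the original source): each requested key is looked up in the URL's
# query string in order, and every key/value pair is joined with dashes.
# Note that a key missing from the URL would raise a KeyError.
key_of = parameters('page', 'sort')
print(key_of('https://example.com/list?page=2&sort=asc'))  # prints: page-2-sort-asc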
import {EventEmitter} from 'events';
import * as net from 'net';
import {Socket} from 'net';
import {EmitEvent} from "./constants";
import {Protocol} from "./protocol";

export class Transport extends EventEmitter {
    private socket: Socket;
    private readonly port: number;

    constructor(port: number) {
        super();
        this.port = port;
    }

    public connect(): void {
        this.socket = new net.Socket();
        this.socket.on(EmitEvent.Error, this.handleError.bind(this));
        this.socket.on(EmitEvent.Data, this.handleData.bind(this));
        this.socket.on(EmitEvent.End, this.handleEnd.bind(this));
        this.socket.on(EmitEvent.Close, this.handleClose.bind(this));
        this.socket.connect(this.port, this.handleConnection.bind(this));
    }

    public write(data: Buffer): void {
        this.socket.write(data);
    }

    public close(): void {
        this.socket.end();
        this.socket.removeAllListeners();
    }

    private handleConnection(): void {
        this.emit(EmitEvent.Connected);
    }

    private handleError(err): void {
        this.emit(EmitEvent.Error, err);
    }

    private handleData(raw: any): void {
        const pack = Protocol.parseReceive(raw);
        this.emit(EmitEvent.Package, pack);
    }

    private handleEnd(): void {
        //this.emit(EmitEvent.Error, 'socket end');
    }

    private handleClose(): void {
        //this.emit(EmitEvent.Error, 'socket close');
    }
}
This week our statistician provides a statistical review of Arsenal's year.

Arsenal won 35 of their 54 games in 2013, recording their highest win average for a calendar year since 2002. In fact, the win rate of 64.8 per cent was the second highest for any calendar year in Arsenal history, beating the 1971 and 1998 double years (63.9 per cent and 55.9 per cent respectively), the year of the 'Invincibles', 2004 (61.7 per cent), and even better than the statistically strongest year of the 1930s era (54 per cent in 1930).

The Gunners ended 2013 with a record of P54 W35 D8 L11 F93 A50, and finished the year on top of the Premier League table. All this after not winning any of the first three matches of the year. Arsenal won eight more games than they did in 2012, and conceded 17 fewer goals (from two more matches). In fact, the 50 goals conceded over the past 12 months was the fewest Arsenal have let in since 2006 (47).

The Gunners competed on four fronts in 2013, winning at least once in each of the four competitions. The most successful, in terms of win percentage, was the Champions League (70 per cent win rate), followed by the Premier League (65.8 per cent) and then the two domestic cups (50 per cent each).

2013 record
Competition        P   W   D   L   F    A
League             38  25  7   6   70   34
FA Cup             4   2   1   1   6    5
Champions League   10  7   0   3   16   8
League Cup         2   1   0   1   1    3
Total              54  35  8   11  93   50

Yearly records since 2000
Year  P   W   D   L   F    A   GPG   Win %
2000  57  30  15  12  105  66  1.84  52.63
2001  60  34  11  15  105  55  1.75  56.67
2002  60  40  11  9   117  52  1.95  66.67
2003  57  34  18  5   115  48  2.02  59.65
2004  60  37  16  7   128  59  2.13  61.67
2005  56  34  11  11  96   40  1.71  60.71
2006  61  32  15  14  102  47  1.67  52.46
2007  59  36  15  8   113  52  1.92  61.02
2008  58  30  16  12  105  60  1.81  51.72
2009  60  37  12  11  126  55  2.10  61.67
2010  53  30  8   15  111  62  2.09  56.60
2011  60  31  15  14  102  70  1.70  51.67
2012  52  27  11  14  109  67  2.10  51.92
2013  54  35  8   11  93   50  1.72  64.81

HOME AND AWAY

One of the most impressive stats from Arsenal's year is that they won more games away from home than at Emirates Stadium. The victory at St James' Park on Sunday was the 19th time the Gunners had won on their travels from 27 trips, compared to 16 home wins from the same number of games. Arsenal lost just five times away all year, and scored in all but two matches on the road. Most impressive were the victories away to Bayern Munich and Borussia Dortmund - indeed, the recent defeat at Napoli was the only away game in Europe Arsenal did not win in the whole of 2013.

Between March and September Arsenal won a club-record 12 straight away fixtures (including the penalty shoot-out victory at The Hawthorns in the Capital One Cup). They made it 15 consecutive away games without defeat with the 1-0 win at Dortmund in November.

It was fitting that Arsenal should see out the year with a 1-0 win at St James' Park: that was the second time in the past 12 months Arsenal had won there by that scoreline (the win in May sealing a top-four finish) and the ninth 1-0 win overall in 2013. 1-0 was the joint most popular scoreline of 2013 (with 2-0), and six of the 1-0 wins came in away matches. The Gunners kept 22 clean sheets from the 54 matches this year (six more than in 2012), half of them away from home.
2013 home and away record
       P   W   D   L   F   A   Win %  GPG
Home   27  16  5   6   47  24  59     1.7
Away   27  19  3   5   46  26  70     1.7
Total  54  35  8   11  93  50  65     1.7

Most common scorelines in 2013 (chart not reproduced; 1-0 and 2-0 were the joint most popular)

MONTH-BY-MONTH STATS

The hallmark of Arsenal's excellent run was remarkable consistency throughout the months. The Gunners won at least three matches in each of the 10 months they played, compared to 2012, when they won three or more in just five months. It's the first time Arsenal have won at least three games in every month of a calendar year since Arsène Wenger took over in 1996.

September was the best month of the year. It began with a 1-0 win over Tottenham at Emirates Stadium, which was followed 24 hours later by the signing of Mesut Ozil. Then, after the international break, the Gunners won the remaining five fixtures in the month, four of which were away from home. Arsène Wenger was duly named Premier League Manager of the Month, while Aaron Ramsey took the player award.

That was one of three months in the year in which Arsenal were unbeaten, following the undefeated run-in during April and May last season. After losing 2-1 at White Hart Lane on March 3, Arsenal lost just one of their next 24 games in all competitions. The longest unbeaten run of the year was 12 matches (between August 21 and October 19) and the longest winning streak was 10 (between August 21 and October 1).

Month-by-month record
Month      P  W  D  L  F   A   Win %
January    8  3  3  2  15  12  38
February   5  3  0  2  5   5   60
March      4  3  0  1  9   3   75
April      5  3  2  0  7   3   60
May        3  3  0  0  6   1   100
August     4  3  0  1  9   4   75
September  6  6  0  0  12  5   100
October    6  3  1  2  10  6   50
November   6  5  0  1  10  1   83
December   7  3  2  2  10  10  43

PLAYERS USED

Sunday's game against Newcastle was only the third that Aaron Ramsey missed in 2013, in all competitions. Despite that, the Welshman still made more appearances than any other Arsenal player throughout the year (51), and gained the joint-most victories, along with Per Mertesacker (33). He was also the second top scorer with 14.

In terms of win percentages, Nacho Monreal and Lukasz Fabianski deserve honourable mentions. Monreal joined the Club at the end of January, and since then has won 25 of the 31 games he has played a part in. That translates to a highly impressive 80.6 per cent win rate. Goalkeeper Fabianski played only seven games in the year, but he won all but one of them, including the 2-0 win away to Bayern Munich.

Overall, Arsène Wenger selected 36 different players during the year (three fewer than in 2012 and 10 fewer than in 2011), though seven of them featured just once. Analysing the numbers, and comparing them to 2012, it seems the squad suffered fewer injuries last year than the previous year. Arsène Wenger was therefore able to name his 'strongest XI' more often in 2013 than in 2012. Two years ago just four players featured in at least 39 games, whereas in 2013, 11 players reached that tally. Those 11, incidentally, can be arranged into a theoretical starting XI: Szczesny, Sagna, Gibbs, Mertesacker, Koscielny, Ramsey, Arteta, Wilshere, Cazorla, Walcott, Giroud.

Of the 36 players to represent the Club in 2013, 19 (53 per cent) got on the scoresheet, scoring 92 between them (there was also an own goal) at an average of 4.8 goals each. Olivier Giroud top-scored with 19 (20 per cent of the total), followed by Ramsey (14) and Walcott (12). Seven players made their Arsenal first-team debut during the year.
Players used
Player                   Pld  Minutes  Won  Goals
Aaron Ramsey             51   3820     33   14
Olivier Giroud           49   3947     31   19
Per Mertesacker          48   4318     33   4
Wojciech Szczesny        47   4230     29   0
Laurent Koscielny        44   3504     29   2
Santi Cazorla            44   3464     28   6
Jack Wilshere            44   2860     27   5
Bacary Sagna             42   3680     29   1
Kieran Gibbs             41   3176     27   2
Mikel Arteta             39   2987     26   3
Theo Walcott             39   2825     25   12
Nacho Monreal            31   1603     25   1
Tomas Rosicky            30   2076     19   2
Thomas Vermaelen         24   1529     13   0
Lukas Podolski           23   1061     14   10
Mesut Ozil               22   1867     14   5
Carl Jenkinson           21   1533     15   0
Mathieu Flamini          18   1168     13   1
Alex Oxlade-Chamberlain  16   583      10   0
Nicklas Bendtner         11   319      6    1
Abou Diaby               9    638      6    0
Gervinho                 8    351      5    2
Lukasz Fabianski         7    660      6    0
Serge Gnabry             7    358      4    1
Ryo Miyaichi             5    218      4    0
Francis Coquelin         4    240      2    0
Andre Santos             2    152      2    0
Chuba Akpom              2    39       2    0
Yaya Sanogo              2    39       2    0
Isaac Hayden             1    84       1    0
Thomas Eisfeld           1    82       1    1
Kris Olsson              1    36       1    0
Hector Bellerin          1    25       1    0
Andrey Arshavin          1    15       0    0
Ju Young Park            1    8        0    0
Ignasi Miquel            1    3        1    0

MOST POINTS IN THE YEAR

Arsenal played 38 league games in 2013, in effect the same as a regular Premier League campaign, and the Gunners top the table compiled purely on results from January 1 to December 31. The Gunners amassed 82 points in the year, five more than they did in the 1997/98 title-winning campaign, and 15 more than they managed in 2012. In the 2013 table, Arsenal finished five points ahead of Man City and Chelsea, though City played a game fewer. The Gunners won the most league games (25), lost the joint fewest (six), and only Everton conceded fewer goals.

The last time Arsenal finished the calendar year with the most Premier League points was 2004 (89 points), and the other two occasions also came after title-winning seasons - 1998 and 2002. The most Premier League points Arsenal have ever amassed in a calendar year is 91, from 2002. But of course Arsenal also ended the year on top of the table that really matters. It's the first time since 2007/08 that the Gunners have found themselves top of the pile at the turn of the year.

2013 Premier League table
Team                  P   W   D   L   F   A   Pts
Arsenal               38  25  7   6   70  34  82
Manchester City       37  24  5   8   82  36  77
Chelsea               38  23  8   7   71  40  77
Manchester United     37  22  8   7   68  37  70
Tottenham Hotspur     37  20  10  7   52  44  70
Liverpool             37  20  9   8   84  40  69
Everton               37  18  13  6   53  33  67
Newcastle United      37  16  6   15  48  55  54
Southampton           38  12  15  11  49  43  51
Aston Villa           37  11  10  16  50  55  43
Swansea City          37  9   12  16  42  52  39
West Ham United       38  9   11  18  41  58  38
Norwich City          37  9   11  17  34  58  38
Fulham                37  11  5   21  39  65  38
West Bromwich Albion  37  7   13  17  47  59  34
Stoke City            37  8   10  19  31  57  34
Sunderland            37  7   10  20  35  60  31

Most Premier League points per year (chart not reproduced)

With thanks to Opta. Stats correct as at December 30, 2013.
module Main where

import Test.Hspec
import Types
import Parser
import Lib
import qualified Data.Map as Map
import Data.Ratio
import TestSets
import Util

successful (Right x) = x
successful (Left err) = error $ show err

parse = successful . parseEquation

spec :: Spec
spec = do
  describe "inequality parser" $ do
    --it "should parse very trivial inequalities" $ parse "x > 0" `shouldBe` RawInequality [VarToken 2 'x'] [ConstantToken 0] ">"
    it "should parse trivial inequalities" $
      parse "2x > 0" `shouldBe` RawInequality [VarToken 2 'x'] [ConstantToken 0] ">"
  describe "extracting variables" $ do
    it "should flip everything if the sign is positive" $
      extractOne 'x' (ReducibleInequality (Map.fromList [('x', 1), ('y', 4)]) 0 LessThan)
        `shouldBe` ReducibleInequality {leftSide = Map.fromList [('y', -4)], constant = 0, inequalityType = GreaterThan}
    it "shouldn't flip anything if the sign is negative" $
      extractOne 'x' (ReducibleInequality (Map.fromList [('x', -1), ('y', 4)]) 0 LessThan)
        `shouldBe` ReducibleInequality {leftSide = Map.fromList [('y', 4)], constant = 0, inequalityType = LessThan}
    it "should normalize all coefficients properly if the sign is positive" $
      extractOne 'x' (ReducibleInequality (Map.fromList [('x', 2), ('y', 4)]) 0 LessThan)
        `shouldBe` ReducibleInequality {leftSide = Map.fromList [('y', -2)], constant = 0, inequalityType = GreaterThan}
    it "should normalize all coefficients properly if the sign is negative" $
      extractOne 'x' (ReducibleInequality (Map.fromList [('x', -2), ('y', 4)]) 0 LessThan)
        `shouldBe` ReducibleInequality {leftSide = Map.fromList [('y', 2)], constant = 0, inequalityType = LessThan}
    it "should extract with the proper direction of inequality" $
      extractOne 'x' (prs "1x < 10") `shouldBe` ReducibleInequality Map.empty 10 GreaterThan
  describe "extracting from a set of inequalities" $ do
    it "should reduce the variable properly in a simple (2-vars) set" $
      reduce startSet `shouldBe` endSet
    it "should reduce the variable properly in a more complex (3-vars) set" $
      reduce startSet2 `shouldBe` endSet2
    it "should reduce within a slightly skewed set" $
      reduce startSet3 `shouldBe` endSet3
  describe "double reduction" $ do
    it "should reduce properly one step after another" $
      reduce endSet2 `shouldBe` endEndSet2
  describe "final checks" $ do
    it "should properly verify trivial cases" $ do
      checkFinal [prs "1 > 0"] `shouldBe` True
      checkFinal [prs "0 > 1"] `shouldBe` False
    it "should properly verify trivial cases in sets" $ do
      checkFinal [prs "1 > 0", prs "2 > 0"] `shouldBe` True
      checkFinal [prs "0 < 1", prs "1 > 0"] `shouldBe` True
      checkFinal [prs "1 < 0", prs "1 > 0"] `shouldBe` False
      checkFinal [prs "1 < 0", prs "0 > 1"] `shouldBe` False
    it "should verify open 1-var cases to true" $ do
      checkFinal [prs "1x > 0"] `shouldBe` True
      checkFinal [prs "1x < 0"] `shouldBe` True
      checkFinal [prs "1x > 0", prs "2x > 0"] `shouldBe` True
    it "should verify no-constant 1-var cases" $ do
      checkFinal [prs "1x > 0", prs "1x < 10"] `shouldBe` True
      checkFinal [prs "1x > 0", prs "1x < -5"] `shouldBe` False
    it "should verify complex cases with constants" $ do
      checkFinal [prs "2x > 6", prs "1x < 5"] `shouldBe` True
      checkFinal [prs "1x > 1", prs "-2x > -4", prs "2x > 3"] `shouldBe` True
      checkFinal [prs "3x < -5", prs "2x > 100"] `shouldBe` False
    it "should verify hybrid cases" $ do
      checkFinal [prs "1x > 10", prs "1 > 0"] `shouldBe` True
      checkFinal [prs "1x > 10", prs "2x < 8", prs "3 > 0"] `shouldBe` False
      checkFinal [prs "1x > 2", prs "2x < 8", prs "3 < 0"] `shouldBe` False
  describe "integration" $ do
    it "should properly solve a simple case" $
      check startSet3 `shouldBe` True
    it "should properly solve a simple negative case" $
      check startSet4 `shouldBe` False
    it "should solve a 3-variable case" $
      check startSet2 `shouldBe` True

main = hspec spec
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package br.com.cinema.model; import java.io.Serializable; import java.util.Date; import java.util.List; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import org.hibernate.annotations.ForeignKey; import org.hibernate.validator.constraints.NotBlank; /** * * @author cleiton */ @Entity @Table(name = "INSCRICAO") public class Inscricao implements Serializable { private static final long serialVersionUID = 1L; @Id @GeneratedValue @Column(name = "INSCRICAO_ID", nullable = false) private Integer inscricao_id; @NotBlank @Size(min = 4, message = "completo") @Column(name = "INSCRICAO_NOME", nullable = true, length = 40) private String inscricao_nome; @Column(name = "INSCRICAO_NOME_MAE", nullable = true, length = 40) private String inscricao_nome_mae; @Column(name = "INSCRICAO_NOME_PAI", nullable = true, length = 40) private String inscricao_nome_pai; @Column(name = "INSCRICAO_CPF", nullable = false, length = 14) // 224.491.491-71 private String inscricao_cpf; @NotBlank @Column(name = "INSCRICAO_RG", nullable = true, length = 20) private String inscricao_rg; @NotBlank @Column(name = "INSCRICAO_OE", nullable = true, length = 30) private String inscricao_oe; @NotNull @Column(name = "INSCRICAO_DATANASC", nullable = true) @Temporal(javax.persistence.TemporalType.DATE) private Date inscricao_datanasc; @Column(name = "INSCRICAO_SEXO", nullable = true, columnDefinition = "TINYINT(1)") private boolean inscricao_sexo; @Column(name = "INSCRICAO_NATURALIDADE", nullable = true, length = 40) private String inscricao_naturalidade; @Column(name = "INSCRICAO_ENDERECO", nullable = true, length = 40) private String inscricao_endereco; @Column(name = "INSCRICAO_NUMERO", nullable = true, length = 10) private String inscricao_numero; @Column(name = "INSCRICAO_BAIRRO", nullable = true, length = 40) private String inscricao_bairro; @Column(name = "INSCRICAO_CEP", nullable = true, length = 10) // 27600-000 private String inscricao_cep; @Column(name = "INSCRICAO_TEL", nullable = true, length = 14) // (34)-8888-8888 private String inscricao_tel; @Column(name = "INSCRICAO_CEL", nullable = true, length = 15) // (34)-88888-8888 private String inscricao_cel; @NotBlank @Column(name = "INSCRICAO_EMAIL", nullable = true, length = 40) private String inscricao_email; @Column(name = "INSCRICAO_SENHA", nullable = true, length = 40) private String inscricao_senha; @Column(name = "INSCRICAO_PORTADOR_ESPECIAL", nullable = true, columnDefinition = "TINYINT(1)") private boolean inscricao_portador_especial; @Column(name = "INSCRICAO_PORTADOR_DESCRICAO", nullable = true, length = 100) private String inscricao_portador_descricao; @Column(name = "INSCRICAO_ESCOLA", nullable = true, length = 40) private String inscricao_escola; @Column(name = "INSCRICAO_STATUS", nullable = true, columnDefinition = "TINYINT(1)") private boolean inscricao_status; @Basic(optional = true) // funcionando // @Column(name = 
"INSCRICAO_DATACAD", insertable = false, updatable = // false) // @Temporal(TemporalType.TIMESTAMP) // private Date inscricao_datacad; @Column(name = "INSCRICAO_DATACAD", columnDefinition = "TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP") @Temporal(TemporalType.TIMESTAMP) private Date inscricao_datacad; // ////////////////////////////////////////////////////////// // RELACIONAMENTOS @ManyToOne(optional = false) @ForeignKey(name = "FK_ESTADO_INSCRICAO") @JoinColumn(name = "EST_ID", referencedColumnName = "est_id", nullable = true) private Estado estado; @ManyToOne(optional = false) @ForeignKey(name = "FK_CIDADE_INSCRICAO") @JoinColumn(name = "CID_ID", referencedColumnName = "cid_id", nullable = true) @NotNull private Cidade cidade; @OneToMany(mappedBy = "inscricao") @ForeignKey(name = "FK_INSCRICAO_INSCRICAO_FILME") private List<Inscricao_Filme> inscricao_filmes; public Inscricao() { this.estado = new Estado(); this.cidade = new Cidade(); } public Integer getInscricao_id() { return inscricao_id; } public void setInscricao_id(Integer inscricao_id) { this.inscricao_id = inscricao_id; } public String getInscricao_nome() { return inscricao_nome; } public void setInscricao_nome(String inscricao_nome) { this.inscricao_nome = inscricao_nome; } public String getInscricao_nome_mae() { return inscricao_nome_mae; } public void setInscricao_nome_mae(String inscricao_nome_mae) { this.inscricao_nome_mae = inscricao_nome_mae; } public String getInscricao_nome_pai() { return inscricao_nome_pai; } public void setInscricao_nome_pai(String inscricao_nome_pai) { this.inscricao_nome_pai = inscricao_nome_pai; } public String getInscricao_cpf() { return inscricao_cpf; } public void setInscricao_cpf(String inscricao_cpf) { this.inscricao_cpf = inscricao_cpf; } public String getInscricao_rg() { return inscricao_rg; } public void setInscricao_rg(String inscricao_rg) { this.inscricao_rg = inscricao_rg; } public String getInscricao_oe() { return inscricao_oe; } public void setInscricao_oe(String inscricao_oe) { this.inscricao_oe = inscricao_oe; } public Date getInscricao_datanasc() { return inscricao_datanasc; } public void setInscricao_datanasc(Date inscricao_datanasc) { this.inscricao_datanasc = inscricao_datanasc; } public boolean isInscricao_sexo() { return inscricao_sexo; } public void setInscricao_sexo(boolean inscricao_sexo) { this.inscricao_sexo = inscricao_sexo; } public String getInscricao_naturalidade() { return inscricao_naturalidade; } public void setInscricao_naturalidade(String inscricao_naturalidade) { this.inscricao_naturalidade = inscricao_naturalidade; } public String getInscricao_endereco() { return inscricao_endereco; } public void setInscricao_endereco(String inscricao_endereco) { this.inscricao_endereco = inscricao_endereco; } public String getInscricao_numero() { return inscricao_numero; } public void setInscricao_numero(String inscricao_numero) { this.inscricao_numero = inscricao_numero; } public String getInscricao_bairro() { return inscricao_bairro; } public void setInscricao_bairro(String inscricao_bairro) { this.inscricao_bairro = inscricao_bairro; } public String getInscricao_cep() { return inscricao_cep; } public void setInscricao_cep(String inscricao_cep) { this.inscricao_cep = inscricao_cep; } public String getInscricao_tel() { return inscricao_tel; } public void setInscricao_tel(String inscricao_tel) { this.inscricao_tel = inscricao_tel; } public String getInscricao_cel() { return inscricao_cel; } public void setInscricao_cel(String inscricao_cel) { 
this.inscricao_cel = inscricao_cel; } public String getInscricao_email() { return inscricao_email; } public void setInscricao_email(String inscricao_email) { this.inscricao_email = inscricao_email; } public String getInscricao_senha() { return inscricao_senha; } public void setInscricao_senha(String inscricao_senha) { this.inscricao_senha = inscricao_senha; } public boolean isInscricao_portador_especial() { return inscricao_portador_especial; } public void setInscricao_portador_especial( boolean inscricao_portador_especial) { this.inscricao_portador_especial = inscricao_portador_especial; } public String getInscricao_portador_descricao() { return inscricao_portador_descricao; } public void setInscricao_portador_descricao( String inscricao_portador_descricao) { this.inscricao_portador_descricao = inscricao_portador_descricao; } public String getInscricao_escola() { return inscricao_escola; } public void setInscricao_escola(String inscricao_escola) { this.inscricao_escola = inscricao_escola; } public boolean isInscricao_status() { return inscricao_status; } public void setInscricao_status(boolean inscricao_status) { this.inscricao_status = inscricao_status; } public Date getInscricao_datacad() { return inscricao_datacad; } public void setInscricao_datacad(Date inscricao_datacad) { this.inscricao_datacad = inscricao_datacad; } public Estado getEstado() { return estado; } public void setEstado(Estado estado) { this.estado = estado; } public Cidade getCidade() { return cidade; } public void setCidade(Cidade cidade) { this.cidade = cidade; } public List<Inscricao_Filme> getInscricao_filmes() { return inscricao_filmes; } public void setInscricao_filmes(List<Inscricao_Filme> inscricao_filmes) { this.inscricao_filmes = inscricao_filmes; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((inscricao_id == null) ? 0 : inscricao_id.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Inscricao other = (Inscricao) obj; if (inscricao_id == null) { if (other.inscricao_id != null) return false; } else if (!inscricao_id.equals(other.inscricao_id)) return false; return true; } @Transient // habilitar e desabilitar o campo portadorDescrição public boolean isHabilitarCampoPortadorDescricao() { return inscricao_portador_especial ? false : true; } }
import { github } from "../services"; import { Token } from "../entity/Token"; import { injectedServices } from "../server"; export default { github: (req, res, next) => { res.clearCookie('redirect'); res.setCookie('redirect', req.header('Referer').split("?")[0]), res.redirect({ hostname: 'github.com', pathname: '/login/oauth/authorize', secure: true, query: { redirect_uri: 'http://localhost:8080/auth/github/callback', client_id: github.client_id, } }, next); }, githubCallback: async (req, res, next) => { const { userService } = req.injected as injectedServices; try{ const token = await github.getAccessToken(req.query.code); const authorization = `${token.token_type} ${token.access_token}`; const user = await github.getUser(authorization); const tokenModel = new Token(); tokenModel.access_token = authorization; tokenModel.browser = req.userAgent(); userService.createUserFromGithub(user, tokenModel); res.redirect(`${req.cookies['redirect']}?token=${authorization}`, next); } catch (e) { res.redirect(`${req.cookies['redirect']}?error=error`, next); } } }
i = 1
while i <= 3:
    print("Guess:", i)
    i = i + 1
print("sorry you failed")
SIDN sounds the alarm on DNSSEC security status of Dutch domain names

Dutch domain names don't have adequate DNSSEC security. That's the conclusion of a report presented today by SIDN, the company that runs the .nl domain. The DNSSEC Inventory 2017 (only in Dutch at this moment) describes the DNSSEC security status of the Netherlands' most important domain names. At present, 46 per cent of all .nl domains have digital signatures. However, the banking sector (6 per cent) and ISPs (22 per cent) are lagging behind other sectors when it comes to protecting domain names with DNSSEC. By contrast, government bodies have made great progress in the last three years.

DNSSEC involves the cryptographic protection of domain name information. It makes the internet's 'signpost system' more secure and more reliable. If a domain name is secured with DNSSEC, people who want to visit the associated website are protected against being misdirected to a fraudster's IP address. Without DNSSEC, there's a risk that, despite entering the right domain name, people will end up on a fake site set up to trick them. DNSSEC also forms the basis for new applications, such as systems for making e-mail safer and for easily sharing cryptographic keys to secure internet communications.

Elections 2017

With elections to the lower house of the Dutch parliament coming up on 15 March 2017, SIDN decided to include the domain names of political parties, information sites and research bureaus in its inventory. More than half (54 per cent) of the seventy-four domains covered by the inventory had DNSSEC security flaws.

Big improvement by government

A previous inventory in 2014 found that financial service providers, listed companies, government organisations and internet service providers were lagging a long way behind other sectors. Since then, the number of signed domain names in all the underperforming sectors has risen, but most remain disappointing compared with the pace-setters. Government organisations form an exception, however: they are doing much better than three years ago. Back then, just 11 per cent of government websites were secured. Now the figure stands at 59 per cent, putting the government third in the sector league table.

"Banks should be the most important users of DNSSEC security, but for the second time in a row they have the worst scores of all the domain names surveyed. With physical bank branches closing and the number of ATMs falling, the banks' online front door is becoming ever more important. What's more, they suffer more from phishing and spoofing than any other businesses - something that DNSSEC, in combination with DKIM and DMARC, can protect against," says Roelof Meijer, SIDN's CEO.

DNSSEC status better

Over the last two years, various new safety applications have been rolled out which piggy-back on the DNSSEC infrastructure. As a result, DNSSEC has gone from being a technology-driven expense to being an enabler for key security applications designed to tackle phishing, spamming, spoofing and other e-mail abuses. In addition, the obstacles in the way of secure domain name transfers have recently been resolved. SIDN has developed a method that enables registrars all over the world to transfer domain names securely, by following a uniform procedure based on EPP (the Extensible Provisioning Protocol). Last week, the new method was formally adopted as a global standard by the Internet Engineering Task Force (IETF).
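To make the validation idea concrete, here is a minimal sketch of how a client could ask a validating resolver whether a domain's DNSSEC signatures check out. It uses the third-party dnspython package; the resolver address and domain name are illustrative assumptions, not taken from the report.

# Minimal sketch (assumes dnspython is installed; resolver address and
# queried domain are illustrative).
import dns.flags
import dns.resolver

resolver = dns.resolver.Resolver()
resolver.nameservers = ['8.8.8.8']        # a public resolver that validates DNSSEC
resolver.use_edns(0, dns.flags.DO, 1232)  # set the DO bit to request DNSSEC data

answer = resolver.resolve('example.nl', 'A')
# A validating resolver sets the AD (Authenticated Data) flag only when the
# response's DNSSEC signatures were verified successfully.
print('DNSSEC validated:', bool(answer.response.flags & dns.flags.AD))

If the resolver in use does not validate, the AD flag simply never appears, which is why the article stresses that ISPs must commit to validation for DNSSEC to be effective.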
"Against that backdrop, it's hard to think of any good reason for not implementing DNSSEC protection," continues Meijer. "We believe that it's now up to the big internet service providers to act. It's really important that they get behind DNSSEC, because the protocol is only effective if ISPs commit to validating domain names' digital signatures. Late last year, XS4ALL took the plunge and became the first national internet service provider to enable DNSSEC validation." For the DNSSEC Inventory 2017, SIDN analysed more than seven thousand domain names in four general sectors: financial services, the public sector, internet and telecom service providers, and listed companies. The analysis made use of the DNSSEC Portfolio Checker developed by SIDN labs.
// Verify comparison of fringe float values TEST_F(RowContainerTest, compareFloat) { testCompareFloats<float>(REAL(), true); testCompareFloats<float>(REAL(), false); }
/* strbuf_append_fmt() should only be used when an upper bound * is known for the output string. */ void strbuf_append_fmt(strbuf_t *s, int len, const char *fmt, ...) { va_list arg; int fmt_len; strbuf_ensure_empty_length(s, len); va_start(arg, fmt); fmt_len = vsnprintf(s->buf + s->length, len, fmt, arg); va_end(arg); if (fmt_len < 0) die("BUG: Unable to convert number"); s->length += fmt_len; }
/******************************************************************************/ /** * This function responds to the SET_INTERFACE command. * * @param InstancePtr is a pointer to the XUsb instance of the controller. * * @return None. * * @note None. * ******************************************************************************/ void SetInterface(XUsb * InstancePtr) { if ((Ch9_CmdBuf.Word1.wValue == 0) && (Ch9_CmdBuf.Word2.wIndex == 0)) { SetupControlWriteStatusStage(InstancePtr); } else { #ifdef MICROPHONE SetupControlWriteStatusStage(InstancePtr); #else XUsb_EpStall(InstancePtr, 0); #endif } }
// NOTE: this export is new to IE5, so it can move to browseui // along with the rest of this proxy desktop code BOOL SHOnCWMCommandLine(LPARAM lParam) { HNFBLOCK hnf = (HNFBLOCK)lParam; IETHREADPARAM *piei = ConvertHNFBLOCKtoNFI(hnf); if (piei) return SHOpenFolderWindow(piei); return FALSE; }
<gh_stars>0 import { Box, Button, Divider, Link, Stack, useToast } from '@chakra-ui/react' import { AssetNamespace, AssetReference, caip19 } from '@shapeshiftoss/caip' import { ChainTypes, NetworkTypes, SwapperType } from '@shapeshiftoss/types' import { useState } from 'react' import { useFormContext } from 'react-hook-form' import { useTranslate } from 'react-polyglot' import { RouterProps, useLocation } from 'react-router-dom' import { Card } from 'components/Card/Card' import { HelperTooltip } from 'components/HelperTooltip/HelperTooltip' import { Row } from 'components/Row/Row' import { SlideTransition } from 'components/SlideTransition' import { RawText, Text } from 'components/Text' import { TRADE_ERRORS, useSwapper } from 'components/Trade/hooks/useSwapper/useSwapper' import { TradeState } from 'components/Trade/Trade' import { useWallet } from 'context/WalletProvider/WalletProvider' import { useLocaleFormatter } from 'hooks/useLocaleFormatter/useLocaleFormatter' import { bnOrZero } from 'lib/bignumber/bignumber' import { firstNonZeroDecimal } from 'lib/math' import { selectLastTxStatusByAssetId } from 'state/slices/selectors' import { useAppSelector } from 'state/store' import { ValueOf } from 'types/object' import { WithBackButton } from '../WithBackButton' import { AssetToAsset } from './AssetToAsset' type TradeConfirmParams = { fiatRate: string } type ZrxError = Error & { message: string } export const TradeConfirm = ({ history }: RouterProps) => { const [txid, setTxid] = useState('') const { getValues, handleSubmit, formState: { isSubmitting } } = useFormContext<TradeState<ChainTypes, SwapperType>>() const toast = useToast() const translate = useTranslate() const { sellAsset, buyAsset, quote, fees, trade } = getValues() const { executeQuote, reset } = useSwapper() const location = useLocation<TradeConfirmParams>() const { fiatRate } = location.state const { number: { toFiat } } = useLocaleFormatter({ fiatType: 'USD' }) const { state: { wallet } } = useWallet() const { chain, tokenId } = sellAsset.currency const network = NetworkTypes.MAINNET const assetNamespace = AssetNamespace.ERC20 const extra = tokenId ? 
{ assetNamespace, assetReference: tokenId } : { assetNamespace: AssetNamespace.Slip44, assetReference: AssetReference.Ethereum } const caip = caip19.toCAIP19({ chain, network, ...extra }) const status = useAppSelector(state => selectLastTxStatusByAssetId(state, caip)) // Parametrized errors cannot simply be matched with === since their param(s) might vary const PARAMETRIZED_ERRORS_TO_TRADE_ERRORS = { 'ZrxExecuteQuote - signAndBroadcastTransaction error': TRADE_ERRORS.TRANSACTION_REJECTED, 'ZrxExecuteQuote - Signed transaction is required': TRADE_ERRORS.SIGNING_REQUIRED, 'ZrxExecuteQuote - broadcastTransaction error': TRADE_ERRORS.BROADCAST_FAILED, 'ZrxExecuteQuote - invalid HDWallet config': TRADE_ERRORS.HDWALLET_INVALID_CONFIG, 'ZrxExecuteQuote - signTransaction error': TRADE_ERRORS.SIGNING_FAILED } as const const getParametrizedErrorMessageOrDefault = ( errorMessage: string ): ValueOf<typeof PARAMETRIZED_ERRORS_TO_TRADE_ERRORS> | TRADE_ERRORS.INSUFFICIENT_FUNDS => { // If no other error pattern is found, we assume the tx was rejected because of insufficient funds const defaultTradeError = TRADE_ERRORS.INSUFFICIENT_FUNDS return ( Object.entries(PARAMETRIZED_ERRORS_TO_TRADE_ERRORS).find(([error]) => errorMessage.includes(error) )?.[1] || defaultTradeError ) } const onSubmit = async () => { if (!wallet) return try { const result = await executeQuote({ wallet }) const transactionId = result?.txid if (transactionId) { setTxid(transactionId) } } catch (err) { console.error(`TradeConfirm:onSubmit - ${err}`) let errorMessage switch ((err as ZrxError).message) { case 'ZrxSwapper:ZrxExecuteQuote Cannot execute a failed quote': { errorMessage = TRADE_ERRORS.FAILED_QUOTE_EXECUTED break } case 'ZrxSwapper:ZrxExecuteQuote sellAssetAccountId is required': { errorMessage = TRADE_ERRORS.SELL_ASSET_REQUIRED break } case 'ZrxSwapper:ZrxExecuteQuote sellAmount is required': { errorMessage = TRADE_ERRORS.SELL_AMOUNT_REQUIRED break } case 'ZrxSwapper:ZrxExecuteQuote depositAddress is required': { errorMessage = TRADE_ERRORS.DEPOSIT_ADDRESS_REQUIRED break } case 'ZrxSwapper:ZrxExecuteQuote sellAssetNetwork and sellAssetSymbol are required': { errorMessage = TRADE_ERRORS.SELL_ASSET_NETWORK_AND_SYMBOL_REQUIRED break } default: { errorMessage = getParametrizedErrorMessageOrDefault((err as ZrxError).message) } } toast({ title: translate('trade.errors.title'), description: translate(errorMessage), status: 'error', duration: 9000, isClosable: true, position: 'top-right' }) } } const handleBack = () => { if (txid) { reset() } history.push('/trade/input') } return ( <SlideTransition> <Box as='form' onSubmit={handleSubmit(onSubmit)}> <Card variant='unstyled'> <Card.Header px={0} pt={0}> <WithBackButton handleBack={handleBack}> <Card.Heading textAlign='center'> <Text translation={txid ? 
'trade.complete' : 'trade.confirmDetails'} /> </Card.Heading> </WithBackButton> <AssetToAsset buyAsset={buyAsset} sellAsset={sellAsset} mt={6} status={status} /> </Card.Header> <Divider /> <Card.Body pb={0} px={0}> <Stack spacing={4}> {txid && ( <Row> <Row.Label> <RawText>Tx ID</RawText> </Row.Label> <Box textAlign='right'> <Link isExternal color='blue.500' href={`${sellAsset.currency?.explorerTxLink}${txid}`} > <Text translation='trade.viewTransaction' /> </Link> </Box> </Row> )} <Row> <HelperTooltip label={translate('trade.tooltip.rate')}> <Row.Label> <Text translation='trade.rate' /> </Row.Label> </HelperTooltip> <Box textAlign='right'> <RawText>{`1 ${sellAsset.currency.symbol} = ${firstNonZeroDecimal( bnOrZero(quote?.rate) )} ${buyAsset?.currency?.symbol}`}</RawText> <RawText color='gray.500'>@{trade?.name}</RawText> </Box> </Row> <Row> <HelperTooltip label={translate('trade.tooltip.minerFee')}> <Row.Label> <Text translation='trade.minerFee' /> </Row.Label> </HelperTooltip> <Row.Value> {bnOrZero(fees?.fee).toNumber()} ≃{' '} {toFiat(bnOrZero(fees?.fee).times(fiatRate).toNumber())} </Row.Value> </Row> <Row> <HelperTooltip label={translate('trade.tooltip.shapeshiftFee')}> <Row.Label> <Text translation='trade.shapeshiftFee' /> </Row.Label> </HelperTooltip> <Row.Value>{toFiat(0)}</Row.Value> </Row> </Stack> </Card.Body> <Card.Footer px={0} py={0}> {!txid && ( <Button isLoading={isSubmitting} colorScheme='blue' size='lg' width='full' mt={6} type='submit' > <Text translation='trade.confirmAndTrade' /> </Button> )} </Card.Footer> </Card> </Box> </SlideTransition> ) }
<reponame>freedomshare/NFT-RPG-Game export const getShieldNameFromSeed = (seed: number, stars: number) => { if(seed === -1 && stars === -1) // TEMP anti lint return 'Templars will'; return 'Templars will'; };
/** * Add owl:imports from the T-Box ontology to SKOS and to the A-Box ontology. */ private void addImports () { OWLImportsDeclaration importsAxiom; AddImport imp; importsAxiom = odf.getOWLImportsDeclaration( ontA.getOntologyID().getOntologyIRI() ); imp = new AddImport( ontT, importsAxiom ); oom.applyChange( imp ); importsAxiom = odf.getOWLImportsDeclaration( IriUtil.skosIRI ); imp = new AddImport( ontT, importsAxiom ); oom.applyChange( imp ); }
import TelegramCommandBase from "./command-base"; import doorCommand from "./door-command"; import helloCommand from "./hello-command"; import usersCommand from "./users-command"; import startCommand from "./start-command"; const commands: TelegramCommandBase[] = [ doorCommand, helloCommand, usersCommand, startCommand ]; export default commands;
/** * Maps a fixture from the one returned from the FPL JSON API to one exposed by this library. * * @author James Amoore */ class FixtureMapper { private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'"); protected FPLFixture mapFixture(final JsonFixture jsonFixture, final Map<Integer, FPLTeam> teamMap) { FPLTeam homeTeam = teamMap.get(jsonFixture.getTeamH()); FPLTeam awayTeam = teamMap.get(jsonFixture.getTeamA()); LocalDateTime kickoff = null; if(jsonFixture.getKickoffTime() != null) { kickoff = LocalDateTime.parse(jsonFixture.getKickoffTime(), FORMATTER); } return new FPLFixture(jsonFixture.getId(), jsonFixture.getEvent(), kickoff, homeTeam, awayTeam, jsonFixture.getTeamHDifficulty(), jsonFixture.getTeamADifficulty()); } }
///Turns the content into a byte vector. Slices are copied. pub fn into_bytes(self) -> Vec<u8> { match self { Data::Bytes(bytes) => bytes.into_owned(), Data::String(string) => string.into_owned().into_bytes() } }
import * as openrazer from 'openrazer'; const wait = (millis: number) => new Promise(resolve => setTimeout(resolve, millis)); (async () => { const kbds = await openrazer.getKeyboards(); if (kbds.length === 0) return; const kbd = kbds[0]; console.log('serial:', await kbd.getSerialNumber()); console.log('firmware:', await kbd.getFirmwareVersion()); console.log('Setting to random breathing effect'); kbd.setMatrixEffectBreath(); await wait(5000); console.log('Setting to single color breathing effect'); kbd.setMatrixEffectBreath([255, 0, 0]); await wait(5000); console.log('Setting to dual color breathing effect'); kbd.setMatrixEffectBreath([255, 255, 255], [0, 0, 255]); await wait(5000); console.log('Setting starlight effect, speed 0'); kbd.setMatrixEffectStarlight(0); await wait(5000); console.log('Setting starlight effect, speed 100'); kbd.setMatrixEffectStarlight(100); await wait(5000); console.log('Setting starlight effect, speed 255'); kbd.setMatrixEffectStarlight(255); await wait(5000); console.log('Setting starlight effect, in red, speed 50'); kbd.setMatrixEffectStarlight(50, [255, 0, 0]); await wait(5000); console.log('Setting starlight effect, dual mode, speed 50'); kbd.setMatrixEffectStarlight(50, [0, 200, 255], [150, 0, 255]); await wait(5000); console.log('Setting reactive effect, yellow, short'); kbd.setMatrixEffectReactive(1, [255, 255, 0]); await wait(5000); console.log('Setting reactive effect, orange, long'); kbd.setMatrixEffectReactive(3, [255, 155, 0]); await wait(5000); })();
import { createLogger } from '@stoplight/prism-core'; import { IHttpOperation } from '@stoplight/types'; import * as fastify from 'fastify'; import { createServer } from '../'; import { IPrismHttpServer } from '../types'; const logger = createLogger('TEST', { enabled: false }); function instantiatePrism2(operations: IHttpOperation[]) { return createServer(operations, { components: { logger }, cors: true, config: { checkSecurity: true, validateRequest: true, validateResponse: true, mock: { dynamic: false }, errors: false, }, errors: false, }); } describe('body params validation', () => { let server: IPrismHttpServer; afterAll(() => { return server.fastify.close(); }); describe('http operation with body param', () => { beforeEach(() => { server = instantiatePrism2([ { id: '?http-operation-id?', method: 'post', path: '/json-body-no-request-content-type', responses: [ { code: '200', headers: [], contents: [ { mediaType: 'text/plain', schema: { type: 'string', $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, ], servers: [], request: { body: { contents: [ { mediaType: '', schema: { type: 'object', properties: { id: { type: 'integer', format: 'int64', minimum: -9223372036854776000, maximum: 9223372036854776000, }, }, $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, headers: [], query: [], cookie: [], path: [], }, tags: [], security: [], }, { id: '?http-operation-id?', method: 'post', path: '/json-body-optional', responses: [ { code: '200', headers: [], contents: [ { mediaType: 'text/plain', schema: { type: 'string', $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, ], servers: [], request: { body: { required: false, contents: [ { mediaType: 'application/json', schema: { type: 'object', properties: { id: { type: 'integer', format: 'int64', minimum: -9223372036854776000, maximum: 9223372036854776000, }, }, $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, headers: [], query: [], cookie: [], path: [], }, tags: [], security: [], }, { id: '?http-operation-id?', method: 'post', path: '/json-body-required', responses: [ { code: '200', headers: [], contents: [ { mediaType: 'text/plain', schema: { type: 'string', $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, ], servers: [], request: { body: { required: true, contents: [ { mediaType: 'application/json', schema: { type: 'object', properties: { id: { type: 'integer', format: 'int64', minimum: -9223372036854776000, maximum: 9223372036854776000, }, status: { type: 'string', enum: ['placed', 'approved', 'delivered'], }, }, $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, headers: [], query: [], cookie: [], path: [], }, tags: [], security: [], }, { id: '?http-operation-id?', method: 'post', path: '/json-body-property-required', responses: [ { code: '200', headers: [], contents: [ { mediaType: 'text/plain', schema: { type: 'string', $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, ], servers: [], request: { body: { contents: [ { mediaType: 'application/json', schema: { type: 'object', properties: { id: { type: 'integer', format: 'int64', minimum: -9223372036854776000, maximum: 9223372036854776000, }, }, required: ['id'], $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, headers: [], query: [], cookie: [], path: [], }, 
tags: [], security: [], }, ]); }); describe('operation with no request content type defined', () => { const operation: fastify.HTTPInjectOptions = { method: 'POST', url: '/json-body-no-request-content-type', }; describe('property type invalid', () => { test('returns 422 & error message', async () => { const response = await server.fastify.inject({ ...operation, payload: { id: 'string', }, }); expect(response.statusCode).toBe(422); expect(JSON.parse(response.payload)).toMatchObject({ validation: [ { code: 'type', location: ['body', 'id'], message: 'should be integer', severity: 'Error', }, ], }); }); }); }); describe('operation with required property', () => { const operation: fastify.HTTPInjectOptions = { method: 'POST', url: '/json-body-property-required', }; describe('when property not provided', () => { test('returns 422 & error message', async () => { const response = await server.fastify.inject({ ...operation, payload: {}, }); expect(response.statusCode).toBe(422); expect(JSON.parse(response.payload)).toMatchObject({ validation: [{ code: 'required', message: "should have required property 'id'", severity: 'Error' }], }); }); }); }); describe('operation with optional body', () => { describe('when no body provided', () => { test('returns 200', async () => { const response = await server.fastify.inject({ method: 'POST', url: '/json-body-optional', }); expect(response.statusCode).toBe(200); }); }); }); describe('operation with required body', () => { const operation: fastify.HTTPInjectOptions = { method: 'POST', url: '/json-body-required', }; describe('when no body provided', () => { test('returns 422 & error message', async () => { const response = await server.fastify.inject(operation); expect(response.statusCode).toBe(422); expect(JSON.parse(response.payload)).toMatchObject({ validation: [{ code: 'required', message: 'Body parameter is required', severity: 'Error' }], }); }); }); describe('when body provided', () => { describe('and property type invalid', () => { test('returns 422 & error message', async () => { const response = await server.fastify.inject({ ...operation, payload: { id: 'string', }, }); expect(response.statusCode).toBe(422); expect(JSON.parse(response.payload)).toMatchObject({ validation: [ { code: 'type', location: ['body', 'id'], message: 'should be integer', severity: 'Error', }, ], }); }); }); describe('and property not one of enum', () => { test('returns 422 & error message', async () => { const response = await server.fastify.inject({ ...operation, payload: { status: 'string', }, }); expect(response.statusCode).toBe(422); expect(JSON.parse(response.payload)).toMatchObject({ validation: [ { code: 'enum', location: ['body', 'status'], message: 'should be equal to one of the allowed values: placed, approved, delivered', severity: 'Error', }, ], }); }); }); }); }); }); describe('http operation with form data param', () => { beforeEach(() => { server = instantiatePrism2([ { id: '?http-operation-id?', method: 'post', path: '/path', responses: [ { code: '200', headers: [], contents: [ { mediaType: 'text/plain', schema: { type: 'string', $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, ], servers: [], request: { body: { contents: [ { mediaType: 'application/x-www-form-urlencoded', schema: { type: 'object', properties: { id: { type: 'integer', }, status: { type: 'string', enum: ['open', 'close'], }, }, required: ['id', 'status'], $schema: 'http://json-schema.org/draft-04/schema#', }, examples: [], encodings: [], }, ], }, 
headers: [], query: [], cookie: [], path: [], }, tags: [], security: [], }, ]); }); describe('required parameter not in body', () => { test('returns 422', async () => { const response = await server.fastify.inject({ method: 'POST', url: '/path', payload: {}, }); expect(response.statusCode).toBe(422); const parsed = JSON.parse(response.payload); expect(parsed).toMatchObject({ type: 'https://stoplight.io/prism/errors#UNPROCESSABLE_ENTITY', validation: [ { location: ['body'], severity: 'Error', code: 'required', message: "should have required property 'id'", }, { location: ['body'], severity: 'Error', code: 'required', message: "should have required property 'status'", }, ], }); }); }); describe('parameter does not match enum criteria', () => { test('returns 422 & proper validation message', async () => { const response = await server.fastify.inject({ method: 'POST', url: '/path', payload: { id: 'not integer', status: 'somerundomestuff', }, }); expect(response.statusCode).toBe(422); const parsed = JSON.parse(response.payload); expect(parsed).toMatchObject({ type: 'https://stoplight.io/prism/errors#UNPROCESSABLE_ENTITY', validation: [ { location: ['body', 'id'], severity: 'Error', code: 'type', message: 'should be integer', }, { location: ['body', 'status'], severity: 'Error', code: 'enum', message: 'should be equal to one of the allowed values: open, close', }, ], }); }); }); describe('valid parameter provided', () => { test('returns 200', async () => { const response = await server.fastify.inject({ method: 'POST', url: '/path', payload: { id: 123, status: 'open', }, }); expect(response.statusCode).toBe(200); }); }); }); });
SD to Enforce Anti-Abortion Law A federal judge ruled on Wednesday that starting July 1, South Dakota can enforce part of a stringent anti-abortion law which will require a doctor, before performing an abortion, to determine if a woman has been coerced into the abortion or if she is at risk of psychological disorders. The ruling was the result of an agreement between Planned Parenthood, who sued the state over the law, and the Attorney General. As part of the agreement, the language of the law has been changed so that doctors will no longer be required to ask about certain psychological risk factors. Executive Director of NARAL Pro-Choice South Dakota, Alisha Sedor, told the Argus Leader, "Like any patient, a woman considering abortion should receive full and unbiased information from her doctor about her medical options. However, this legislation imposes unnecessary government intrusion into private decisions and the doctor-patient relationship. We are disappointed to see that South Dakota legislators continue to insert themselves between a woman and her doctor." Planned Parenthood still has challenges pending to stop other parts of the anti-abortion law from going into effect. Under the portions of the law that a judge has yet to rule on, a woman will be required to wait 72 hours from the time of the initial appointment and the time of the abortion. She will also be required to consult with a pregnancy help center during the 72 hour waiting period. Media Resources: Argus Leader 6/27/12; AP 6/27/12; RH Reality Check 6/27/12
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.lucene; import static java.util.concurrent.TimeUnit.MILLISECONDS; import com.google.common.collect.Sets; import com.google.common.util.concurrent.AbstractFuture; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.gerrit.lucene.LuceneChangeIndex.GerritIndexWriterConfig; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.index.TrackingIndexWriter; import org.apache.lucene.search.ControlledRealTimeReopenThread; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ReferenceManager.RefreshListener; import org.apache.lucene.search.SearcherFactory; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; /** Piece of the change index that is implemented as a separate Lucene index. 
*/ class SubIndex { private static final Logger log = LoggerFactory.getLogger(SubIndex.class); private final Directory dir; private final TrackingIndexWriter writer; private final SearcherManager searcherManager; private final ControlledRealTimeReopenThread<IndexSearcher> reopenThread; private final Set<NrtFuture> notDoneNrtFutures; SubIndex(File file, GerritIndexWriterConfig writerConfig) throws IOException { this(FSDirectory.open(file), file.getName(), writerConfig); } SubIndex(Directory dir, final String dirName, GerritIndexWriterConfig writerConfig) throws IOException { this.dir = dir; IndexWriter delegateWriter; long commitPeriod = writerConfig.getCommitWithinMs(); if (commitPeriod < 0) { delegateWriter = new IndexWriter(dir, writerConfig.getLuceneConfig()); } else if (commitPeriod == 0) { delegateWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), true); } else { final AutoCommitWriter autoCommitWriter = new AutoCommitWriter(dir, writerConfig.getLuceneConfig(), false); delegateWriter = autoCommitWriter; new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder() .setNameFormat("Commit-%d " + dirName) .setDaemon(true) .build()) .scheduleAtFixedRate(new Runnable() { @Override public void run() { try { if (autoCommitWriter.hasUncommittedChanges()) { autoCommitWriter.manualFlush(); autoCommitWriter.commit(); } } catch (IOException e) { log.error("Error committing Lucene index " + dirName, e); } catch (OutOfMemoryError e) { log.error("Error committing Lucene index " + dirName, e); try { autoCommitWriter.close(); } catch (IOException e2) { log.error("SEVERE: Error closing Lucene index " + dirName + " after OOM; index may be corrupted.", e); } } } }, commitPeriod, commitPeriod, MILLISECONDS); } writer = new TrackingIndexWriter(delegateWriter); searcherManager = new SearcherManager( writer.getIndexWriter(), true, new SearcherFactory()); notDoneNrtFutures = Sets.newConcurrentHashSet(); reopenThread = new ControlledRealTimeReopenThread<>( writer, searcherManager, 0.500 /* maximum stale age (seconds) */, 0.010 /* minimum stale age (seconds) */); reopenThread.setName("NRT " + dirName); reopenThread.setPriority(Math.min( Thread.currentThread().getPriority() + 2, Thread.MAX_PRIORITY)); reopenThread.setDaemon(true); // This must be added after the reopen thread is created. The reopen thread // adds its own listener which copies its internally last-refreshed // generation to the searching generation. removeIfDone() depends on the // searching generation being up to date when calling // reopenThread.waitForGeneration(gen, 0), therefore the reopen thread's // internal listener needs to be called first. searcherManager.addListener(new RefreshListener() { @Override public void beforeRefresh() throws IOException { } @Override public void afterRefresh(boolean didRefresh) throws IOException { for (NrtFuture f : notDoneNrtFutures) { f.removeIfDone(); } } }); reopenThread.start(); } void close() { reopenThread.close(); // Closing the reopen thread sets its generation to Long.MAX_VALUE, but we // still need to refresh the searcher manager to let pending NrtFutures // know. // // Any futures created after this method (which may happen due to undefined // shutdown ordering behavior) will finish immediately, even though they may // not have flushed. 
try { searcherManager.maybeRefreshBlocking(); } catch (IOException e) { log.warn("error finishing pending Lucene writes", e); } try { writer.getIndexWriter().commit(); try { writer.getIndexWriter().close(); } catch (AlreadyClosedException e) { // Ignore. } } catch (IOException e) { log.warn("error closing Lucene writer", e); } try { dir.close(); } catch (IOException e) { log.warn("error closing Lucene directory", e); } } ListenableFuture<?> insert(Document doc) throws IOException { return new NrtFuture(writer.addDocument(doc)); } ListenableFuture<?> replace(Term term, Document doc) throws IOException { return new NrtFuture(writer.updateDocument(term, doc)); } ListenableFuture<?> delete(Term term) throws IOException { return new NrtFuture(writer.deleteDocuments(term)); } void deleteAll() throws IOException { writer.deleteAll(); } IndexSearcher acquire() throws IOException { return searcherManager.acquire(); } void release(IndexSearcher searcher) throws IOException { searcherManager.release(searcher); } private final class NrtFuture extends AbstractFuture<Void> { private final long gen; NrtFuture(long gen) { this.gen = gen; // Tell the reopen thread we are waiting on this generation so it uses the // min stale time when refreshing. isGenAvailableNowForCurrentSearcher(); } @Override public Void get() throws InterruptedException, ExecutionException { if (!isDone()) { reopenThread.waitForGeneration(gen); set(null); } return super.get(); } @Override public Void get(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException, ExecutionException { if (!isDone()) { if (reopenThread.waitForGeneration(gen, (int) MILLISECONDS.convert(timeout, unit))) { set(null); } else { throw new TimeoutException(); } } return super.get(timeout, unit); } @Override public boolean isDone() { if (super.isDone()) { return true; } else if (isGenAvailableNowForCurrentSearcher()) { set(null); return true; } return false; } @Override public void addListener(Runnable listener, Executor executor) { if (isGenAvailableNowForCurrentSearcher() && !isCancelled()) { set(null); } else if (!isDone()) { notDoneNrtFutures.add(this); } super.addListener(listener, executor); } @Override public boolean cancel(boolean mayInterruptIfRunning) { boolean result = super.cancel(mayInterruptIfRunning); if (result) { notDoneNrtFutures.remove(this); } return result; } void removeIfDone() { if (isGenAvailableNowForCurrentSearcher()) { notDoneNrtFutures.remove(this); if (!isCancelled()) { set(null); } } } private boolean isGenAvailableNowForCurrentSearcher() { try { return reopenThread.waitForGeneration(gen, 0); } catch (InterruptedException e) { log.warn("Interrupted waiting for searcher generation", e); return false; } } } }
<reponame>ysyun/angular-stub-proxy-environment
import { Component, OnInit, OnDestroy } from '@angular/core';

import { #NAME#Service } from './#FILE_NAME#.service';

@Component({
  moduleId: module.id,
  selector: '#FILE_NAME#',
  templateUrl: '#FILE_NAME#.component.html',
  styleUrls: ['#FILE_NAME#.component.css']
})
export class #NAME#Component implements OnInit, OnDestroy {
  constructor(private #FILE_NAME#Service: #NAME#Service) {}

  ngOnInit() {
    console.log(this.#FILE_NAME#Service.getData());
  }

  ngOnDestroy() {}
}
/** * Provides a default implementation of <tt>FrameRateControl</tt>. * * @author Lyubomir Marinov */ public class FrameRateControlAdapter implements FrameRateControl { /** * Gets the UI <tt>Component</tt> associated with this <tt>Control</tt> object. * <tt>FrameRateControlAdapter</tt> always returns <tt>null</tt>. * * @return the UI <tt>Component</tt> associated with this <tt>Control</tt> object * @see Control#getControlComponent() */ public Component getControlComponent() { return null; } /** * Gets the current output frame rate. <tt>FrameRateControlAdapter</tt> always returns * <tt>-1</tt>. * * @return the current output frame rate if it is known; otherwise, <tt>-1</tt> * @see FrameRateControl#getFrameRate() */ public float getFrameRate() { return -1; } /** * Gets the maximum supported output frame rate. <tt>FrameRateControlAdapter</tt> always * returns <tt>-1</tt>. * * @return the maximum supported output frame rate if it is known; otherwise, <tt>-1</tt> * @see FrameRateControl#getMaxSupportedFrameRate() */ public float getMaxSupportedFrameRate() { return -1; } /** * Gets the default/preferred output frame rate. <tt>FrameRateControlAdapter</tt> always * returns * <tt>-1</tt>. * * @return the default/preferred output frame rate if it is known; otherwise, <tt>-1</tt> * @see FrameRateControl#getPreferredFrameRate() */ public float getPreferredFrameRate() { return -1; } /** * Sets the output frame rate. <tt>FrameRateControlAdapter</tt> always returns <tt>-1</tt>. * * @param frameRate * the output frame rate to change the current one to * @return the actual current output frame rate or <tt>-1</tt> if it is unknown or not * controllable * @see FrameRateControl#setFrameRate(float) */ public float setFrameRate(float frameRate) { return -1; } }
The collagen of the gingiva and of its blood vessels. The collagen of the gingiva and that of its blood vessels of several animal species and of man were studied with the scanning electron microscope following corrosion with pancreatin at 0.3%. The gingival collagen forms fundamental cells or fasciculi that are distributed throughout the entire territory. In the interior of the cells there are small balls, either detached or in clusters. Because of the contact and the fusion of these balls with the collagenic fasciculi, and due to their resistance to the pancreatin corrosion procedure, we believe them to be condensations of collagen. The approximate size of these balls was 2 micrometers. The fasciculi that surround these cells either have the form of bundles or are united in bands. There is an abundance of blood vessels in the gingiva, and they are surrounded by collagen. This collagen can assume a variety of positions. The relations that exist between the vascular collagen and that of the gingiva are different in every case.
#!/usr/bin/env python #encoding: utf8 import rospy from pimouse_search_wifi.msg import WiFiStatus if __name__ == '__main__': status_file = '/proc/net/wireless' rospy.init_node('scan_wifi') pub = rospy.Publisher('wifi_status', WiFiStatus, queue_size=5) rate = rospy.Rate(30) while not rospy.is_shutdown(): try: with open(status_file, 'r') as f: status = WiFiStatus() lines = f.readlines() target_line = lines[2].split() status.link = float(target_line[2]) status.level = float(target_line[3]) pub.publish(status) except IOError: rospy.logerr("cannot read from " + status_file) rate.sleep()
/**
 * Read a value from the buffered register array.
 *
 * @param address The address of the value to read
 * @param size    The size of the value to read in bits (currently unused)
 * @return The value, or 0 if the address is outside the buffered range.
 */
public synchronized int read( int address, int size ) {
    if( registers != null ) {
        // The buffer covers addresses [offset, offset + count), so the upper
        // bound must be exclusive to avoid indexing past the end of the array.
        if(( address >= offset ) && ( address < ( offset + count ))) {
            return registers[address - offset];
        }
    }
    return 0;
}
// Last30DaysAverageScore returns the average score for all surveys submitted in the last 30 days. func (s *surveyResponses) Last30DaysAverageScore(ctx context.Context) (float64, error) { q := sqlf.Sprintf("SELECT AVG(score) FROM survey_responses WHERE created_at>%s", thirtyDaysAgo()) var avg sql.NullFloat64 err := dbconn.Global.QueryRowContext(ctx, q.Query(sqlf.PostgresBindVar), q.Args()...).Scan(&avg) return avg.Float64, err }
// Test that we can startup and cleaning shutdown the ACPICA library. TEST(X86DeviceTest, BasicAcpica) { std::unique_ptr<X86> dev; ASSERT_OK(X86::Create(nullptr, driver_unit_test::GetParent(), &dev)); ASSERT_OK(dev->EarlyAcpiInit()); }
Effect of heterologous antimacrophage serum on growth of Rous virus-induced sarcoma in the allogeneic and syngeneic system. The effect of heterologous antimacrophage serum (AMS) and normal rabbit serum (NRS) on the immune reaction against Rous virus-induced sarcoma (RSL) was studied in rats. The growth of RSL sarcoma transplanted against the H-1 barrier in AMS-treated rats was more progressive than in the untreated or NRS-treated control group. On the other hand, the growth of RSL sarcoma was significantly suppressed in syngeneic AMS- or NRS-treated recipients compared to the untreated control rats.
/** * Splits a list into non-view sublists of length size */ public static <T> List<List<T>> split(List<T> list, final int size) { if(list == null || list.isEmpty()) return Immutable.list(); final List<List<T>> parts = new ArrayList<>(); final int N = list.size(); for (int i = 0; i < N; i += size) { parts.add(Immutable.listOf(list.subList(i, Math.min(N, i + size)))); } return Immutable.listOf(parts); }
#ifndef __RFC_7234_HPP_INCLUDED__
#define __RFC_7234_HPP_INCLUDED__

/* https://tools.ietf.org/html/rfc7234 */

namespace xtd {
  namespace Grammars {
    namespace RFC7234 {
#pragma region("forward declarations")
#pragma endregion
#pragma region("strings")
#pragma endregion
#pragma region("imports")
#pragma endregion
#pragma region("rules")
#pragma endregion
    }
  }
}

#endif //__RFC_7234_HPP_INCLUDED__
/** * Class responsible for keeping the conversation state of objects maintained in the EntityManager. */ public abstract class ConversationContext { public enum STATE {CURRENT, SUSPENDED} @Setter @Getter private STATE state = STATE.CURRENT; @Setter @Getter private Object contextOwner; @Getter private Stack<Snapshot> snapshotsStack = new Stack<>(); @Getter private ExchangedEntities exchangedEntities = new ExchangedEntities(); @Getter @Setter private Object[] outputParams = new Object[]{}; @Autowired @Setter private ModelStore dynamicModelStore; @Autowired ApplicationContext applicationContext; private boolean invalidToApprove = false; private static final ThreadLocal<ModelStore> overloadedDynamicModelStore = new ThreadLocal<>(); public abstract EntityManager getEntityManager(); public void suspend() { checkIfCurrent(); setState(STATE.SUSPENDED); } public void terminate() { checkIfCurrent(); if (getEntityManager() != null) { getEntityManager().clear(); ((Session) getEntityManager().getDelegate()).close(); } } @Transactional public void approve() { checkIfCurrent(); if (invalidToApprove) { throw new FhStaleConversationException(); } try { if (getEntityManager() != null) { getEntityManager().flush(); } } catch (Exception e) { invalidate(); throw e; } } @Transactional public void approveAndTerminate() { approve(); if (getEntityManager() != null) { ((Session) getEntityManager().getDelegate()).close(); } } public void clear() { checkIfCurrent(); if (getEntityManager() != null) { getEntityManager().clear(); } snapshotsStack.forEach(Snapshot::clear); dynamicModelStore = applicationContext.getBean(ModelStore.class); //dynamicModelStore.clearPersistentContext(); } public boolean snapshotExists() { return !snapshotsStack.isEmpty(); } public Snapshot getCurrentSnapshot() { return snapshotsStack.peek(); } public boolean isOwner(Object contextOwner) { return ReflectionUtils.objectsEqual(this.contextOwner, contextOwner); } public void overLoadDynamicModelStore(ModelStore dynamicModelStore) { if (dynamicModelStore == null) { overloadedDynamicModelStore.remove(); } else { overloadedDynamicModelStore.set(dynamicModelStore); } } public ModelStore getModelStore() { if (overloadedDynamicModelStore.get() != null) { return overloadedDynamicModelStore.get(); } return dynamicModelStore; } public boolean isValid() { return !invalidToApprove; } private void checkIfCurrent() { if (state == STATE.SUSPENDED) { throw new FhConversationException("Attempt to execute operation on SUSPENDED context"); } } public void invalidate() { invalidToApprove = true; } }
Deformation and Basin Formation Along Strike-Slip Faults

Significant advances during the decade 1975 to 1985 in understanding the geology of basins along strike-slip faults include the following: (1) paleomagnetic and other evidence for very large magnitude strike slip in some orogenic belts; (2) abundant paleomagnetic evidence for the pervasive rotation of blocks about vertical axes within broad intracontinental transform boundaries; (3) greater appreciation for the wide range of structural styles along strike-slip faults; (4) new models for the evolution of strike-slip basins; and (5) a body of new geophysical and geological data for specific basins. In the light of this work, and as an introduction to the remainder of the volume, the purpose of this paper is to summarize the major characteristics of and controls on structural patterns along strike-slip faults, the processes and tectonic settings of basin formation, and distinctive stratigraphic characteristics of strike-slip basins.

Strike-slip faults are characterized by a linear or curvilinear principal displacement zone in map view and, in profile, by a subvertical fault zone that ranges from braided to upward-diverging within the sedimentary cover. Many strike-slip faults, even those involving crystalline basement rocks, may be detached within the middle to upper crust. Two prominent characteristics are the occurrence of en echelon faults and folds, within or adjacent to the principal displacement zone, and the co-existence of faults with normal and reverse separation. The main controls on the development of structural patterns along strike-slip faults are (1) the degree to which adjacent blocks either converge or diverge during strike slip; (2) the magnitude of displacement; (3) the material properties of the sediments and rocks being deformed; and (4) the configuration of pre-existing structures. Each of these tends to vary spatially, and, except for the last, to change through time. It is therefore not surprising that structural patterns along strike-slip faults differ in detail from simple predictions based on the instantaneous deformation of homogeneous materials. In the analysis of structural style, it is important to attempt to separate structures of different ages, and especially to distinguish structures due to strike-slip deformation from those predating or post-dating that deformation. Distinctive aspects of structural style for strike-slip deformation on a regional scale include evidence for simultaneous shortening and extension, and for random directions of vergence in associated thrusts and nappes.

Sedimentary basins form along strike-slip faults as a result of localized crustal extension and, especially in zones of continental convergence, of localized crustal shortening and flexural loading. A given basin may alternately experience both extension and shortening through variations in the motion of adjacent crustal blocks, or extension in one direction (or in one part of the basin) may be accompanied by shortening in another direction (or in another part of the basin). The directions of extension and shortening also tend to vary within a given basin and to change through time; and the magnitude of extension may be depth-dependent.

Theoretical studies and observations from basins where strike-slip deformation has ceased suggest that many strike-slip basins experience very little thermally driven post-rift subsidence. Strike-slip basins are typically narrow (less than about 50 km wide), and they rapidly lose anomalous heat by accentuated lateral as well as vertical conduction. Detached or thin-skinned basins also tend to be cooler after rifting has ended than those resulting from the same amount of extension of the entire lithosphere. In some cases, subsidence may be arrested or its record destroyed as a result of subsequent deformation. Subsidence due to extension, thermal contraction, or crustal loads is amplified by sediment loading. The location of depositional sites is determined by (1) crustal type and the configuration of pre-existing crustal structures; (2) variations in the motion of lithospheric plates; and (3) the kinematic behavior of crustal blocks. The manner in which overall plate motion is accommodated by discrete slip on major faults, and by the rotation and internal deformation of blocks between those faults, is especially important. Subsidence history cannot be determined with confidence from present fault geometry, which therefore provides a poor basis for basin classification. Every basin is unique, and palinspastic reconstructions are useful even if difficult to undertake.

Distinctive aspects of the stratigraphic record along strike-slip faults include (1) geological mismatches within and at the boundaries of basins; (2) a tendency for longitudinal as well as lateral basin asymmetry, owing to the migration of depocenters with time; (3) evidence for episodic rapid subsidence, recorded by thick stratigraphic sections, and in some marine basins by rapid deepening; (4) the occurrence of abrupt lateral facies changes and local unconformities; and (5) marked differences in stratigraphic thickness, facies geometry, and occurrences of unconformities from one basin to another in the same region.
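The amplification of subsidence by sediment loading mentioned above can be made quantitative with the standard Airy-isostasy relation used in backstripping. This is a textbook result rather than an equation from the paper itself, and the densities below are assumed representative values:

$$ S_{\mathrm{sed}} = S_{\mathrm{water}} \, \frac{\rho_m - \rho_w}{\rho_m - \rho_s} $$

With mantle, sediment and water densities of roughly $\rho_m \approx 3.3$, $\rho_s \approx 2.5$ and $\rho_w \approx 1.0\ \mathrm{g\,cm^{-3}}$, the amplification factor is $(3.3 - 1.0)/(3.3 - 2.5) \approx 2.9$, so filling a basin with sediment roughly triples the accommodation produced by a given tectonic driving subsidence.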
/** * Type-level natural numbers. */ public abstract static class HNat<A extends HNat<A>> { public abstract Show<A> show(); public abstract Integer toInteger(); public static HZero hZero() { return new HZero(); } public static <N extends HNat<N>> HSucc<N> hSucc(final N n) { return new HSucc<N>(n); } public static <N extends HNat<N>> N hPred(final HSucc<N> n) { return n.pred; } }
import { Component, OnInit, OnDestroy } from '@angular/core'; import { ActivatedRoute } from '@angular/router'; import { switchMap } from 'rxjs/operators'; import { Subscription } from 'rxjs'; @Component({ selector: 'app-result', templateUrl: './result.component.html', styleUrls: ['./result.component.scss'] }) export class ResultComponent implements OnInit, OnDestroy { path: string; subscriptions: Array<Subscription>; constructor(private _ar: ActivatedRoute) { } ngOnInit() { this.subscriptions = []; this.subscriptions.push(this._ar.params.subscribe((data) => { this.path = data['path']; })); } ngOnDestroy(): void { if (this.subscriptions && this.subscriptions.length > 0) { this.subscriptions.forEach((subs) => { if (subs) { subs.unsubscribe(); } }); } } }
package org.firstinspires.ftc.teamcode; import com.qualcomm.hardware.adafruit.BNO055IMU; import com.qualcomm.hardware.adafruit.JustLoggingAccelerationIntegrator; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorController; import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.HardwareMap; import com.qualcomm.robotcore.util.ElapsedTime; import com.qualcomm.robotcore.util.Range; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; import org.firstinspires.ftc.robotcore.external.navigation.Orientation; import org.firstinspires.ftc.robotcore.external.navigation.Position; import org.firstinspires.ftc.robotcore.external.navigation.Velocity; import java.util.Locale; public class DriveSystemBase { // Drive System public DcMotor mDriveL1 = null, mDriveL2 = null, mDriveR1 = null, mDriveR2 = null; public BNO055IMU imuChasis = null; BNO055IMU.Parameters parameters = null; static final double COUNTS_PER_MOTOR_REV = 1120; //(cpr): 1120 (280 rises of Channel A) // eg: TETRIX Motor Encoder: 1440 static final double DRIVE_GEAR_REDUCTION = 1; //2.0 ; // This is < 1.0 if geared UP static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415); // These constants define the desired driving/control characteristics // The can/should be tweaked to suite the specific Motors drive train. static final double DRIVE_SPEED = 0.7; // Nominal speed for better accuracy. static final double TURN_SPEED = 0.5; // Nominal half speed for better accuracy. static final double HEADING_THRESHOLD = 0.5; //Original: 1 ; // As tight as we can make it with an integer gyro static final double P_TURN_COEFF = 0.0375; //Original: .1 // Larger is more responsive, but also less stable static final double P_DRIVE_COEFF = 0.07; //Original: 0.15; // Larger is more responsive, but also less stable // State used for updating telemetry Orientation angles; public DriveSystemBase(){ } //initialize the drive system hardware public void init(HardwareMap HWMap) { // Drive System Init mDriveL1 = HWMap.dcMotor.get("mDriveL1"); mDriveL2 = HWMap.dcMotor.get("mDriveL2"); mDriveR1 = HWMap.dcMotor.get("mDriveR1"); mDriveR2 = HWMap.dcMotor.get("mDriveR2"); mDriveL1.setDirection(DcMotorSimple.Direction.REVERSE); mDriveL2.setDirection(DcMotorSimple.Direction.REVERSE); imuChasis = HWMap.get(BNO055IMU.class, "imu"); } public void init_gyro(){ // Set up the parameters with which we will use our IMU. Note that integration // algorithm here just reports accelerations to the logcat log; it doesn't actually // provide positional information. 
parameters = new BNO055IMU.Parameters(); parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES; parameters.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC; parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode parameters.loggingEnabled = true; parameters.loggingTag = "IMU"; parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator(); //I moved these lines below to an init in LightningAutonomous.java so they would not interfere //with normal teleop drive mode -- just keeping as a reference for now imuChasis.initialize(parameters); //// Start the logging of measured acceleration imuChasis.startAccelerationIntegration(new Position(), new Velocity(), 1000); driveByGyroActive = false; turnByGyroActive = false; } private double deadzone(double input){ double dArea = 1.0; return (input > dArea ? dArea : (input < -dArea ? -dArea : input)); } private void driveSystem(double left, double right){ left = Range.clip(left,-1.0,1.0); right = Range.clip(right,-1.0,1.0); mDriveL1.setPower(left); mDriveL2.setPower(left); mDriveR1.setPower(right); mDriveR2.setPower(right); } public void ArcadeDrive(double ForwardPower, double TurnPower){ driveSystem(TurnPower - ForwardPower, TurnPower + ForwardPower); } public void runWithEncoder(){ mDriveL1.setMode(DcMotor.RunMode.RUN_USING_ENCODER); mDriveR1.setMode(DcMotor.RunMode.RUN_USING_ENCODER); } public void resetEncoders(){ mDriveL1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); mDriveR1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); } public void resetIMU_Position_Integration() { imuChasis.stopAccelerationIntegration(); // Start the logging of measured acceleration imuChasis.startAccelerationIntegration(new Position(), new Velocity(), 1000); } enum DirectionSelect{left,right,both} boolean atEncVal(DirectionSelect Select, int val){ switch (Select){ case left: return (Math.abs(mDriveL1.getCurrentPosition()) > val); case right: return (Math.abs(mDriveR1.getCurrentPosition()) > val); case both: return (Math.abs(mDriveL1.getCurrentPosition()) > val && (Math.abs(mDriveR1.getCurrentPosition()) > val)); default: return false; } } boolean hasEncoderReset(DcMotor check){ return (check.getCurrentPosition() == 0); } private final double power = 0.75; boolean AutonDrive(int length, double ForwardPower, double LateralPower) { // Always reset to false condition on check boolean l_return = false; // Ensure run mode is set to run with encoders runWithEncoder(); // Check if at position currently if (atEncVal(DirectionSelect.both, length)){ l_return = true; resetEncoders(); } return l_return; } public double getGyroAngle() { angles = imuChasis.getAngularOrientation().toAxesReference(AxesReference.INTRINSIC).toAxesOrder(AxesOrder.ZYX); return angleDegrees(angles.angleUnit, angles.firstAngle); } public double angleDegrees(AngleUnit angleUnit, double angle) { return AngleUnit.DEGREES.fromUnit(angleUnit, angle); } /** * Method to drive on a fixed compass bearing (angle), based on encoder counts. * Move will stop if either of these conditions occur: * 1) Move gets to the desired position * 2) Driver stops the opmode running. * * @param speed Target speed for forward motion. Should allow for _/- variance for adjusting heading * @param distance Distance (in inches) to move from current position. Negative distance means move backwards. * @param angle Absolute Angle (in Degrees) relative to last gyro reset. * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. 
* If a relative angle is required, add/subtract from current heading. */ public int newLeftTarget; public int newRightTarget; public int moveCounts; public double max; public double error; public double steer; public double leftSpeed; public double rightSpeed; public double angle; private double speed; private double distance; public boolean driveByGyroActive; public boolean turnByGyroActive; /******************************** * Encoder Functions */ public int getLeftMotorEncoderVal(){ return mDriveL1.getCurrentPosition();} public int getRightMotorEncoderVal(){ return mDriveR1.getCurrentPosition();} public void setLeftTargetPos(int target){mDriveL1.setTargetPosition(target);} public void setRightTargetPos(int target){mDriveR1.setTargetPosition(target);} public void runLeftMotorToPos(){mDriveL1.setMode(DcMotor.RunMode.RUN_TO_POSITION);} public void runRightMotorToPos() {mDriveR1.setMode(DcMotor.RunMode.RUN_TO_POSITION);} public void setGyroDrive ( double desiredSpeed, double desiredDistance, double desiredAngle) { // Copy desiredAngle, desiredDistance & desiredSpeed for the runGyroDrive() method. // This must happen before computing moveCounts, so the targets are based on the // requested distance rather than a stale value from a previous call. angle = desiredAngle; distance = desiredDistance; speed = desiredSpeed; // Determine new target position, and pass to motor controller moveCounts = (int) (distance * COUNTS_PER_INCH); newLeftTarget = mDriveL1.getCurrentPosition() + moveCounts; newRightTarget = mDriveR1.getCurrentPosition() + moveCounts; // Set Target and Turn On RUN_TO_POSITION mDriveL1.setTargetPosition(newLeftTarget); mDriveR1.setTargetPosition(newRightTarget); mDriveL1.setMode(DcMotor.RunMode.RUN_TO_POSITION); mDriveR1.setMode(DcMotor.RunMode.RUN_TO_POSITION); // start motion. speed = Range.clip(Math.abs(speed), 0.0, 1.0); mDriveL1.setPower(speed); mDriveL2.setPower(speed); mDriveR1.setPower(speed); mDriveR2.setPower(speed); driveByGyroActive = true; } // Returns true while the robot is still driving; false once the encoder target is reached public boolean runGyroDrive() { // keep looping while we are still active, and BOTH Motors are running. if (mDriveL1.isBusy() && mDriveR1.isBusy()) { // adjust relative speed based on heading error. error = getError(angle); steer = getSteer(error, P_DRIVE_COEFF); // if driving in reverse, the motor correction also needs to be reversed if (distance < 0) steer *= -1.0; leftSpeed = speed - steer; rightSpeed = speed + steer; // Normalize speeds if any one exceeds +/- 1.0; max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed)); if (max > 1.0) { leftSpeed /= max; rightSpeed /= max; } mDriveL1.setPower(leftSpeed); mDriveL2.setPower(leftSpeed); mDriveR1.setPower(rightSpeed); mDriveR2.setPower(rightSpeed); return true; //true meaning the robot is still driving } else { // Stop all motion; mDriveL1.setPower(0); mDriveL2.setPower(0); mDriveR1.setPower(0); mDriveR2.setPower(0); // Turn off RUN_TO_POSITION on the two encoder-equipped motors mDriveL1.setMode(DcMotor.RunMode.RUN_USING_ENCODER); mDriveR1.setMode(DcMotor.RunMode.RUN_USING_ENCODER); driveByGyroActive = false; } return false; //false meaning the robot has stopped } /** * Method to spin on central axis to point in a new direction. * Move will stop if either of these conditions occur: * 1) Move gets to the heading (angle) * 2) Driver stops the opmode running. * * @param desiredSpeed Desired speed of turn. * @param desiredAngle Absolute Angle (in Degrees) relative to last gyro reset. * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* @throws InterruptedException */ public boolean gyroTurn ( double desiredSpeed, double desiredAngle) { angle = desiredAngle; //for telemetry display in LightningAutonomous // keep looping while we are still active, and not on heading. if (onHeading(desiredSpeed, desiredAngle, P_TURN_COEFF)) { turnByGyroActive = false; return false; //false meaning the Motors is no longer turning } else { turnByGyroActive = true; return true; //true meaning the Motors is still turning } } /** * Method to obtain & hold a heading for a finite amount of time * Move will stop once the requested time has elapsed * * @param desiredSpeed Desired speed of turn. * @param desiredangle Absolute Angle (in Degrees) relative to last gyro reset. * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. * If a relative angle is required, add/subtract from current heading. * @param desiredHoldTime Length of time (in seconds) to hold the specified heading. * @throws InterruptedException */ private ElapsedTime holdTimer; public void startGyroHold() { holdTimer = new ElapsedTime(); holdTimer.reset(); } public boolean gyroHold( double desiredSpeed, double desiredAngle, double desiredHoldTime) { angle = desiredAngle; //for telemetry display in LightningAutonomous // keep looping while we have time remaining. if(holdTimer.time() < desiredHoldTime) { onHeading(desiredSpeed, desiredAngle, P_TURN_COEFF); turnByGyroActive = true; return true; //true meaning the Motors is still turning/desired time has not elapsed } // Stop all motion; mDriveL1.setPower(0); mDriveL2.setPower(0); mDriveR1.setPower(0); mDriveR2.setPower(0); turnByGyroActive = false; return false; //false meaning the Motors is done turning/desired time elapsed } /** * Perform one cycle of closed loop heading control. * * @param speed Desired speed of turn. * @param angle Absolute Angle (in Degrees) relative to last gyro reset. * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. * If a relative angle is required, add/subtract from current heading. * @param PCoeff Proportional Gain coefficient * @return */ boolean onHeading(double speed, double angle, double PCoeff) { double error ; double steer ; boolean onTarget = false ; double leftSpeed; double rightSpeed; // determine turn power based on +/- error error = getError(angle); if (Math.abs(error) <= HEADING_THRESHOLD) { steer = 0.0; leftSpeed = 0.0; rightSpeed = 0.0; onTarget = true; } else { steer = getSteer(error, PCoeff); rightSpeed = speed * steer; leftSpeed = -rightSpeed; } // Send desired speeds to Motors. mDriveL1.setPower(leftSpeed); mDriveL2.setPower(leftSpeed); mDriveR1.setPower(rightSpeed); mDriveR2.setPower(rightSpeed); return onTarget; } /** * getError determines the error between the target angle and the Motors's current heading * @param targetAngle Desired angle (relative to global reference established at last Gyro Reset). * @return error angle: Degrees in the range +/- 180. Centered on the Motors's frame of reference * +ve error means the Motors should turn LEFT (CCW) to reduce error. */ public double getError(double targetAngle) { double robotError; // calculate error in -179 to +180 range ( //newAngles = imu.getAngularOrientation().toAxesReference(AxesReference.INTRINSIC).toAxesOrder(AxesOrder.ZYX); //robotError = targetAngle + AngleUnit.DEGREES.fromUnit(newAngles.angleUnit, newAngles.firstAngle); robotError = targetAngle + getGyroAngle(); while (robotError > 180) robotError -= 360; while (robotError <= -180) robotError += 360; return robotError; } /** * returns desired steering force. +/- 1 range. 
 * +ve = steer left.
 *
 * @param error  Error angle in robot-relative degrees
 * @param PCoeff Proportional gain coefficient
 * @return steering force, clipped to [-1, 1]
 */
public double getSteer(double error, double PCoeff) {
    return Range.clip(error * PCoeff, -1, 1);
}

//----------------------------------------------------------------------------------------------
// Formatting
//----------------------------------------------------------------------------------------------

String formatAngle(AngleUnit angleUnit, double angle) {
    return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle));
}

String formatDegrees(double degrees) {
    return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees));
}

/*********************************************************
 * Gyro Drive
 */
}
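Because runGyroDrive() and gyroTurn() report status instead of blocking, the intended calling pattern is an iterative opmode loop that polls them each cycle. The following is a minimal sketch of that pattern, assuming the FTC SDK's OpMode callbacks; the opmode and hardware-class names are hypothetical, and only the methods defined above are assumed to exist.

// Hypothetical autonomous opmode: drive 24 inches at heading 0, then turn to -90 degrees.
// LightningHardware is a placeholder name for the class that contains the methods above.
@Autonomous(name = "GyroDriveSketch")
public class GyroDriveSketch extends OpMode {

    private LightningHardware robot = new LightningHardware(); // hypothetical hardware class
    private int step = 0;

    @Override
    public void init() { /* hardware mapping omitted */ }

    @Override
    public void start() {
        robot.setGyroDrive(0.5, 24.0, 0.0); // speed, inches, heading
    }

    @Override
    public void loop() {
        switch (step) {
            case 0:
                if (!robot.runGyroDrive()) step = 1;       // false: drive segment finished
                break;
            case 1:
                if (!robot.gyroTurn(0.4, -90.0)) step = 2; // false: on heading
                break;
            default:
                // sequence complete
                break;
        }
    }
}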
# generate_sitemap.py
import os

BASE_URL = "https://www.nicholasjunge.com"
CONTENT_EXTS = [".md"]


def generate_sitemap():
    url_loc_bracket = "\n\t<url>\n\t\t<loc>{0}</loc>\n\t</url>\n"
    with open("public/sitemap.xml", mode="w", encoding="utf-8") as sitemap:
        sitemap.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
        sitemap.write("<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">")
        # Root URL first
        sitemap.write(url_loc_bracket.format(BASE_URL))
        # One URL per section directory found under "pages"
        for _, dirs, _ in os.walk("pages"):
            for section in dirs:
                sitemap.write(url_loc_bracket.format(BASE_URL + "/" + section))
        # One URL per Markdown content file, keyed by its parent directory
        for root, _, files in os.walk("content"):
            for f in files:
                # split off file extension
                content_uri, ext = os.path.splitext(os.path.join(os.path.split(root)[-1], f))
                if ext in CONTENT_EXTS:
                    sitemap.write(url_loc_bracket.format(BASE_URL + "/" + content_uri))
        sitemap.write("</urlset>\n")


if __name__ == "__main__":
    generate_sitemap()
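A quick way to sanity-check the output is to parse the generated file with the standard library. This is a minimal sketch, assuming the script has already been run from the project root so that public/sitemap.xml exists.

# Minimal sanity check for public/sitemap.xml, standard library only.
import xml.etree.ElementTree as ET

NS = {"sm": "http://www.sitemaps.org/schemas/sitemap/0.9"}

tree = ET.parse("public/sitemap.xml")  # raises ParseError if the XML is malformed
locs = [loc.text for loc in tree.getroot().findall("sm:url/sm:loc", NS)]
print(f"{len(locs)} URLs, first: {locs[0]}")  # the root URL is always written first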
// Gaussian regression function in which some parameters may be held fixed.
// param[0] = mean, param[1] = standard deviation, param[2] = scale (area).
// fixed[i] == true means param[i] keeps its preset value; the remaining free
// parameters are supplied, in order, through p.
class GaussianFunctionFixed implements RegressionFunction {

    public double[] param = new double[3];
    public boolean[] fixed = new boolean[3];

    public double function(double[] p, double[] x) {
        // Map the free parameters in p onto the non-fixed slots of param.
        int ii = 0;
        for (int i = 0; i < 3; i++) {
            if (!fixed[i]) {
                param[i] = p[ii];
                ii++;
            }
        }
        // Scaled Gaussian density evaluated at x[0]
        return (param[2] / (param[1] * Math.sqrt(2.0D * Math.PI)))
                * Math.exp(-0.5D * Fmath.square((x[0] - param[0]) / param[1]));
    }
}
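To illustrate how the fixed flags interact with the free-parameter array, here is a small sketch that evaluates the function directly with the standard deviation pinned. It assumes only the class above, plus the Fmath utility it already references, on the classpath; the numeric values are arbitrary.

// Hold sigma fixed at 2.0; only mean and scale remain free.
GaussianFunctionFixed g = new GaussianFunctionFixed();
g.fixed = new boolean[]{false, true, false}; // mean free, sigma fixed, scale free
g.param[1] = 2.0;                            // preset the fixed standard deviation

// p now carries only the free parameters, in order: {mean, scale}
double[] p = {5.0, 10.0};
double[] x = {5.0};
double y = g.function(p, x); // peak value: 10 / (2 * sqrt(2*pi)) ≈ 1.9947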
Story highlights

Clinton collected $52.8 million in just the first 19 days of October

Trump, meanwhile, has just $16 million in reserves

Washington (CNN) Donald Trump is still refusing to donate significantly more money to his campaign, putting him at an overwhelming cash disadvantage to Hillary Clinton with less than two weeks to go before Election Day.

The GOP nominee -- despite frequent promises to contribute $100 million to his campaign by Election Day -- donated a measly $31,000 in early October, a fundraising report released Thursday shows. He had donated only $56 million to his race as of October 20 and has just $16 million in reserves.

Clinton, meanwhile, sits on $62 million as of October 20, meaning she could spend more than $3 million a day during the final two weeks of the election and not go broke.

Clinton has also personally outspent Trump in early October, donating more than $50,000 to her campaign from her own funds, Federal Election Commission reports show.

As recently as Wednesday, Trump told CNN's Dana Bash that he would dramatically ramp up his donations to his campaign.
Coupling Ontology Driven Semantic Representation with Multilingual Natural Language Generation for Tuning International Terminologies

OBJECTIVES The importance of clinical communication between providers, consumers and others, as well as the requirement for computer interoperability, strengthens the need for sharing commonly accepted terminologies. Under the directives of the World Health Organization (WHO), an approach is currently being conducted in Australia to adopt a standardized terminology for medical procedures that is intended to become an international reference.

METHOD In order to achieve such a standard, a collaborative approach is adopted, in line with the successful experiment conducted for the development of the new French coding system CCAM. Different coding centres are involved in setting up a semantic representation of each term using a formal ontological structure expressed through a logic-based representation language. From this language-independent representation, multilingual natural language generation (NLG) is performed to produce noun phrases in various languages that are further compared for consistency with the original terms.

RESULTS Outcomes are presented for the assessment of the International Classification of Health Interventions (ICHI) and its translation into Portuguese. The initial results clearly emphasize the feasibility and cost-effectiveness of the proposed method for handling both a different classification and an additional language.

CONCLUSION NLG tools, based on ontology driven semantic representation, facilitate the discovery of ambiguous and inconsistent terms, and, as such, should be promoted for establishing coherent international terminologies.
import * as React from "react";

export interface SceneLayoutProps {
  title: string;
  menu?: React.ReactNode;
  top: React.ReactNode;
  left: React.ReactNode;
  right: React.ReactNode;
  footer: React.ReactNode;
  children?: React.ReactNode;
}

export declare const SceneLayout: React.FunctionComponent<SceneLayoutProps>;
export default SceneLayout;
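Since this is only a type declaration, a short consumer sketch may help show how the slot props are meant to be filled. The component names inside the slots are placeholders, not part of the declared API.

// Hypothetical consumer of SceneLayout; the slot contents are placeholders.
import * as React from "react";
import SceneLayout from "./SceneLayout";

const App: React.FC = () => (
  <SceneLayout
    title="Dashboard"
    top={<header>Toolbar</header>}
    left={<nav>Sidebar</nav>}
    right={<aside>Inspector</aside>}
    footer={<footer>Status</footer>}
  >
    <main>Content</main>
  </SceneLayout>
);

export default App;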
#ifndef _Cores_hh_
#define _Cores_hh_

#include <unistd.h>
#include <sched.h>     // cpu_set_t, sched_getaffinity, CPU_* macros
#include <cstdio>      // snprintf
#include <cctype>      // isspace
#include <fstream>
#include <ostream>
#include <string>
#include <stdexcept>   // runtime_error
#include <vector>

using namespace std;

/**
 * get the list of cores available on the system
 *
 * if physicalView is true, "list" will contain the id of existing
 * cores in increasing order (from 0 to n-1).
 *
 * if physicalView is false, "list" will contain the id of existing
 * cores ordered by the processor to which they belong. For example,
 * list could contain 0,2,4,6,1,3,5,7 if the first processor contains
 * cores 0,2,4,6 and the second processor contains cores 1,3,5,7.
 */
void getExistingCores(vector<unsigned short int> &list, bool physicalView)
{
  char fname[128];
  string buffer;
  ifstream f;
  cpu_set_t cores;

  CPU_ZERO(&cores);

  list.clear();

  for(unsigned int cpu=0;cpu<sizeof(cpu_set_t)<<3;++cpu)
    if(!CPU_ISSET(cpu,&cores)) // if we didn't already find that core
    {
      snprintf(fname,sizeof(fname),
               "/sys/devices/system/cpu/cpu%d/topology/core_siblings",cpu);
      f.open(fname);
      if(!f.good())
        return;

      if(physicalView)
      {
        list.push_back(cpu);
        f.close();
        continue;
      }

      // logical view
      getline(f,buffer);
      f.close();

      int len=buffer.length()-1;
      while(isspace(buffer[len]) && len>0)
        --len;

      // parse the hexadecimal sibling mask from least to most significant digit;
      // commas are group separators and must not consume a nibble
      int id=0,mask;
      for(int i=len;i>=0;--i)
      {
        if(buffer[i]==',')
          continue;

        if(buffer[i]!='0')
        {
          if(buffer[i]>='0' && buffer[i]<='9')
            mask=buffer[i]-'0';
          else if(buffer[i]>='a' && buffer[i]<='f')
            mask=10+buffer[i]-'a';
          else if(buffer[i]>='A' && buffer[i]<='F')
            mask=10+buffer[i]-'A';
          else
            throw runtime_error("invalid character in cpu mask");

          for(int j=0;j<4;++j)
          {
            if((mask & 1) && !CPU_ISSET(id+j,&cores))
            {
              list.push_back(id+j);
              CPU_SET(id+j,&cores); // don't count it twice!
            }
            mask>>=1;
          }
        }
        id+=4;
      }
    } // if(!CPU_ISSET(...))
}

/**
 * return the list of cores allocated to this process, ordered by
 * physical cpu
 */
void getAllocatedCoresByProcessorOrder(vector<unsigned short int> &allocatedCores)
{
  char fname[128];
  string buffer;
  ifstream f;
  cpu_set_t affinityMask;

  allocatedCores.clear();

  sched_getaffinity(0,sizeof(cpu_set_t),&affinityMask);

  for(unsigned int cpu=0;cpu<sizeof(cpu_set_t)<<3;++cpu)
    if(CPU_ISSET(cpu,&affinityMask))
    {
      snprintf(fname,sizeof(fname),
               "/sys/devices/system/cpu/cpu%d/topology/core_siblings",cpu);
      f.open(fname);
      if(!f.good())
        return;
      getline(f,buffer);
      f.close();

      int len=buffer.length()-1;
      while(isspace(buffer[len]) && len>0)
        --len;

      // same mask parsing as above: commas do not consume a nibble
      int id=0,mask;
      for(int i=len;i>=0;--i)
      {
        if(buffer[i]==',')
          continue;

        if(buffer[i]!='0')
        {
          if(buffer[i]>='0' && buffer[i]<='9')
            mask=buffer[i]-'0';
          else if(buffer[i]>='a' && buffer[i]<='f')
            mask=10+buffer[i]-'a';
          else if(buffer[i]>='A' && buffer[i]<='F')
            mask=10+buffer[i]-'A';
          else
            throw runtime_error("invalid character in cpu mask");

          for(int j=0;j<4;++j)
          {
            if((mask & 1) && CPU_ISSET(id+j,&affinityMask))
            {
              allocatedCores.push_back(id+j);
              CPU_CLR(id+j,&affinityMask); // don't count it twice!
            }
            mask>>=1;
          }
        }
        id+=4;
      }
    } // if(CPU_ISSET(...))
}

/**
 * get the list of cores allocated to this process
 */
void getAllocatedCores(vector<unsigned short int> &list, pid_t pid=0)
{
  cpu_set_t mask;

  list.clear();

  sched_getaffinity(pid,sizeof(cpu_set_t),&mask);

  for(unsigned int i=0;i<sizeof(cpu_set_t)<<3;++i)
    if(CPU_ISSET(i,&mask))
      list.push_back(i);
}

/**
 * print the list of cores allocated to this process
 * (getAllocatedCores must be called first).
 */
void printAllocatedCores(ostream &s, const vector<unsigned short int> &list)
{
  size_t end;

  // print maximal runs of consecutive core ids as "first-last"
  for(size_t beg=0;beg<list.size();beg=end)
  {
    end=beg+1;
    while(end<list.size() && list[end]==list[end-1]+1)
      ++end;

    if(beg!=0)
      s << ',';

    if(end==beg+1)
      s << list[beg];
    else
      s << list[beg] << '-' << list[end-1];
  }
}

/**
 * generate a cpu_set mask from a list of cores
 */
cpu_set_t affinityMask(const vector<unsigned short int> &cores)
{
  cpu_set_t mask;

  CPU_ZERO(&mask);

  for(size_t i=0;i<cores.size();++i)
    CPU_SET(cores[i],&mask);

  return mask;
}

#endif
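A minimal consumer of this header might look like the following. It assumes a Linux system, since both the /sys topology files and sched_getaffinity are Linux-specific, and uses only the functions defined above.

// Minimal sketch: report the cores this process is allowed to run on.
#include <iostream>
#include <vector>
#include "Cores.hh"

int main()
{
  std::vector<unsigned short int> cores;

  // Populate the list, then print it in compact "0-3,6,8-11" form.
  getAllocatedCores(cores);
  std::cout << "allocated cores: ";
  printAllocatedCores(std::cout, cores);
  std::cout << "\n";
}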
/**
 * Called when a {@link BoxListItem} is bound to a ViewHolder. Customizations of UI elements
 * should be done by overriding this method. If extending from a {@link BoxBrowseActivity}
 * a custom BoxBrowseFolder fragment can be returned in
 * {@link BoxBrowseActivity#createBrowseFolderFragment(BoxItem, BoxSession)}
 *
 * @param holder the BoxItemHolder
 */
protected void onBindBoxItemViewHolder(BoxItemViewHolder holder) {
    if (holder.getItem() == null || holder.getItem().getBoxItem() == null) {
        return;
    }

    // item is guaranteed non-null past the guard above.
    final BoxItem item = holder.getItem().getBoxItem();
    holder.getNameView().setText(item.getName());

    // Build the metadata line from the last-modified date.
    String modifiedAt = item.getModifiedAt() != null ?
            DateFormat.getDateInstance(DateFormat.SHORT).format(item.getModifiedAt()).toUpperCase() : "";
    String description = String.format(Locale.ENGLISH, DESCRIPTION, modifiedAt);
    mThumbnailManager.setThumbnailIntoView(holder.getThumbView(), item);

    holder.getMetaDescription().setText(description);
    holder.getProgressBar().setVisibility(View.GONE);
    holder.getMetaDescription().setVisibility(View.VISIBLE);
    holder.getThumbView().setVisibility(View.VISIBLE);

    if (!holder.getItem().getIsEnabled()) {
        holder.getView().setEnabled(false);
        holder.getNameView().setTextColor(getResources().getColor(com.box.androidsdk.browse.R.color.box_browsesdk_hint));
        holder.getMetaDescription().setTextColor(getResources().getColor(com.box.androidsdk.browse.R.color.box_browsesdk_disabled_hint));
        holder.getThumbView().setAlpha(0.26f);
    } else {
        holder.getView().setEnabled(true);
        holder.getNameView().setTextColor(getResources().getColor(com.box.androidsdk.browse.R.color.box_browsesdk_primary_text));
        holder.getMetaDescription().setTextColor(getResources().getColor(com.box.androidsdk.browse.R.color.box_browsesdk_hint));
        holder.getThumbView().setAlpha(1f);
    }
}
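As the javadoc notes, rows are customized by overriding this hook in a subclass. The sketch below follows that pattern; the fragment base class name and the shared-link check are assumptions based on the Box Android Browse SDK, not confirmed by this snippet.

// Hypothetical subclass that dims items with shared links. Assumes the
// Box Android Browse SDK's BoxBrowseFolderFragment exposes the hook above.
public class CustomBrowseFolderFragment extends BoxBrowseFolderFragment {
    @Override
    protected void onBindBoxItemViewHolder(BoxItemViewHolder holder) {
        super.onBindBoxItemViewHolder(holder); // apply default binding first
        BoxItem item = holder.getItem() != null ? holder.getItem().getBoxItem() : null;
        if (item != null && item.getSharedLink() != null) {
            holder.getView().setAlpha(0.5f); // custom treatment for shared items
        }
    }
}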
A 17-year-old German skier involved in a serious crash at Lake Louise Ski Resort on Tuesday has died. Alpine Canada confirmed Max Burkhart died in hospital on Wednesday.

The skier was taken to the Foothills Hospital in Calgary by STARS air ambulance after crashing into safety netting at about 2:20 p.m.

“Alpine Canada and Alberta Alpine are devastated about this tragic loss of life and sends its deepest condolences to his family and teammates,” Alpine Canada said in a release. “Alpine Canada and Alberta Alpine encourage the ski family around the world to support the athlete’s family and teammates through this difficult time.”

Burkhart was competing at the Nor-Am Cup when the accident happened. Emergency crews from Banff tried to treat the boy on the hill before the air ambulance arrived.

Lake Louise spokesperson Dan Markham called the death a “terrible tragedy,” adding the ski resort staff are sending their sympathies to Burkhart’s family and teammates.

RCMP said in a release Wednesday that the medical examiner is investigating his death.
#include <iostream>
#include <typeinfo>
#include <stdexcept>
#include <algorithm>
#include <array>
#include <vector>
#include <cassert>
#include <memory>
#include "mdarraygen.hh"
#include "fakearray.hh"

using namespace std;

int main()
{
  using D1234 = MDArrayGen<double,1,2,3,4>;

  cout << "extent count for array dim 1,2,3,4: " << D1234::ndims << "\n";
  cout << "first extent value for array dim 1,2,3,4: " << D1234::extent << "\n";
  cout << "-----\n";

  assert(D1234::ndims == 4);
  assert(D1234::extent == 1);
  assert(D1234::inner_t::ndims == 3);
  assert(D1234::inner_t::extent == 2);
  assert(D1234::inner_t::inner_t::ndims == 2);
  assert(D1234::inner_t::inner_t::extent == 3);
  assert(D1234::inner_t::inner_t::inner_t::ndims == 1);
  assert(D1234::inner_t::inner_t::inner_t::extent == 4);
  assert(D1234::inner_t::inner_t::inner_t::inner_t::ndims == 0);
  assert(D1234::inner_t::inner_t::inner_t::inner_t::extent == 0);

  // Make sure reference type works as expected.
  using F745 = MDArrayGen<float,7,4,5>;
  F745::type q;
  F745::reference q_ref(q);
  assert(q == q_ref);

  float (&rq)[7][4][5] = q; // hard-coded reference to data

  q[6][2][3] = 1.75; // set element using real data
  assert(q[6][2][3] == rq[6][2][3]);

  cout << "sizeof(F745) = " << sizeof(q) << "\n";
  assert(sizeof(q) == sizeof(float)*7*4*5);

  cout << "byte count of F745 = " << F745::size_bytes << "\n";
  assert(F745::size_bytes == sizeof(q));

  cout << "element count of F745 = " << F745::size_elements << "\n";
  assert(F745::size_elements == 7*4*5);

  cout << "type name for F745: " << typeid(F745::type).name() << "\n";
  assert(typeid(F745::type) == typeid(q));

  cout << "type name for ref to F745: " << typeid(F745::reference).name() << "\n";
  assert(typeid(F745::reference) == typeid(rq));

  cout << "type name for ptr to F745: " << typeid(F745::pointer).name() << "\n";
  assert(typeid(F745::pointer) == typeid(&q));

  // Make sure all the values in the array are accessible as if the
  // FakeArray was a real array.
  using Fake4d = FakeArray<double, 8,6,5,4>;
  assert(Fake4d::size_bytes == 8*6*5*4*sizeof(double));

  // Make sure Fake4d::size_bytes is a constant expression.
  typedef std::array<char, Fake4d::size_bytes> this_should_compile;
  assert(sizeof(this_should_compile) == Fake4d::size_bytes);

  // Allocate buffer for storage of the array elements, and fill with
  // known data.
  size_t nelements = Fake4d::size_bytes/sizeof(double);

  // The unique_ptr will delete the buffer, so we don't have to.
  unique_ptr<double[]> buffer { new double[nelements] };
  double* data = buffer.get();

  int init = 1;
  for (double *b = data, *e = data+nelements; b!=e; ++b)
    *b = (init++)*1.5;

  // Put 4d array on top of buffer
  Fake4d f4(data);
  cout << "after f4" << endl;

  // Check every element through the full 8x6x5x4 extents.
  for (int i = 0; i != 8; ++i)
    for (int j = 0; j != 6; ++j)
      for (int k = 0; k != 5; ++k)
        for (int l = 0; l != 4; ++l)
          assert(f4.data[i][j][k][l] == f4[i][j][k][l]);

  f4[7][5][4][2] = 1.5;
  cout << "FakeArray: element 7,5,4,2 using cast should be 1.5, value is "
       << f4[7][5][4][2] << "\n";

  // obtain reference to buffer as 4d array
  auto f4a = make_fake_array<double, 8,6,5,4>(data);
  cout << "make_fake_array: element 7,5,4,2 should be 1.5, value is "
       << f4a[7][5][4][2] << "\n";

  for (int i = 0; i != 8; ++i)
    for (int j = 0; j != 6; ++j)
      for (int k = 0; k != 5; ++k)
        for (int l = 0; l != 4; ++l)
          assert(f4[i][j][k][l] == f4a[i][j][k][l]);

  // Test extent checker.
  std::vector<size_t> ext = { 8,6,5,4 };
  std::vector<size_t> ext_wrong = { 9,6,5,4 };
  std::vector<size_t> other_wrong = { 8, 6 };

  cout << "matching extents for 8,6,5,4 using 8,6,5,4 -> "
       << verifyExtents<8,6,5,4>(ext) << "\n";
  assert((verifyExtents<8,6,5,4>(ext)));
  assert((!verifyExtents<8,6,5,4>(ext_wrong)));

  cout << "matching extents for 8,6,5,4 using 9,6,5,4 -> "
       << verifyExtents<8,6,5,4>(ext_wrong) << "\n";
  assert((!verifyExtents<8,6,5,4>(other_wrong)));
}
// CloseEngine closes backend engine by uuid.
func (local *local) CloseEngine(ctx context.Context, cfg *backend.EngineConfig, engineUUID uuid.UUID) error {
	engineI, ok := local.engines.Load(engineUUID)
	if !ok {
		// The engine is not in memory (e.g. after a restart): reopen its
		// local DB read-only so the engine metadata can be recovered.
		db, err := local.openEngineDB(engineUUID, true)
		if err != nil {
			return err
		}
		engine := &Engine{
			UUID:               engineUUID,
			db:                 db,
			sstMetasChan:       make(chan metaOrFlush),
			tableInfo:          cfg.TableInfo,
			keyAdapter:         local.keyAdapter,
			duplicateDetection: local.duplicateDetection,
			duplicateDB:        local.duplicateDB,
			errorMgr:           local.errorMgr,
		}
		engine.sstIngester = dbSSTIngester{e: engine}
		if err = engine.loadEngineMeta(); err != nil {
			return err
		}
		local.engines.Store(engineUUID, engine)
		return nil
	}

	engine := engineI.(*Engine)
	engine.rLock()
	if engine.closed.Load() {
		// Closing an already-closed engine is a no-op.
		engine.rUnlock()
		return nil
	}

	// Flush any pending writes before marking the engine closed.
	err := engine.flushEngineWithoutLock(ctx)
	engine.rUnlock()

	engine.lock(importMutexStateClose)
	engine.closed.Store(true)
	close(engine.sstMetasChan)
	engine.unlock()
	if err != nil {
		return errors.Trace(err)
	}

	// Wait for background ingestion to drain, then surface any ingest error.
	engine.wg.Wait()
	return engine.ingestErr.Get()
}