content
stringlengths
10
4.9M
Dallas Cowboys quarterback Tony Romo throws a pass to fans while they played jackpot during the Core Power Tony Romo Experience at Gerald J. Ford Stadium at SMU in Dallas on Monday, June, 8, 2015. (Michael Reaves/The Dallas Morning News) briefbegjune He has been a Cowboy for the last 13 years. So when Romo was asked what he's going to be doing in 10 years, some might think the Dallas quarterback's answer was...a bit optimistic. "I'd like to still be playing football," Romo told Madison.com while in Wisconsin for his youth football camp. "Maybe 10 years is stretching it a little. We'll see. "The reality of it is that time usually tells you when you're done. I know I'm playing some of my best football this offseason. It's been very enjoyable. I'm pretty excited about what lies ahead." For those keeping track at home, former Cowboy Vinny Testaverde retired at 44, as did Warren Moon.
 /* Copyright (c) 2014-present Maximus5 All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the authors may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #define HIDE_USE_EXCEPTION_INFO #define SHOWDEBUGSTR #include <iostream> #include "Header.h" #include "ConEmu.h" #include "Match.h" #include "RConData.h" #include <unordered_set> #include "../UnitTests/gtest.h" #define NEED_FIX_FAILED_TEST L"\1" TEST(Match, Hyperlinks) { CEStr szDir; GetDirectory(szDir); std::wstring lastCheckedFile; std::unordered_set<std::wstring> knownFiles = { L"license.txt", L"portable.txt", L"whatsnew-conemu.txt", L"abc.cpp", L"def.h", LR"(c:\abc.xls)", L"file.ext", L"makefile", LR"(c:\sources\conemu\realconsole.cpp)", L"defresolve.cpp", L"conemuc.cpp", L"1.c", L"file.cpp", LR"(common\pipeserver.h)", LR"(c:\sources\farlib\farctrl.pas)", L"farctrl.pas", L"script.ps1", LR"(c:\tools\release.ps1)", L"abc.py", L"/src/class.c", LR"(c:\vc\unicode_far\macro.cpp)", }; CMatch match([&knownFiles, &lastCheckedFile](LPCWSTR asSrc, CEStr&) { lastCheckedFile = asSrc; const auto found = knownFiles.count(lastCheckedFile); return found; }); struct TestMatch { LPCWSTR src{ nullptr }; ExpandTextRangeType etr{}; bool bMatch{ false }; LPCWSTR matches[5]{}; LPCWSTR pszTestCurDir{ nullptr }; } tests[] = { // Hyperlinks // RA layer request failed: PROPFIND request failed on '/svn': PROPFIND of '/svn': could // not connect to server (http://farmanager.googlecode.com) at /usr/lib/perl5/site_perl/Git/SVN.pm line 148 // 1. Must not match last bracket, dot, comma, semicolon, etc. // 2. If url exceeds the line, must request from owner additional data // if it is Far editor - the line must match the screen (no "tab" chars) {L"\t" L"(http://abc.com) <http://qwe.com> [http://rty.com] {http://def.com}" L"\t", etr_AnyClickable, true, {L"http://abc.com", L"http://qwe.com", L"http://rty.com", L"http://def.com"}}, {L"\t" L"(http://abc.com) http://qwe.com; http://rty.com, http://def.com." 
L"\t", etr_AnyClickable, true, {L"http://abc.com", L"http://qwe.com", L"http://rty.com", L"http://def.com"}}, {L"\t" L"text··http://www.abc.com/q?q··text" L"\t", // this line contains '·' which are visualisations of spaces in Far editor etr_AnyClickable, true, {L"http://www.abc.com/q?q"}}, {L"\t" L"file://c:\\temp\\qqq.html" L"\t", etr_AnyClickable, true, {L"file://c:\\temp\\qqq.html"}}, {L"\t" L"file:///c:\\temp\\qqq.html" L"\t", etr_AnyClickable, true, {L"file:///c:\\temp\\qqq.html"}}, {L"\t" L"http://www.farmanager.com" L"\t", etr_AnyClickable, true, {L"http://www.farmanager.com"}}, {L"\t" L"$ http://www.KKK.ru - some text - more text" L"\t", etr_AnyClickable, true, {L"http://www.KKK.ru"}}, {L"\t" L"C:\\ConEmu>http://www.KKK.ru - ..." L"\t", etr_AnyClickable, true, {L"http://www.KKK.ru"}}, {L"\t" L"http:/go/fwlink/?LinkID=1234." L"\t", etr_AnyClickable, true, {L"http:/go/fwlink/?LinkID=1234"}}, {L"\t" L"http://go/fwlink/?LinkID=1234." L"\t", etr_AnyClickable, true, {L"http://go/fwlink/?LinkID=1234"}}, // Just a text files {L"\t" L"License.txt Portable.txt WhatsNew-ConEmu.txt" L"\t", etr_AnyClickable, true, {L"License.txt", L"Portable.txt", L"WhatsNew-ConEmu.txt"}, gpConEmu->ms_ConEmuBaseDir}, {L"\t" L"License.txt:err" L"\t", etr_AnyClickable, true, {L"License.txt"}, gpConEmu->ms_ConEmuBaseDir}, {L"\t" L" \" abc.cpp \" \"def.h\" " L"\t", etr_AnyClickable, true, {L"abc.cpp", L"def.h"}}, {NEED_FIX_FAILED_TEST L"\t" L"class.func('C:\\abc.xls')" L"\t", etr_AnyClickable, true, {L"C:\\abc.xls"}}, {NEED_FIX_FAILED_TEST L"\t" L"class::func('C:\\abc.xls')" L"\t", etr_AnyClickable, true, {L"C:\\abc.xls"}}, {L"\t" L"file.ext 2" L"\t", etr_AnyClickable, true, {L"file.ext"}}, {L"\t" L"makefile" L"\t", etr_AnyClickable, true, {L"makefile"}}, // -- VC {NEED_FIX_FAILED_TEST L"\t" L"1>c:\\sources\\conemu\\realconsole.cpp(8104) : error C2065: 'qqq' : undeclared identifier" L"\t", etr_AnyClickable, true, {L"c:\\sources\\conemu\\realconsole.cpp(8104)"}}, {NEED_FIX_FAILED_TEST L"\t" 
L"DefResolve.cpp(18) : error C2065: 'sdgagasdhsahd' : undeclared identifier" L"\t", etr_AnyClickable, true, {L"DefResolve.cpp(18)"}}, {NEED_FIX_FAILED_TEST L"\t" L"DefResolve.cpp(18): warning: note xxx" L"\t", etr_AnyClickable, true, {L"DefResolve.cpp(18)"}}, {NEED_FIX_FAILED_TEST L"\t" L"C:\\Program Files (x86)\\Microsoft Visual Studio\\include\\intrin.h:56:1: error: expected function body" L"\t", etr_AnyClickable, true, {L"C:\\Program Files (x86)\\Microsoft Visual Studio\\include\\intrin.h:56:1"}}, // -- GCC {L"\t" L"ConEmuC.cpp:49: error: 'qqq' does not name a type" L"\t", etr_AnyClickable, true, {L"ConEmuC.cpp:49"}}, {L"\t" L"1.c:3: some message" L"\t", etr_AnyClickable, true, {L"1.c:3"}}, {L"\t" L"file.cpp:29:29: error" L"\t", etr_AnyClickable, true, {L"file.cpp:29"}}, // CPP Check {L"\t" L"[common\\PipeServer.h:1145]: (style) C-style pointer casting" L"\t", etr_AnyClickable, true, {L"common\\PipeServer.h:1145"}}, // Delphi {L"\t" L"c:\\sources\\FarLib\\FarCtrl.pas(1002) Error: Undeclared identifier: 'PCTL_GETPLUGININFO'" L"\t", etr_AnyClickable, true, {L"c:\\sources\\FarLib\\FarCtrl.pas(1002)"}}, // FPC {L"\t" L"FarCtrl.pas(1002,49) Error: Identifier not found 'PCTL_GETPLUGININFO'" L"\t", etr_AnyClickable, true, {L"FarCtrl.pas(1002,49)"}}, // PowerShell {L"\t" L"Script.ps1:35 знак:23" L"\t", etr_AnyClickable, true, {L"Script.ps1:35"}}, {L"\t" L"At C:\\Tools\\release.ps1:12 char:8" L"\t", etr_AnyClickable, true, {L"C:\\Tools\\release.ps1:12"}}, // -- Possible? 
{L"\t" L"abc.py (3): some message" L"\t", etr_AnyClickable, true, {L"abc.py (3)"}}, // ASM - should highlight "test.asasm(1,1)" {NEED_FIX_FAILED_TEST L"\t" L"<EMAIL>(1239): test.asasm(1,1):" L"\t", etr_AnyClickable, true, {L"<EMAIL>(1239)", L"test.asasm(1,1)"}}, // Issue 1594 {L"\t" L"/src/class.c:123:m_func(...)" L"\t", etr_AnyClickable, true, {L"/src/class.c:123"}}, {L"\t" L"/src/class.c:123: m_func(...)" L"\t", etr_AnyClickable, true, {L"/src/class.c:123"}}, // -- False detects {L"\t" L"29.11.2011 18:31:47" L"\t", etr_AnyClickable, false, {}}, {L"\t" L"C:\\VC\\unicode_far\\macro.cpp 1251 Ln 5951/8291 Col 51 Ch 39 0043h 13:54" L"\t", etr_AnyClickable, true, {L"C:\\VC\\unicode_far\\macro.cpp"}}, {L"\t" L"InfoW1900->SettingsControl(sc.Handle, SCTL_FREE, 0, 0);" L"\t", etr_AnyClickable, false, {}}, {L"\t" L"m_abc.func(1,2,3)" L"\t", etr_AnyClickable, false, {}}, }; auto intersect = [](const int start1, const int end1, const int start2, const int end2) { // [1, 4) [4, 10) if (end1 <= start2) return false; if (end2 <= start1) return false; if (start2 >= end1) return false; if (start1 >= end2) return false; // [1, 4) [3, 8) // [4, 8) [1, 6) // [4, 8) [1, 10) // [1, 10) [4, 8) return true; }; EXPECT_FALSE(intersect(1, 4, 4, 10)); EXPECT_FALSE(intersect(4, 10, 1, 4)); EXPECT_TRUE(intersect(1, 4, 3, 8)); EXPECT_TRUE(intersect(4, 8, 1, 6)); EXPECT_TRUE(intersect(4, 8, 1, 10)); EXPECT_TRUE(intersect(1, 10, 4, 8)); auto unitTestMatch = [&match](ExpandTextRangeType etr, LPCWSTR asLine, int anLineLen, int anMatchStart, int anMatchEnd, LPCWSTR asMatchText) { // ReSharper disable CppJoinDeclarationAndAssignment int iRc, iCmp; CRConDataGuard data; for (int i = anMatchStart; i <= anMatchEnd; i++) { iRc = match.Match(etr, asLine, anLineLen, i, data, 0); if (iRc <= 0) { FAIL() << L"Match: must be found; line=" << asLine << L"; match=" << asMatchText; // ReSharper disable once CppUnreachableCode break; } if (match.mn_MatchLeft != anMatchStart || match.mn_MatchRight != anMatchEnd) { 
FAIL() << L"Match: do not match required range; line=" << asLine << L"; match=" << asMatchText; // ReSharper disable once CppUnreachableCode break; } iCmp = lstrcmp(match.ms_Match, asMatchText); if (iCmp != 0) { FAIL() << L"Match: iCmp != 0; line=" << asLine << L"; match=" << asMatchText; // ReSharper disable once CppUnreachableCode break; } } }; auto unitTestNoMatch = [&match, &intersect](ExpandTextRangeType etr, LPCWSTR asLine, int anLineLen, int anStart, int anEnd) { int iRc; CRConDataGuard data; for (int i = anStart; i <= anEnd; i++) { iRc = match.Match(etr, asLine, anLineLen, i, data, 0); if (etr == etr_AnyClickable && iRc > 0) { FAIL() << L"Match: must NOT be found; line=" << asLine << L" in=[" << match.mn_MatchLeft << L"," << match.mn_MatchRight << L") from=" << i; // ReSharper disable once CppUnreachableCode break; } } }; for (const auto& test : tests) { if (test.src[0] == NEED_FIX_FAILED_TEST[0]) { wcdbg("FIX_ME") << (test.src + wcslen(NEED_FIX_FAILED_TEST)) << std::endl; continue; } int nStartIdx; const int iSrcLen = lstrlen(test.src) - 1; _ASSERTE(test.src && test.src[iSrcLen] == L'\t'); // Loop through matches int iMatchNo = 0, iPrevStart = 0; while (true) { if (test.bMatch) { const int iMatchLen = lstrlen(test.matches[iMatchNo]); const auto* pszFirst = wcsstr(test.src, test.matches[iMatchNo]); _ASSERTE(pszFirst); nStartIdx = static_cast<int>(pszFirst - test.src); unitTestNoMatch(test.etr, test.src, iSrcLen, iPrevStart, nStartIdx - 1); iPrevStart = nStartIdx + iMatchLen; unitTestMatch(test.etr, test.src, iSrcLen, nStartIdx, iPrevStart - 1, test.matches[iMatchNo]); } else { // ReSharper disable once CppAssignedValueIsNeverUsed nStartIdx = 0; unitTestNoMatch(test.etr, test.src, iSrcLen, 0, iSrcLen); break; } // More matches waiting? 
if (test.matches[++iMatchNo] == nullptr) { unitTestNoMatch(test.etr, test.src, iSrcLen, iPrevStart, iSrcLen); break; } std::ignore = nStartIdx; } //_ASSERTE(iRc == lstrlen(p->txtMatch)); //_ASSERTE(match.m_Type == p->etrMatch); } ::SetCurrentDirectoryW(szDir); } TEST(Match, Words) { auto testMatch = [](const wchar_t* source, const int from, const int to, const wchar_t* expected) { CMatch match([](LPCWSTR asSrc, CEStr&) {return false; }); CRConDataGuard dummyData; for (int i = from; i <= to; ++i) { const int rcLen = match.Match(etr_Word, source, lstrlen(source), i, dummyData, 0); EXPECT_LT(0, rcLen); EXPECT_STREQ(match.ms_Match.c_str(L""), expected) << L"source=" << source << L" from=" << i; } }; const wchar_t wxiWarning[] = LR"(test C:\SRC\Setup\ConEmu_Conditions.wxi(8): warning)"; // #TODO Should be without "(8)" ending testMatch(wxiWarning, 6, 38, LR"(C:\SRC\Setup\ConEmu_Conditions.wxi(8))"); const wchar_t dirFolderInfo[] = L"15.03.2021 00:18 <DIR> .del-git "; testMatch(dirFolderInfo, 0, 9, L"15.03.2021"); testMatch(dirFolderInfo, 12, 16, L"00:18"); // #TODO should be only "<" testMatch(dirFolderInfo, 21, 21, L"<DIR>"); // #TODO should be only "DIR" testMatch(dirFolderInfo, 22, 22, L"<DIR>"); // #TODO should be only "DIR" testMatch(dirFolderInfo, 23, 24, L"DIR"); // #TODO should be either "<DIR>" or ">" testMatch(dirFolderInfo, 25, 25, L"DIR"); // #TODO should be only " " testMatch(dirFolderInfo, 26, 26, L"DIR> "); testMatch(dirFolderInfo, 27, 34, L" "); // #TODO should be only " " testMatch(dirFolderInfo, 35, 35, L" .del-git"); testMatch(dirFolderInfo, 36, 43, L".del-git"); // #TODO should be only " " testMatch(dirFolderInfo, 44, 44, L".del-git "); }
// ParseAndValidate parses the request and applies data validation. func ParseAndValidate(ctx *echo.Context, target interface{}) error { if err := (*ctx).Bind(target); err != nil { (*ctx).Logger().Error(err) return echo.NewHTTPError( http.StatusBadRequest, "invalid parameter type was passed", ) } if err := (*ctx).Validate(target); err != nil { return err } return nil }
<gh_stars>0 package com.sn.openfeign; import com.sn.IHelloService; import org.springframework.cloud.openfeign.FeignClient; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestParam; import java.util.List; // value 指定要调用的服务名称 // fallback 指定Hystrix对应的服务降级处理类 // fallbackFactory 和 fallback 的用途一样,定义一个即可 @FeignClient(value = "provider", /*fallback = HelloServiceFallback.class, */ fallbackFactory = HelloServiceFallbackFactory.class) public interface HelloService extends IHelloService { // 指定目标服务的接口,方法名可任意指定 @GetMapping("/hello") String hello(); @GetMapping("/data/{ids}") List<String> getDataByIds(@PathVariable("ids") String ids); // key-value格式传递数据时,需要添加@RequestParam @GetMapping("/hello2") String hello2(@RequestParam("name") String name); // 使用@RequestHeader传递中文需要编码,接收服务则需要转码 @GetMapping("/hello3") String hello3(@RequestHeader("name") String name); }
<reponame>chendouble/midpoint package com.evolveum.midpoint.web.component.prism; public abstract class PrismWrapper { private boolean showEmpty; private boolean minimalized; private boolean sorted; private boolean showMetadata; public boolean isMinimalized() { return minimalized; } public void setMinimalized(boolean minimalized) { this.minimalized = minimalized; } public boolean isSorted() { return sorted; } public void setSorted(boolean sorted) { this.sorted = sorted; } public boolean isShowMetadata() { return showMetadata; } public void setShowMetadata(boolean showMetadata) { this.showMetadata = showMetadata; } public boolean isShowEmpty() { return showEmpty; } public void setShowEmpty(boolean showEmpty, boolean recursive) { this.showEmpty = showEmpty; computeStripes(); } public abstract void computeStripes(); }
<filename>client/src/elements/Input/Input.tsx /* eslint-disable indent */ /* eslint-disable react/jsx-max-props-per-line */ import React, { forwardRef } from 'react'; import colors from 'Styles/color-variables'; import styled, { css } from 'styled-components'; import WarningIcon from '@material-ui/icons/Warning'; interface InputProps { type: 'email' | 'password'; label?: string | null; error?: boolean; errorMessage?: string | null; placeholder: string; value?: string; onChange: (e: React.ChangeEvent<HTMLInputElement>) => void; } const Input = forwardRef<HTMLInputElement, InputProps>( ( { label = null, errorMessage = null, error = false, ...rests }: InputProps, ref, ) => { return ( <Container> {label && <Label>{label}</Label>} <InputBoxWrapper> <InputBox ref={ref} error={error} {...rests} /> </InputBoxWrapper> {errorMessage && ( <Error> <WarningIcon fontSize="small" /> {errorMessage} </Error> )} </Container> ); }, ); Input.displayName = 'Input'; export default Input; const Container = styled.div` display: flex; flex-direction: column; `; const Label = styled.label` color: ${colors.black}; font-weight: bold; font-size: 15px; line-height: 15px; margin-bottom: 13px; margin-top: 13px; `; const Error = styled.div` margin-bottom: 5px; color: ${colors.inputRed}; font-size: 12px; display: flex; align-items: center; margin-top: 3px; font-weight: bold; animation: fadein 0.4s; @keyframes fadein { from { opacity: 0; } to { opacity: 1; } } `; const InputBoxWrapper = styled.div``; const InputBox = styled.input<{ error: boolean }>` background: ${colors.white}; border: 1px solid ${colors.gray[500]}; border-radius: 3px; width: 323px; height: 46px; padding: 0px; font-size: 16px; padding: 0 10px; &::placeholder { font-size: 13px; color: ${colors.gray[800]}; } &:focus { box-shadow: 0 0 2px 2px ${colors.inputBlue}; outline: none; } ${({ error }) => error && css` box-shadow: 0 0 2px 2px ${colors.inputRed}; outline: none; &:focus { box-shadow: 0 0 2px 2px ${colors.inputRed}; outline: 
none; } animation: fadein 0.4s; @keyframes fadein { from { opacity: 0; } to { opacity: 1; } } `} `;
Robots in special education: reasons for low uptake Purpose The purpose of this paper is to identify the main reasons for low uptake of robots in special education (SE), obtained from an analysis of previous studies that used robots in the area, and from interviewing SE teachers about the topic. Design/methodology/approach An analysis of 18 studies that used robots in SE was performed, and the conclusions were complemented and compared with the feedback from interviewing 13 SE teachers from Spain and the UK about the reasons they believed caused the low uptake of robots in SE classrooms. Findings Five main reasons why SE schools do not normally use robots in their classrooms were identified: the inability to acquire the system due to its price or availability; its difficulty of use; the low range of activities offered; the limited ways of interaction offered; and the inability to use different robots with the same software. Originality/value Previous studies focussed on exploring the advantages of using robots to help children with autism spectrum conditions and learning disabilities. This study takes a step further and looks into the reasons why, despite the benefits shown, robots are rarely used in real-life settings after the relevant study ends. The authors also present a potential solution to the issues found: involving end users in the design and development of new systems using a user-centred design approach for all the components, including methods of interaction, learning activities and the most suitable type of robots.
<filename>server/lib/emote/emote.config.ts export default { "SERVER_GETS": { "EMOTE": {"name": "emoted", "log": true} }, "CLIENT_GETS": { "EMOTE": {"name": "actor_emote"} } }
<gh_stars>1-10 export interface EventAppContext { isBrowser: boolean; }
<filename>src/controllers/offline.map.ts import { IOfflineProps } from "../components/offline/Container.if"; import { IGlobalState } from "../state/interface"; const mapStateToProps = (state: IGlobalState): IOfflineProps => { const timeFormat = new Intl.DateTimeFormat('en', { hour: 'numeric', minute: 'numeric' }); return { title: "Connection Lost!", subtitle: "Last online at " + timeFormat.format(state.OnlineStatus.dateWasLastOnline), iconName: "plug", }; } export default mapStateToProps;
/** * Logical "and" used in the composite pattern to represent column condition. Contains subcondition * that are concatenated by "and". * * @author Jens Ehrlich */ @JsonTypeName("ColumnConditionAnd") public class ColumnConditionAnd implements ColumnCondition { protected boolean isNegated = false; protected float coverage = Float.NaN; protected Set<ColumnCondition> columnValues; /** * Exists for Gwt serialization */ protected ColumnConditionAnd() { this.columnValues = new TreeSet<>(); } public ColumnConditionAnd(Map<ColumnIdentifier, String> conditionMap) { this(); for (ColumnIdentifier column : conditionMap.keySet()) { columnValues.add(new ColumnConditionValue(column, conditionMap.get(column))); } } public ColumnConditionAnd(TreeSet<ColumnCondition> treeSet) { this.columnValues = new TreeSet<>(treeSet); } public ColumnConditionAnd(ColumnCondition... conditions) { this(); for (ColumnCondition condition : conditions) { this.columnValues.add(condition); } } public Set<ColumnCondition> getColumnValues() { return columnValues; } public void setColumnValues(Set<ColumnCondition> columnValues) { this.columnValues = columnValues; } public boolean isNegated() { return isNegated; } public void setNegated(boolean isNegated) { this.isNegated = isNegated; } @Override public float getCoverage() { if (Float.isNaN(this.coverage)) { float coverage = Float.MAX_VALUE; for (ColumnCondition subCondition : this.columnValues) { if (coverage > subCondition.getCoverage()) { coverage = subCondition.getCoverage(); } } return coverage; } else { return this.coverage; } } @Override public void setCoverage(float coverage) { this.coverage = coverage; } @Override public ColumnCondition add(ColumnCondition condition) { this.columnValues.add(condition); return this; } @Override @JsonIgnore public TreeSet<ColumnIdentifier> getContainedColumns() { TreeSet<ColumnIdentifier> result = new TreeSet<>(); for (ColumnCondition subElement : this.columnValues) { result.addAll(subElement.getContainedColumns()); 
} return result; } @Override @JsonIgnore public List<Map<ColumnIdentifier, String>> getPatternConditions() { List<Map<ColumnIdentifier, String>> result = new LinkedList<>(); Map<ColumnIdentifier, String> condition = new TreeMap<>(); for (ColumnCondition columnCondition : this.columnValues) { condition.putAll(columnCondition.getPatternConditions().get(0)); } result.add(condition); return result; } @Override public int compareTo(ColumnCondition o) { if (o instanceof ColumnConditionAnd) { ColumnConditionAnd other = (ColumnConditionAnd) o; int lengthComparison = this.columnValues.size() - other.columnValues.size(); if (lengthComparison != 0) { return lengthComparison; } else { Iterator<ColumnCondition> otherIterator = other.columnValues.iterator(); int equalCount = 0; while (otherIterator.hasNext()) { ColumnCondition currentOther = otherIterator.next(); // because the order of the single column values can differ, // you have to compare all permutations for (ColumnCondition currentThis : this.columnValues) { int currentComparison = currentThis.compareTo(currentOther); if (currentComparison == 0) { equalCount++; } } } if (equalCount == this.columnValues.size()) return 0; else return 1; } } else { //and always last return 1; } } @Override public String toString() { String delimiter = " " + AND + " "; StringBuilder builder = new StringBuilder(); if (isNegated) { builder.append(NOT); } builder.append(OPEN_BRACKET); for (ColumnCondition value : this.columnValues) { builder.append(value.toString()); builder.append(delimiter); } return builder.substring(0, builder.length() - delimiter.length()) .concat(CLOSE_BRACKET); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } ColumnConditionAnd that = (ColumnConditionAnd) o; if (isNegated != that.isNegated) { return false; } if (columnValues != null ? 
!columnValues.equals(that.columnValues) : that.columnValues != null) { return false; } return true; } @Override public int hashCode() { int result = (isNegated ? 1 : 0); result = 31 * result + (columnValues != null ? columnValues.hashCode() : 0); return result; } }
#include<stdio.h> int main() { int a,b,c,count=0,i; scanf("%d %d %d",&a,&b,&c); for(i=1;i<=a;i++){ if(i%10+(i/10)%10+(i/100)%10+(i/1000)%10+(i/10000)%10>=b&&i%10+(i/10)%10+(i/100)%10+(i/1000)%10+(i/10000)%10<=c){ count= count+i; } } printf("%d",count); return 0; }
import React from 'react'; import api from '../services/Api' import HelloLaunch from '../components/HelloLaunch'; import ServerResponse from '../types/ServerResponse'; const Home = ({ message }: { message: string }) => ( <HelloLaunch message={message} /> ); export default Home; interface HomeResponse extends ServerResponse { data: { message: string } } export async function getServerSideProps() { const props = { message: '' }; await api.get('/') .then((res: HomeResponse) => { if (res.status === 200) { props.message = res.data.message } else { props.message = 'Some error has occurred.' } }) return { props, }; }
/** * @param queueKey * Key from the work queue * @return Components which created the queue key */ public static Entry<String,ReplicationTarget> fromQueueKey(String queueKey) { requireNonNull(queueKey); int index = queueKey.indexOf(KEY_SEPARATOR); if (index == -1) { throw new IllegalArgumentException( "Could not find expected separator in queue key '" + queueKey + "'"); } String filename = queueKey.substring(0, index); int secondIndex = queueKey.indexOf(KEY_SEPARATOR, index + 1); if (secondIndex == -1) { throw new IllegalArgumentException( "Could not find expected separator in queue key '" + queueKey + "'"); } int thirdIndex = queueKey.indexOf(KEY_SEPARATOR, secondIndex + 1); if (thirdIndex == -1) { throw new IllegalArgumentException( "Could not find expected separator in queue key '" + queueKey + "'"); } return Maps.immutableEntry(filename, new ReplicationTarget(queueKey.substring(index + 1, secondIndex), queueKey.substring(secondIndex + 1, thirdIndex), TableId.of(queueKey.substring(thirdIndex + 1)))); }
from .views import api from .wechat import wechat from .sms import send_code from .selectize import load_symbols __all__ = ['api', 'wechat', 'send_code', 'load_symbols']
/** * * @author Jaroslav Tulach <[email protected]> */ public class NamedServiceProcessorTest extends NbTestCase { public NamedServiceProcessorTest(String n) { super(n); } @Override protected void setUp() throws Exception { clearWorkDir(); } public void testNamedDefinition() throws Exception { System.setProperty("executed", "false"); String content = "import " + RunTestReg.class.getCanonicalName() + ";\n" + "@RunTestReg(position=10,when=\"now\")\n" + "public class Test implements Runnable {\n" + " public void run() { System.setProperty(\"executed\", \"true\"); }\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); assertTrue("Compiles OK", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, System.err) ); URLClassLoader l = new URLClassLoader(new URL[] { getWorkDir().toURI().toURL() }, NamedServiceProcessorTest.class.getClassLoader()); Lookup lkp = Lookups.metaInfServices(l, "META-INF/namedservices/runtest/now/below/"); for (Runnable r : lkp.lookupAll(Runnable.class)) { r.run(); } assertEquals("Our runnable was executed", "true", System.getProperty("executed")); } public void testNamedDefinitionWithArray() throws Exception { System.setProperty("executed", "false"); String content = "import " + RunTestArray.class.getCanonicalName() + ";\n" + "@RunTestArray(position=10,array={\"now\", \"then\" })\n" + "public class Test implements Runnable {\n" + " public void run() { System.setProperty(\"executed\", \"true\"); }\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); assertTrue("Compiles OK", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, System.err) ); URLClassLoader l = new URLClassLoader(new URL[] { getWorkDir().toURI().toURL() }, NamedServiceProcessorTest.class.getClassLoader()); Lookup lkp = Lookups.metaInfServices(l, "META-INF/namedservices/runtest/now/"); for (Runnable r : lkp.lookupAll(Runnable.class)) { r.run(); } assertEquals("Our 
runnable was executed", "true", System.getProperty("executed")); System.setProperty("executed", "false"); Lookup lkp2 = Lookups.metaInfServices(l, "META-INF/namedservices/runtest/then/"); for (Runnable r : lkp2.lookupAll(Runnable.class)) { r.run(); } assertEquals("Our runnable was executed again", "true", System.getProperty("executed")); } public void testDoesNotImplementInterfaces() throws Exception { System.setProperty("executed", "false"); String content = "import " + RunTestReg.class.getCanonicalName() + ";\n" + "@RunTestReg(position=10,when=\"now\")\n" + "public class Test {\n" + " public void run() { System.setProperty(\"executed\", \"true\"); }\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("java.lang.Runnable") == -1) { fail("The error messages should say something about interface Runnable\n" + err); } if (err.indexOf("Callable") == -1) { fail("The error messages should say something about interface Callable\n" + err); } } public void testDoesImplementInterface() throws Exception { System.setProperty("executed", "false"); String content = "import " + RunTestReg.class.getCanonicalName() + ";\n" + "import java.util.concurrent.Callable;\n" + "@RunTestReg(position=10,when=\"now\")\n" + "public class Test implements Callable<Boolean> {\n" + " public Boolean call() { return true; }\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); assertTrue("Compilation succeeds", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, System.err) ); } public void testMissingPathAttribute() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + 
"@NamedServiceDefinition(path=\"runtest/@when()/below\",serviceType=Runnable.class)\n" + "public @interface Test {\n" + " String noWhenAttributeHere();" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("@when()") == -1) { fail("The error messages should say something about missing @when\n" + err); } } public void testNonStringPathAttribute() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + "@NamedServiceDefinition(path=\"runtest/@when()/below\",serviceType=Runnable.class)\n" + "public @interface Test {\n" + " int when();" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("@when()") == -1) { fail("The error messages should say something about missing @when\n" + err); } } public void testNonExistentPositionAttribute() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + "@NamedServiceDefinition(path=\"fixed\",serviceType=Runnable.class,position=\"where\")\n" + "public @interface Test {\n" + " int when();" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("where") == -1) { fail("The error messages should say something about 
missing where\n" + err); } } public void testNonIntegerPositionAttribute() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + "@NamedServiceDefinition(path=\"fixed\",serviceType=Runnable.class,position=\"where\")\n" + "public @interface Test {\n" + " Class<?> where();" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("where") == -1) { fail("The error messages should say something about missing where\n" + err); } } public void testMissingRetention() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + "@NamedServiceDefinition(path=\"fixed\",serviceType=Object.class)\n" + "@java.lang.annotation.Target(java.lang.annotation.ElementType.TYPE)\n" + "public @interface Test {\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("specify @Retention") == -1) { fail("The error messages should say something about missing where\n" + err); } if (err.indexOf("specify @Target") != -1) { fail("Be silent about @Target\n" + err); } } public void testMissingTarget() throws Exception { String content = "import org.openide.util.lookup.NamedServiceDefinition;\n" + "@NamedServiceDefinition(path=\"fixed\",serviceType=Object.class)" + "@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.SOURCE)\n" + "public @interface Test {\n" + "}\n"; AnnotationProcessorTestUtils.makeSource(getWorkDir(), "x.Test", content); 
ByteArrayOutputStream os = new ByteArrayOutputStream(); assertFalse("Compilation fails", AnnotationProcessorTestUtils.runJavac(getWorkDir(), null, getWorkDir(), null, os) ); String err = new String(os.toByteArray(), StandardCharsets.UTF_8); if (err.indexOf("specify @Retention") != -1) { fail("Be silent about Retention\n" + err); } if (err.indexOf("specify @Target") == -1) { fail("The error messages should say something about missing where\n" + err); } } @NamedServiceDefinition( path="runtest/@when()/below", serviceType={ Runnable.class, Callable.class } ) @Retention(RetentionPolicy.SOURCE) @Target(ElementType.TYPE) public static @interface RunTestReg { public int position(); public String when(); } @NamedServiceDefinition( path="runtest/@array()", serviceType={ Runnable.class, Callable.class } ) @Target(ElementType.TYPE) @Retention(RetentionPolicy.SOURCE) public static @interface RunTestArray { public int position(); public String[] array(); } }
#include<bits/stdc++.h> using namespace std; typedef long long ll; const int N=2005,mod=998244353; char s[N]; int sum1[N],sum2[N]; ll dp[N<<1][N<<1]; int main() { scanf("%s",s+1); int n=strlen(s+1); for(int i=1;i<=n;i++) { sum1[i]=sum1[i-1]; sum2[i]=sum2[i-1]; if(s[i]=='0') sum1[i]+=2; else if(s[i]=='1') sum1[i]++,sum2[i]++; else sum2[i]+=2; } dp[0][0]=1; for(int i=0;i<=sum1[n];i++) for(int j=0;j<=sum2[n];j++) if(dp[i][j]) { if(i<sum1[min(n,i+j+1)]) (dp[i+1][j]+=dp[i][j])%=mod; if(j<sum2[min(n,i+j+1)]) (dp[i][j+1]+=dp[i][j])%=mod; } printf("%lld\n",dp[sum1[n]][sum2[n]]); }
//
// Copyright 2019 Authors All Rights Reserved.
//
// FileName: B.cc
// Author: Beiyu Li <[email protected]>
// Date: 2019-04-29
//
// Maintains three mutable pattern strings t[1..3] over a fixed text s and,
// after every '+'/'-' update to one pattern, reports whether the three
// patterns can be embedded in s as mutually disjoint subsequences.
//
// f[x][y][z]  = smallest prefix length of s that can host the first x chars
//               of t[1], y chars of t[2] and z chars of t[3] disjointly
//               (n+1 means impossible).
// nxt[i][c]   = first position > i at which letter 'a'+c occurs in s.
// After appending to pattern i, only the slice of f with that pattern's new
// length is stale; dp1/dp2/dp3 recompute exactly that slice, memoized via vis.
#include <bits/stdc++.h>
using namespace std;

#define rep(i,n) for (register int i = 0; i < (n); ++i)
#define For(i,s,t) for (register int i = (s); i <= (t); ++i)

typedef long long LL;
typedef pair<int, int> Pii;

const int inf = 0x3f3f3f3f;
const LL infLL = 0x3f3f3f3f3f3f3f3fLL;

const int maxn = 100000 + 5;
const int maxl = 250 + 5;

int n, q;
char s[maxn], t[4][maxl];
int l[4];
int nxt[maxn][26];
int f[maxl][maxl][maxl];
bool vis[maxl][maxl];

// Recompute the f[l[1]][*][*] slice after t[1] grew; memoized over (y,z).
int dp1(int y, int z)
{
	if (vis[y][z]) return f[l[1]][y][z];
	vis[y][z] = true;
	// Extend by the last char of t[1], or match the last char of t[2]/t[3].
	f[l[1]][y][z] = nxt[f[l[1]-1][y][z]][t[1][l[1]]-'a'];
	if (y) f[l[1]][y][z] = min(f[l[1]][y][z], nxt[dp1(y-1,z)][t[2][y]-'a']);
	if (z) f[l[1]][y][z] = min(f[l[1]][y][z], nxt[dp1(y,z-1)][t[3][z]-'a']);
	return f[l[1]][y][z];
}

// Same as dp1 but for the f[*][l[2]][*] slice after t[2] grew.
int dp2(int x, int z)
{
	if (vis[x][z]) return f[x][l[2]][z];
	vis[x][z] = true;
	f[x][l[2]][z] = nxt[f[x][l[2]-1][z]][t[2][l[2]]-'a'];
	if (x) f[x][l[2]][z] = min(f[x][l[2]][z], nxt[dp2(x-1,z)][t[1][x]-'a']);
	if (z) f[x][l[2]][z] = min(f[x][l[2]][z], nxt[dp2(x,z-1)][t[3][z]-'a']);
	return f[x][l[2]][z];
}

// Same as dp1 but for the f[*][*][l[3]] slice after t[3] grew.
int dp3(int x, int y)
{
	if (vis[x][y]) return f[x][y][l[3]];
	vis[x][y] = true;
	f[x][y][l[3]] = nxt[f[x][y][l[3]-1]][t[3][l[3]]-'a'];
	if (x) f[x][y][l[3]] = min(f[x][y][l[3]], nxt[dp3(x-1,y)][t[1][x]-'a']);
	if (y) f[x][y][l[3]] = min(f[x][y][l[3]], nxt[dp3(x,y-1)][t[2][y]-'a']);
	return f[x][y][l[3]];
}

int main(int argc, char *argv[])
{
	scanf("%d%d", &n, &q);
	scanf("%s", s + 1);

	// Build the next-occurrence table from the back of s.
	rep(c,26) nxt[n+1][c] = n + 1;
	for (int i = n; i >= 0; --i) {
		memcpy(nxt[i], nxt[i+1], sizeof(nxt[i+1]));
		if (i + 1 <= n) nxt[i][s[i+1]-'a'] = i + 1;
	}

	// n+1 encodes "not embeddable"; the empty triple needs no text.
	For(x,0,250) For(y,0,250) For(z,0,250) f[x][y][z] = n + 1;
	f[0][0][0] = 0;

	while (q--) {
		char op[2];
		int i;
		scanf("%s%d", op, &i);
		if (op[0] == '+') {
			// Append to pattern i (input appears to supply the appended
			// suffix as a string — TODO confirm it is a single character).
			++l[i];
			scanf("%s", t[i] + l[i]);
			memset(vis, false, sizeof(vis));
			if (i == 1) printf("%s\n", dp1(l[2], l[3]) <= n? "YES": "NO");
			if (i == 2) printf("%s\n", dp2(l[1], l[3]) <= n? "YES": "NO");
			if (i == 3) printf("%s\n", dp3(l[1], l[2]) <= n? "YES": "NO");
		} else {
			// Removal only shrinks a pattern; shorter-prefix entries of f are
			// still valid, so the stored value can be reused directly.
			--l[i];
			printf("%s\n", f[l[1]][l[2]][l[3]] <= n? "YES": "NO");
		}
	}

	return 0;
}
package com.mrbysco.uhc.lists; import com.mrbysco.uhc.lists.info.ItemConversionInfo; import net.minecraft.item.ItemStack; import java.util.ArrayList; public class ConversionList { public static ArrayList<ItemConversionInfo> conversionList = new ArrayList<>(); public static ItemConversionInfo conversion_info; public static void initializeConversion() { } public static void addConversion(ItemStack itemIn, ItemStack result, ItemStack result2, ItemStack result3, ItemStack result4, ItemStack result5, ItemStack result6, ItemStack result7, ItemStack result8, ItemStack result9) { // Check if the info doesn't already exist conversion_info = new ItemConversionInfo(itemIn, result, result2, result3, result4, result5, result6, result7, result8, result9); if(conversionList.contains(conversion_info)) return; else conversionList.add(conversion_info); } }
import {Component, Inject, OnInit} from "@angular/core"; import {PaintManager} from "./PaintManager"; @Component({ selector: 'mini-view', template: ` <svg class="miniView" [attr.viewBox]="viewBox" preserveAspectRatio="none"> <rect x="0" [attr.y]="viewBoxStart" opacity="0.2" fill="blue" width = "20" [attr.height]="viewBoxHeight" (mousedown)="onMouseDown($event)" (mouseup)="stopDragging()" (mousemove)="onMouseMove($event)" (mouseout)="stopDragging()"/> </svg> ` }) export class MiniView implements OnInit { fullHeight:number = 100; viewBox:string = "0 0 20 100"; viewBoxStart:number = 0; viewBoxHeight:number; private initialRangeFactor:number; dragStart:number = 0; mouseButtonDown:boolean = false; constructor(@Inject(PaintManager) private paintManager:PaintManager) { } ngOnInit() { this.paintManager.getParameterUpdates().subscribe(input => { this.initialRangeFactor = this.paintManager.getZoomFactor(); this.viewBoxHeight = this.initialRangeFactor * this.fullHeight; }); } onMouseDown(event:MouseEvent) { this.dragStart = event.clientX; this.mouseButtonDown = true; } stopDragging() { this.mouseButtonDown = false; } onMouseMove(event:MouseEvent) { if (this.mouseButtonDown) { var newStart:number = event.movementY + this.viewBoxStart; console.log("New start: " +newStart); if(newStart + this.viewBoxHeight > this.fullHeight) { newStart = this.fullHeight - this.viewBoxHeight; } else if(newStart < 0) { newStart = 0; } if(this.viewBoxStart != newStart) { this.viewBoxStart = newStart; this.paintManager.visibleRangeChanged(this.transformToIndexRange(this.viewBoxStart)); } } } private transformToIndexRange(scrollIndex:number):number { return Math.floor((scrollIndex / 100) * (this.paintManager.maxIndex - this.paintManager.minIndex) + this.paintManager.minIndex); } }
<reponame>WildKey-Dev/ideafast-keyboard<filename>keyboard/src/main/java/pt/lasige/inputmethod/metrics/data/SubstitutionsErrorRate.java package pt.lasige.inputmethod.metrics.data; import pt.lasige.inputmethod.metrics.textentry.datastructures.TextEntryTrial; public class SubstitutionsErrorRate extends Metric { public SubstitutionsErrorRate() { super(); } public float execute(TextEntryTrial trial){ return trial.getTrialResults().getCharacterLevelResults().getSubstitutionErrRate(); } }
from checkio.home.min_and_max import max, min def test_min_max(): assert max(3, 2) == 3, "Simple case max" assert min(3, 2) == 2, "Simple case min" assert max([1, 2, 0, 3, 4]) == 4, "From a list" assert min("hello") == "e", "From string" assert max(2.2, 5.6, 5.9, key=int) == 5.6, "Two maximal items" assert min([[1, 2], [3, 4], [9, 0]], key=lambda x: x[1]) == [9, 0], "lambda key"
package me.caspardev.enigma.module.render; import de.Hero.settings.Setting; import me.caspardev.enigma.Enigma; import me.caspardev.enigma.module.Category; import me.caspardev.enigma.module.Module; import org.lwjgl.input.Keyboard; import java.util.ArrayList; public class ClickGUI extends Module { public ClickGUI() { super("ClickGUI", Keyboard.KEY_RSHIFT, Category.RENDER); } @Override public void setup() { ArrayList<String> options = new ArrayList<>(); options.add("New"); options.add("JellyLike"); Enigma.instance.settingsManager.rSetting(new Setting("Design", this, "JellyLike", options)); Enigma.instance.settingsManager.rSetting(new Setting("Sound", this, false)); Enigma.instance.settingsManager.rSetting(new Setting("GuiRed", this, 255, 0, 255, true)); Enigma.instance.settingsManager.rSetting(new Setting("GuiGreen", this, 0, 0, 255, true)); Enigma.instance.settingsManager.rSetting(new Setting("GuiBlue", this, 0, 0, 255, true)); } @Override public void onEnable() { super.onEnable(); mc.displayGuiScreen(Enigma.instance.clickGui); toggle(); } }
Wavelet characterization of Sobolev norms ∗ A Sobolev space is a vector space of functions equipped with a norm that combines the Lp norms of the function itself and of its derivatives up to a given order. The derivatives are understood in a suitable weak sense so as to make the space complete, and thus a Banach space. We begin with the classical definition of Sobolev spaces. Definition 1. Let k be a nonnegative integer and let 1 < p < ∞. The Sobolev space $W^{k,p}(\mathbb{R}^n)$ is defined as the space of functions $f$ in $L^p(\mathbb{R}^n)$ all of whose distributional derivatives $\partial^{\alpha} f$ are also in $L^p(\mathbb{R}^n)$ for all multi-indices $\alpha$ that satisfy $|\alpha| \le k$. This space is normed by the expression $$\|f\|_{W^{k,p}} = \sum_{|\alpha| \le k} \|\partial^{\alpha} f\|_{L^{p}}.$$
/* * Wrapper for the function comparing by order metadata. As * qsort is not stable returning 0 on missing order may * mess up the original order. */ int elektraKeyCmpOrderWrapper (const void * a, const void * b) { const Key ** ka = (const Key **) a; const Key ** kb = (const Key **) b; int orderResult = elektraKeyCmpOrder (*ka, *kb); if (orderResult == 0) return keyCmp (*ka, *kb); return orderResult; }
//if you have a exception and you do not want handle it then throws keyword is used public class ThrowsDemo { public static void m1() { System.out.println(10 /0);//1 -route cause of exception } public static void m2() { ThrowsDemo.m1(); //2 // try { // ThrowsDemo.m1(); //2 // } catch (ArithmeticException ae) { // System.out.println("Wrong"); // } } public static void m3() { ThrowsDemo.m1(); //3 } public static void main(String[] args) { System.out.println("start"); ThrowsDemo.m3(); //4 System.out.println("End"); } }
#include "mindex.hpp"
#include <fstream>
#include <algorithm>
#include <sstream>
#include <unordered_set>

// Index over a set of sequence files: each file is associated with up to K
// minimizer k-mers; min_table maps each selected minimizer back to the files
// that carry it (a bipartite file<->minimizer graph).

Mindex::Mindex(const size_t k_, const size_t w_): k(k_), w(w_), invalid(false) {}

Mindex::Mindex(): invalid(true) {}

// Builds the index: extracts downsampled minimizers from every input file
// (multithreaded, one file per worker), then greedily assigns to each file up
// to K minimizers, preferring minimizers shared by few files (low degree).
// Writes the result to "index.txt".
bool Mindex::build(const Mindex_opt& opt) {
  // NOTE(review): file_id, s and name are never used in this function.
  size_t file_id = 0;
  size_t num_min = 0;
  string s,name;
  int nseq = 0, sseq = 0;
  // Hash threshold implementing downsampling by opt.ratio.
  limit = (uint64_t)( ((double)numeric_limits<uint64_t>::max()) * opt.ratio);
  minimizers.clear();
  minimizers.resize(opt.files.size());

  // Main worker thread: collects the distinct downsampled minimizers of one
  // file's sequences into `out`.
  auto minz_worker_function = [&](const vector<string>& vseq, vector<Kmer> &out) {
    for (const auto &seq : vseq) {
      const char* str = seq.c_str();
      const int len = seq.size();
      minHashIterator<RepHash> it_min(str, len, w, k, RepHash(), false), it_min_end;
      uint64_t lasthash = it_min.getHash()+1;
      // for every minimizer in the sequence
      for (; it_min != it_min_end; ++it_min) {
        // downsample by a factor of r
        uint64_t h = it_min.getHash();
        if (lasthash != h && scramble(h) <= limit) {
          auto x = Kmer(seq.c_str() + it_min.getPosition()).rep();
          out.push_back(x);
          lasthash = h;
        }
      }
    }
    // Deduplicate via a temporary hash set.
    unordered_set<Kmer, KmerHash> tmp(out.begin(), out.end());
    out.clear();
    out.assign(tmp.begin(), tmp.end());
  };

  // goes through every file in batched mode: read sequences on the main
  // thread, hand minimizer extraction to one worker per file.
  size_t nfiles = opt.files.size();
  for (size_t i = 0; i < nfiles; ) {
    size_t batch = std::min(nfiles-i, opt.threads);
    vector<thread> workers;
    for (size_t b = 0; b < batch; b++,i++) {
      // for each file, read all the sequences
      vector<string> readv;
      gzFile fp = gzopen(opt.files[i].c_str(), "r");
      kseq_t* kseq = kseq_init(fp);
      int r;
      while ((r = kseq_read(kseq)) >= 0) {
        readv.push_back(kseq->seq.s);
        nseq++;
        sseq += kseq->seq.l;
      }
      kseq_destroy(kseq);
      gzclose(fp);
      kseq = nullptr;
      // process in thread
      workers.push_back(thread(minz_worker_function, readv, std::ref(minimizers[i])));
    }
    for (auto &t : workers) {
      t.join();
    }
  }

  for (auto &x : minimizers) {
    num_min += x.size();
  }
  cerr << "Number of files: " << nfiles <<", number of sequences: " << nseq << ", total bp: " << sseq << std::endl;
  cerr << "Number of minimizers considered: " << num_min << endl;

  // count the number of occurrances for each minimizer (its "degree": the
  // number of files containing it)
  KmerHashTable<uint32_t> occ_table;
  occ_table.reserve(num_min);
  for (const auto &minv : minimizers) {
    for (const auto x : minv) {
      auto it = occ_table.find(x);
      if (it != occ_table.end()) {
        (*it)++;
      } else {
        occ_table.insert(x,1);
      }
    }
  }

  uint32_t maxocc = 0;
  for (const auto &x : occ_table) {
    if (maxocc < x) {
      maxocc = x;
    }
  }

  size_t K = opt.K;
  size_t max_deg = (opt.maxdeg <= 0) ? maxocc : opt.maxdeg;
  if (max_deg < maxocc) {
    // maybe: clean up unneccessary minimizers, maybe remove options
  }

  vector<size_t> degree(nfiles);
  vector<vector<pair<size_t,Kmer>>> full_minimizers(nfiles);
  auto deg_sorter = [&](const pair<size_t,Kmer>& a, const pair<size_t,Kmer>& b) -> bool {
    return a.first < b.first; // sort by degree
  };

  // Per file: keep (degree, kmer) pairs up to max_deg, sorted by degree.
  for (size_t i = 0; i < nfiles; i++) {
    for (const auto &x : minimizers[i]) {
      const auto px = occ_table.find(x);
      if (px != occ_table.end()) {
        if ((*px) <= max_deg) {
          full_minimizers[i].push_back({*px,x});
        }
      }
    }
    std::sort(full_minimizers[i].begin(), full_minimizers[i].end(), deg_sorter);
    minimizers[i].clear();
  }
  occ_table.clear(); // not needed again

  // minimizers is empty, full_minimizers contains all info needed
  // basic strategy, start with unique (degree-1) minimizers
  int num_ins = 0;
  for (size_t i = 0; i < nfiles; i++) {
    for (auto &x : full_minimizers[i]) {
      if (x.first == 1) {
        degree[i]++;
        // NOTE(review): the minimizer that raises degree[i] to K is counted
        // but never inserted below — confirm the break is intended here.
        if (degree[i] >= K) {
          break;
        }
        num_ins++;
        min_table.insert(x.second, {(uint32_t)i});
      } else {
        break; // exit early
      }
    }
  }
  cerr << "degree " << 1 << ", inserted " << num_ins << " minimizers" << endl;

  // greedy strategy, prioritize by degree, pick all that are needed
  bool done = false;
  size_t deg = 2; // start at degree 2
  while (!done && deg <= max_deg) {
    num_ins = 0;
    done = true;
    // go throught every file and see if we need to insert: first pass marks
    // candidate minimizers of this degree in min_table (empty file lists).
    for (size_t i = 0; i < nfiles; i++) {
      if (degree[i] < K) {
        done = false;
        const auto &v = full_minimizers[i];
        // for each minimizer of degree deg
        int goal = (int) (K - degree[i]);
        auto start = lower_bound(v.begin(), v.end(), make_pair(deg,Kmer()), deg_sorter);
        for (; start != v.end() && goal > 0 ; ++start) {
          if (start->first > deg) {
            break;
          }
          auto x = start->second;
          goal--;
          auto px = min_table.find(x);
          if (px == min_table.end()) {
            // newly inserted
            min_table.insert(x, {});
          }
        }
      }
    }
    // Second pass: every file claims all selected minimizers of this degree
    // that it contains (so shared minimizers raise several files' degrees).
    for (size_t i = 0; i < nfiles; i++) {
      const auto &v = full_minimizers[i];
      // for each minimizer of degree deg
      auto start = lower_bound(v.begin(), v.end(), make_pair(deg,Kmer()), deg_sorter);
      for (; start != v.end(); ++start) {
        if (start->first > deg) {
          break;
        }
        auto x = start->second;
        auto px = min_table.find(x);
        if (px != min_table.end()) {
          minimizers[i].push_back(x);
          px->push_back(i);
          degree[i]++;
          num_ins++;
        }
      }
    }
    cerr << "degree " << deg << ", inserted " << num_ins << " minimizers" << endl;
    deg++; // increase the degree
  }

  if (!done && num_ins == 0) {
    cerr << "Impossible to fill table, increase max_deg and retry" << endl;
  }

  // check statistics: left/right degree sums of the bipartite graph should
  // agree.
  size_t sdeg = 0;
  for (size_t i = 0; i < nfiles; i++) {
    sdeg += degree[i];
  }
  size_t sdeg2 = 0;
  for (auto x : min_table) {
    sdeg2 += x.size();
  }

  // fix the minimizers
  full_minimizers.clear();
  cerr << endl;
  cerr << "Inserted " << min_table.size() << " minimizers into the graph " << endl;
  cerr << "Left degree = " << sdeg << ", right degree " << sdeg2 << endl;

  return writeToFile("index.txt", opt);;
}

// Serializes the index (parameters, file list, per-file minimizers and the
// minimizer->files table) to a simple '#'-sectioned text format readable by
// loadFromFile.
bool Mindex::writeToFile(string fn, const Mindex_opt& opt) const{
  size_t nfiles = opt.files.size();
  ofstream of(fn);
  of << "#k,w,limit,nfiles\n";
  of << k << "\n" << w << "\n" << limit << "\n" << nfiles << "\n";
  of << "#files\n";
  for (size_t i = 0; i < nfiles; i++) {
    of << i << "\t" << opt.files[i] << "\n";
  }
  of << "#minimizers\n";
  for (size_t i = 0; i < nfiles; i++) {
    const auto &v = minimizers[i];
    of << i << "\t" << v.size() << "\n";
    for (const auto &x : v) {
      of << x.toString() << "\n";
    }
  }
  of << "#min_table\n";
  of << min_table.size() << "\n";
  const auto end = min_table.end();
  for (auto x = min_table.begin(); x != end; ++x) {
    of << x.getKey().toString() << "\t" << x->size() << "\n";
    const auto v = *x;
    for (const auto t : v) {
      of << t << "\n";
    }
  }
  of.close();
  return true;
}

// Reads the format produced by writeToFile, advancing through four stages
// (params, file names, per-file minimizers, minimizer table).  Lines starting
// with '#' are section headers and skipped.  Clears `invalid` on success.
bool Mindex::loadFromFile(string fn, Mindex_opt& opt) {
  ifstream in(fn);
  string line,tl; // temporary
  int stage = 0;
  size_t nfiles;
  while (in) {
    if (in.peek() == '#') {
      getline(in,line); // skip
    } else {
      if (stage == 0) {
        // read basic params
        in >> opt.k >> opt.w >> limit >> nfiles;
        getline(in,line);
        k = opt.k;
        w = opt.w;
        Kmer::set_k(opt.k);
        Minimizer::set_g(opt.k);
        stage = 1;
      } else if (stage == 1) {
        // read files names
        opt.files.clear();
        minimizers.resize(nfiles);
        for (size_t i = 0; i < nfiles; i++) {
          size_t j;
          getline(in,line);
          stringstream ss(line);
          ss >> j >> tl;
          opt.files.push_back(tl);
        }
        stage = 2;
      } else if (stage == 2) {
        // read minimizers, one block per file
        for (size_t i = 0; i < nfiles; i++) {
          size_t tmp, num;
          getline(in,line);
          stringstream ss(line);
          ss >> tmp >> num;
          auto &v = minimizers[i];
          for (size_t j = 0; j < num; j++) {
            getline(in,line);
            v.push_back(Kmer(line.c_str()));
          }
        }
        stage = 3;
      } else if (stage == 3) {
        // read the minimizer -> file-id table
        size_t num_min,sz;
        getline(in,line);
        stringstream ss(line);
        ss >> num_min;
        min_table.reserve(num_min);
        for (size_t i = 0; i < num_min; i++) {
          getline(in,line);
          stringstream ss(line);
          ss >> tl >> sz;
          Kmer x(tl.c_str());
          auto p = min_table.insert(x, {});
          auto &v = *(p.first);
          v.reserve(sz);
          uint32_t t;
          for (size_t j = 0; j < sz; j++) {
            in >> t;
            getline(in,line);
            v.push_back(t);
          }
        }
        stage = 4;
        break; // break here
      }
    }
  }
  if (stage == 4) {
    invalid = false;
  }
  return !invalid;
}

// Streams the query files in opt.input and counts, per indexed minimizer, how
// often it is seen (populates `counts`).
bool Mindex::countData(const Mindex_opt& opt) {
  // each file is independent for now
  size_t found = 0;
  // clear the counts: one zero-initialized entry per indexed minimizer
  counts.clear();
  counts.reserve(min_table.size());
  auto it_end = min_table.end();
  for (auto it = min_table.begin(); it != it_end; ++it) {
    counts.insert(it.getKey(), 0);
  }
  auto count_end = counts.end();
  minHashIterator<RepHash> it_min(w,k,RepHash()), it_min_end;
  for (auto fn : opt.input) {
    gzFile fp = gzopen(fn.c_str(), "r");
    kseq_t* kseq = kseq_init(fp);
    int r;
    while ((r = kseq_read(kseq)) >= 0) {
      const char *seq = kseq->seq.s;
      it_min.initString(kseq->seq.s, kseq->seq.l);
      uint64_t lasthash = it_min.getHash()+1;
      // for every minimizer in the sequence, same downsampling as in build()
      for (; it_min != it_min_end; ++it_min) {
        // downsample by a factor of r
        uint64_t h = it_min.getHash();
        if (lasthash != h && scramble(h) <= limit) {
          // check if we find this
          lasthash = h;
          auto it = counts.find(Kmer(seq + it_min.getPosition()).rep());
          if (it != count_end) {
            ++(*it);
            ++found;
          }
        }
      }
    }
    kseq_destroy(kseq);
    gzclose(fp);
    kseq = nullptr;
  }
  cerr << "Found " << found << " hits for " << counts.size() << " minimizers" << endl;
  //print out assignment table
  /*
  for (auto it = counts.begin(); it != count_end; ++it) {
    if ((*it) > 1) {
      auto x = it.getKey();
      cerr << x.toString() << "\t";
      auto it2 = min_table.find(x);
      for (auto t : *(it2)) {
        cerr << t << ",";
      }
      cerr << "\t" << *it << "\n";
    }
  }
  */
  return true;
}

// Iteratively decides which indexed files are present in the query data:
// starts from every file with at least one hit, then repeatedly removes files
// whose unique minimizers are mostly unsupported or whose overall supported
// fraction is below min_ratio, cleaning min_table/counts as files drop out.
// Surviving file ids are appended to `present`.
bool Mindex::findExistent(const Mindex_opt& opt, vector<int>& present) {
  // remove k-mers found with little support (count <= 1)
  {
    vector<Kmer> rem;
    for (auto it = counts.begin(); it != counts.end(); ++it) {
      if (*it <= 1) {
        rem.push_back(it.getKey());
      }
    }
    for (auto &x : rem) {
      counts.erase(x);
    }
    rem.clear();
  }
  if (counts.empty()) {
    return false;
  }

  size_t nfiles = minimizers.size();
  vector<int> support(nfiles, 0);
  vector<int> max_support(nfiles,0);
  vector<int> num_uniq(nfiles,0);
  vector<int> count_uniq(nfiles,0);
  auto cnt_end = counts.end();
  auto min_end = min_table.end();

  // Initial support: a file is a candidate if any of its minimizers was hit.
  for (auto it = counts.begin(); it != cnt_end; ++it) {
    auto x = it.getKey();
    auto px = min_table.find(x);
    if (px != min_end) {
      for (auto i : *px) {
        support[i] = 1;
        max_support[i]++;
      }
    }
  }

  int round = 1;
  bool done = false;
  // NOTE(review): min_num_uniq is declared but never used below.
  const int min_num_uniq = 2;
  const double min_uniq_ratio = 0.5;
  const double min_ratio = 0.75;
  while (!done) {
    done = true;
    vector<size_t> rem;
    // clear max_support
    for (auto &x : max_support) {
      x = 0;
    }
    // count the number of unique hits per file (minimizers owned by exactly
    // one file), and recompute max_support for supported files
    for (auto it = min_table.begin(); it != min_end; ++it) {
      auto x = it.getKey();
      if (it->size() == 1) {
        size_t i = (*it)[0];
        ++num_uniq[i];
        auto px = counts.find(x);
        if (px != counts.end()) {
          if (*px > 1) {
            ++count_uniq[i];
          }
        }
      }
      for (auto& i : *it) {
        if (support[i] > 0) {
          max_support[i]++;
        }
      }
    }
    // remove things
    for (size_t i = 0; i < nfiles; i++) {
      if (support[i] > 0) {
        // remove if too few unique k-mers are missing
        size_t c = count_uniq[i];
        size_t nm = num_uniq[i];
        //cerr << i << "\t" << c << "\t" << nm << "\t" << c / double(nm) << endl;
        if (nm >= 4) {
          double ru = c / double(nm);
          if (ru < min_uniq_ratio) {
            rem.push_back(i);
          }
        }
        // remove if max_support is too small
        // NOTE(review): a file can be pushed to rem twice (both tests fire);
        // the second pass over it is harmless since its minimizers are
        // already cleared, but confirm this is intended.
        if (max_support[i] / double(minimizers[i].size()) < min_ratio) {
          rem.push_back(i);
        }
      }
    }
    // Drop removed files: detach them from min_table, deleting minimizers
    // (and their counts) that belonged only to them.
    for (auto i : rem) {
      support[i] = 0;
      auto v = minimizers[i]; // explicit copy
      minimizers[i].clear();
      for (auto x : v) {
        auto px = min_table.find(x);
        if (px != min_table.end()) {
          if (px->size() == 1) {
            min_table.erase(px);
            auto cx = counts.find(x);
            if (cx != counts.end()) {
              counts.erase(cx);
            }
          } else {
            auto &t = *px;
            // remove from min_table;
            auto ipos = find(t.begin(), t.end(), (uint32_t) i);
            if (ipos != t.end()) {
              t.erase(ipos); // linear, but who cares
            }
          }
        }
      }
    }
    done = rem.empty();
    ++round;
    if (!done) {
      cerr << "Round " << round << ", removed " << rem.size() << " entries, count size = " << min_table.size() << endl;
    }
  }

  cerr << endl;
  cerr << "Entries with support" << endl;
  int supp= 0;
  for (size_t i = 0; i < nfiles; i++) {
    if (support[i] > 0) {
      supp++;
      cerr << i << "\n";
    }
  }
  cerr << endl;
  cerr << "Total of " << supp << " entries" << endl;
  for (size_t i = 0; i < nfiles; i++) {
    if (support[i] > 0) {
      present.push_back(i);
    }
  }

  //print out assignment table
  cnt_end = counts.end();
  for (auto it = counts.begin(); it != cnt_end; ++it) {
    if ((*it) > 1) {
      auto x = it.getKey();
      /*
      cerr << x.toString() << "\t";
      auto it2 = min_table.find(x);
      for (auto t : *(it2)) {
        cerr << t << ",";
      }
      cerr << "\t" << *it << "\n";
      */
    }
  }
  return true;
}

// Placeholder: intended to return the probability of presence for each id.
// Currently only sizes the output vector and reports whether counts exist.
bool Mindex::probData(vector<double> &prob, const Mindex_opt& opt) {
  // returns the probability of presence for each id
  prob.clear();
  prob.resize(opt.files.size());
  if (counts.empty()) {
    return false;
  }
  // figure out how to estimate the probability
  return true;
}
/** * Gets a single line string for logging the details of this interval to a log stream. */ @Override public String logString() { StringBuilder buf = new StringBuilder(100); buf.append("any ").append(operandNumber).append(':').append(operand).append(' '); if (!isRegister(operand)) { if (location != null) { buf.append("location{").append(location).append("} "); } } buf.append("hints{").append(splitParent.operandNumber); IntervalHint hint = locationHint(false); if (hint != null) { buf.append(", ").append(hint.location()); } buf.append("} ranges{"); buf.append("[" + from() + ", " + to() + "]"); buf.append("} uses{"); int prev = -1; for (int i = numUsePos() - 1; i >= 0; --i) { assert prev < getUsePos(i) : "use positions not sorted"; if (i != numUsePos() - 1) { buf.append(", "); } buf.append(getUsePos(i)).append(':').append(getUsePosRegisterPriority(i).shortName()); prev = getUsePos(i); } buf.append("} spill-state{").append(spillState()).append("}"); if (canMaterialize()) { buf.append(" (remat:").append(getMaterializedValue().toString()).append(")"); } return buf.toString(); }
class JwksStaticEndpoint: """ configure a static files endpoint on a fastapi app, exposing JWKs. """ def __init__( self, signer: JWTSigner, jwks_url: str, jwks_static_dir: str, ): self._signer = signer self._jwks_url = Path(jwks_url) self._jwks_static_dir = Path(jwks_static_dir) def configure_app(self, app: FastAPI): # create the directory in which the jwks.json file should sit self._jwks_static_dir.mkdir(parents=True, exist_ok=True) # get the jwks contents from the signer jwks_contents = {} if self._signer.enabled: jwk = json.loads(self._signer.get_jwk()) jwks_contents = { "keys": [jwk] } # write the jwks.json file filename = self._jwks_static_dir / self._jwks_url.name with open(filename, "w") as f: f.write(json.dumps(jwks_contents)) route_url = str(self._jwks_url.parent) app.mount(route_url, StaticFiles(directory=str(self._jwks_static_dir)), name="jwks_dir")
// checkSubDir is a helper function that confirms sub directories are created as // expected func checkSubDir(path string, files, dirs, levels int) error { if levels == 0 { return nil } dirFiles, err := ioutil.ReadDir(path) if err != nil { return errors.AddContext(err, "could not read directory") } numFiles := 0 numDirs := 0 for _, f := range dirFiles { if f.IsDir() { numDirs++ if err = checkSubDir(filepath.Join(path, f.Name()), files, dirs, levels-1); err != nil { return err } continue } numFiles++ } if numFiles != files { return fmt.Errorf("Did not find expected number of files, found %v expected %v", numFiles, files) } if numDirs != dirs { return fmt.Errorf("Did not find expected number of directories, found %v expected %v", numDirs, dirs) } return nil }
// YieldSomeTime() just cooperatively yields some time to other processes running on classic Mac OS static Boolean YieldSomeTime(UInt32 milliseconds) { extern Boolean SIOUXQuitting; EventRecord e; WaitNextEvent(everyEvent, &e, milliseconds / 17, NULL); SIOUXHandleOneEvent(&e); return(SIOUXQuitting); }
<reponame>stratosnet/stratos-chain package types import ( "github.com/cosmos/cosmos-sdk/codec" ) // RegisterCodec registers concrete types on codec func RegisterCodec(cdc *codec.Codec) { cdc.RegisterConcrete(MsgCreateResourceNode{}, "register/MsgCreateResourceNode", nil) cdc.RegisterConcrete(MsgRemoveResourceNode{}, "register/MsgRemoveResourceNode", nil) cdc.RegisterConcrete(MsgUpdateResourceNode{}, "register/MsgUpdateResourceNode", nil) cdc.RegisterConcrete(MsgCreateIndexingNode{}, "register/MsgCreateIndexingNode", nil) cdc.RegisterConcrete(MsgRemoveIndexingNode{}, "register/MsgRemoveIndexingNode", nil) cdc.RegisterConcrete(MsgUpdateIndexingNode{}, "register/MsgUpdateIndexingNode", nil) cdc.RegisterConcrete(MsgIndexingNodeRegistrationVote{}, "register/MsgIndexingNodeRegistrationVote", nil) } // ModuleCdc defines the module codec var ModuleCdc *codec.Codec func init() { ModuleCdc = codec.New() RegisterCodec(ModuleCdc) codec.RegisterCrypto(ModuleCdc) ModuleCdc.Seal() }
<reponame>hhy37/decisiontrees<gh_stars>10-100 package decisiontrees import ( "code.google.com/p/goprotobuf/proto" pb "github.com/ajtulloch/decisiontrees/protobufs" "math" "sync" ) func splitExamples(t *pb.TreeNode, e Examples) (left Examples, right Examples) { by(func(e1, e2 *pb.Example) bool { return e1.Features[t.GetFeature()] < e2.Features[t.GetFeature()] }).Sort(e) splitIndex := 0 for i, ex := range e { splitIndex = i if ex.Features[t.GetFeature()] > t.GetSplitValue() { break } } left, right = e[:splitIndex], e[splitIndex:] return } // TreeMapperFunc returns the mapped node and a boolean representing whether // we should continue traversal type TreeMapperFunc func(t *pb.TreeNode, e Examples) (*pb.TreeNode, bool) func mapTree(t *pb.TreeNode, e Examples, m TreeMapperFunc) *pb.TreeNode { left, right := splitExamples(t, e) result, continueTraversal := m(t, e) if continueTraversal == false { return result } if result.GetLeft() != nil { result.Left, _ = m(t.GetLeft(), left) } if result.GetRight() != nil { result.Right, _ = m(t.GetRight(), right) } return result } func weakestLinkCostFunction(t *pb.TreeNode, e Examples) (float64, int) { left, right := splitExamples(t, e) if !isLeaf(t) { leftSquaredDivergence, leftNodes := weakestLinkCostFunction(t.GetLeft(), left) rightSquaredDivergence, rightNodes := weakestLinkCostFunction(t.GetRight(), right) return leftSquaredDivergence + rightSquaredDivergence, leftNodes + rightNodes } return constructLoss(e).sumSquaredDivergence, 1 } type pruner struct { pruningConstraints pb.PruningConstraints lossFunction LossFunction } type prunedStage struct { alpha float64 tree *pb.TreeNode } // func (p *pruner) pruneTree(t *pb.TreeNode, e Examples) prunedStage { bestNode, bestCost, bestLeaves := &pb.TreeNode{}, math.MaxFloat64, 0 mapTree(t, e, TreeMapperFunc(func(n *pb.TreeNode, ex Examples) (*pb.TreeNode, bool) { nodeSquaredDivergence, nodeLeaves := weakestLinkCostFunction(n, ex) nodeCost := nodeSquaredDivergence / float64(nodeLeaves) if 
nodeCost < bestCost { bestNode = t bestCost = nodeCost bestLeaves = nodeLeaves } return proto.Clone(n).(*pb.TreeNode), true })) prunedTree := mapTree(t, e, TreeMapperFunc(func(n *pb.TreeNode, ex Examples) (*pb.TreeNode, bool) { if n != bestNode { return proto.Clone(n).(*pb.TreeNode), true } // Otherwise, return the leaf constructed by pruning all subtrees leafWeight := p.lossFunction.GetLeafWeight(ex) prior := p.lossFunction.GetPrior(ex) return &pb.TreeNode{ LeafValue: proto.Float64(leafWeight * prior), }, false })) rootCost, rootLeaves := weakestLinkCostFunction(t, e) alpha := (rootCost - bestCost) / float64(rootLeaves-bestLeaves) return prunedStage{ alpha: alpha, tree: prunedTree, } } func (p *pruner) constructPrunedSequence(originalTree *pb.TreeNode, e Examples) []prunedStage { sequence := make([]prunedStage, 0) sequence = append(sequence, prunedStage{0.0, originalTree}) for { lastPruned := sequence[len(sequence)-1] if isLeaf(lastPruned.tree) { break } sequence = append(sequence, p.pruneTree(lastPruned.tree, e)) } return sequence } func (p *pruner) Prune(t *pb.TreeNode, trainingSet Examples, testingSet Examples) *pb.TreeNode { prunedSequence := p.constructPrunedSequence(t, trainingSet) result := make([]float64, 0, len(prunedSequence)) w := sync.WaitGroup{} for i := range prunedSequence { w.Add(1) go func(pos int) { rootCost, _ := weakestLinkCostFunction(prunedSequence[pos].tree, testingSet) result[pos] = rootCost / float64(len(testingSet)) }(i) } w.Done() minCost, minCostTree := math.MaxFloat64, &pb.TreeNode{} for i, testingCost := range result { if testingCost < minCost { minCostTree = prunedSequence[i].tree minCost = testingCost } } return minCostTree }
import * as React from "react" import gql from "graphql-tag" import { Mutation, MutationResult } from "react-apollo" import { AUTH_TOKEN } from "../constants" import { navigate } from "gatsby" import LoginForm from "../components/login-form" import { MutationOptions } from "apollo-client" const LOGIN_MUTATION = gql` mutation login($email: String!, $password: String!) { Login(data: { email: $email, password: $password, rememberMe: true }) { token } } ` interface LoginFormContainerProps {} export default class LoginFormContainer extends React.Component<any, any> { render() { return ( <Mutation mutation={LOGIN_MUTATION} onCompleted={data => this.handleCompleted(data)} > {( mutate: (options?: MutationOptions) => Promise<any>, result: MutationResult ) => { return this.props.render(this.submitCallback(mutate), result) }} </Mutation> ) } submitCallback = (mutate: (variables) => Promise<any>) => { return async (email: string, password: string) => { await mutate({ variables: { email: email, password: password }, }) } } handleCompleted = data => { const { token } = data.Login localStorage.setItem(AUTH_TOKEN, token) navigate("/UserListPage") } }
Mandatory HIV testing and occupational therapists. OBJECTIVES As the prevalence of human immunodeficiency virus (HIV) increases, so does the prevalence of HIV-positive health care workers. This study explored what effect this will have on occupational therapy service provision. Attitudes and policies of 118 occupational therapy administrators were examined in relation to mandatory testing for HIV, attitudes on treating HIV-positive patients, working with HIV-positive staff members and students, and use of Centers for Disease Control's guidelines on universal precautions. METHODS A stratified sample of 200 occupational therapy administrators, drawn proportionally from all occupational therapy fieldwork centers, was sent questionnaires. The respondents (N = 118) were asked questions reflecting policy and attitude regarding HIV-positive staff members, students, and patients and mandatory testing. Descriptive statistics and chi-square analyses were computed to examine variances related to policy, ethics, and attitudes. RESULTS Few occupational therapy departments have policies regarding HIV-positive health care workers or students. Those policies in place involve disability discrimination acts and using universal precautions. More than one third of the respondents support mandatory testing of all health care workers and notifying patients if their occupational therapist is HIV-positive. A large minority of respondents would either refuse to hire or train an HIV-positive therapist or student, or would restrict patient care responsibilities. CONCLUSIONS Although most occupational therapy administrators adhere to CDC guidelines and anti-discrimination policies, some concern and fear was expressed regarding HIV transmission through occupational therapy practice. This may result in administrative decisions regarding work and training responsibilities that are unnecessarily restrictive, such as limiting all patient care responsibilities.
/** * Adjacency set based implementation of an undirected graph. * * TODO: thorough testing * * @author Armin Reichert * * @param <V> * vertex type * @param <E> * edge type */ public class UGraph<V, E> implements Graph<V, E> { protected final VertexLabeling<V> vertexLabeling = new VertexLabelsMap<>(null); protected final EdgeLabeling<E> edgeLabeling = new EdgeLabelsMap<>((u, v) -> null); protected final Set<Integer> vertexSet = new HashSet<>(); protected final Map<Integer, Set<Edge>> adjEdges = new HashMap<>(); protected int numEdges; // number of undirected edges public UGraph() { } @Override public VertexLabeling<V> getVertexLabeling() { return vertexLabeling; } @Override public EdgeLabeling<E> getEdgeLabeling() { return edgeLabeling; } @Override public void addVertex(int v) { vertexSet.add(v); adjEdges.put(v, new HashSet<>()); } @Override public void removeVertex(int v) { if (!vertexSet.contains(v)) { throw new IllegalStateException(); } vertexSet.remove(v); adjEdges.remove(v); } @Override public void addEdge(int v, int w, E e) { assertVertexExists(v); assertVertexExists(w); Edge edge = new UndirectedEdge(v, w); setEdgeLabel(v, w, e); adjEdges.get(v).add(edge); adjEdges.get(w).add(edge); numEdges += 1; } @Override public void addEdge(int v, int w) { assertVertexExists(v); assertVertexExists(w); Edge edge = new UndirectedEdge(v, w); adjEdges.get(v).add(edge); adjEdges.get(w).add(edge); numEdges += 1; } @Override public Optional<Edge> edge(int v, int w) { assertVertexExists(v); assertVertexExists(w); for (Edge edge : adjEdges.get(v)) { if (w == edge.either() || w == edge.other()) { return Optional.of(edge); } } return Optional.empty(); } @Override public void removeEdge(int v, int w) { assertVertexExists(v); assertVertexExists(w); edge(v, w).ifPresent(edge -> { adjEdges.get(v).remove(edge); adjEdges.get(w).remove(edge); numEdges -= 1; }); } @Override public void removeEdges() { for (int v : vertexSet) { adjEdges.get(v).clear(); } numEdges = 0; } @Override public 
boolean adjacent(int v, int w) { assertVertexExists(v); assertVertexExists(w); for (Edge edge : adjEdges.get(v)) { if (w == edge.either() || w == edge.other()) { return true; } } return false; } @Override public IntStream vertices() { return vertexSet.stream().mapToInt(Integer::intValue); } @Override public int numVertices() { return vertexSet.size(); } @Override public boolean containsVertex(int v) { return vertexSet.contains(v); } private Set<Edge> createEdgeSet() { Set<Edge> edges = new HashSet<>(); for (int v : vertexSet) { for (Edge edge : adjEdges.get(v)) { edges.add(edge); } } return Collections.unmodifiableSet(edges); } @Override public Stream<Edge> edges() { return createEdgeSet().stream(); } @Override public int numEdges() { return numEdges; } @Override public int degree(int v) { assertVertexExists(v); return adjEdges.get(v).size(); } @Override public Stream<Integer> adj(int v) { assertVertexExists(v); List<Integer> result = new ArrayList<>(); for (Edge e : adjEdges.get(v)) { if (e.either() == v) { result.add(e.other()); } else { result.add(e.either()); } } return result.stream(); } @Override public String toString() { StringBuilder s = new StringBuilder(); s.append(numVertices()).append("\n"); s.append(numEdges()).append("\n"); for (int v : vertexSet) { s.append(v).append("\n"); } for (Edge e : createEdgeSet()) { s.append(e.either()).append(" ").append(e.other()).append("\n"); } return s.toString(); } protected void assertVertexExists(int v) { if (!vertexSet.contains(v)) { throw new IllegalStateException("Vertex not in graph: " + v); } } }
<reponame>greifentor/archimedes-legacy
/*
 * Utils.java
 *
 * 19.12.2011
 *
 * (c) by ollie
 *
 */
package archimedes.legacy;

import gengen.metadata.AttributeMetaData;
import gengen.metadata.ClassMetaData;

import java.util.Arrays;
import java.util.List;
import java.util.Vector;

import org.easymock.EasyMock;

import archimedes.legacy.scheme.DefaultIndexMetaData;
import archimedes.model.IndexMetaData;

/**
 * A collection of utility methods which are used across several packages.
 *
 * @author ollie
 *
 * @changed OLI 19.12.2011 - Added.
 */
public class Utils {

	/**
	 * Creates an AttributeMetaData mock with the given name.
	 *
	 * @param name
	 *            The name of the table column for which the mock is to be
	 *            created.
	 * @return A mock object with the given name.
	 *
	 * @changed OLI 19.12.2011 - Added.
	 */
	public static AttributeMetaData createAttributeMetaDataMock(String name) {
		AttributeMetaData amd = EasyMock.createMock(AttributeMetaData.class);
		EasyMock.expect(amd.getName()).andReturn(name).anyTimes();
		EasyMock.replay(amd);
		return amd;
	}

	/**
	 * Creates a ClassMetaData mock with the given name (and no attributes).
	 *
	 * @param name
	 *            The name of the table for which the mock is to be created.
	 * @return A mock object with the given name.
	 *
	 * @changed OLI 19.12.2011 - Added.
	 */
	public static ClassMetaData createClassMetaDataMock(String name) {
		return createClassMetaDataMock(name, new Vector<AttributeMetaData>());
	}

	/**
	 * Creates a ClassMetaData mock with the given name and the given
	 * attributes.
	 *
	 * @param name
	 *            The name of the table for which the mock is to be created.
	 * @param attributes
	 *            The attributes to attach to the table.
	 * @return A mock object with the given name and the attributes of the
	 *         list.
	 *
	 * @changed OLI 19.12.2011 - Added.
	 */
	public static ClassMetaData createClassMetaDataMock(String name, AttributeMetaData... attributes) {
		return createClassMetaDataMock(name, Arrays.asList(attributes));
	}

	/**
	 * Creates a ClassMetaData mock with the given name and the given
	 * attributes.
	 *
	 * @param name
	 *            The name of the table for which the mock is to be created.
	 * @param attributes
	 *            The attributes to attach to the table.
	 * @return A mock object with the given name and the attributes of the
	 *         list.
	 *
	 * @changed OLI 19.12.2011 - Added.
	 */
	public static ClassMetaData createClassMetaDataMock(String name, List<AttributeMetaData> attributes) {
		ClassMetaData cmd = EasyMock.createMock(ClassMetaData.class);
		EasyMock.expect(cmd.getName()).andReturn(name).anyTimes();
		// Attributes are reachable both by index and by name, any number of
		// times, so tests can look them up either way.
		for (int i = 0, leni = attributes.size(); i < leni; i++) {
			EasyMock.expect(cmd.getAttribute(i)).andReturn(attributes.get(i)).anyTimes();
			EasyMock.expect(cmd.getAttribute(attributes.get(i).getName())).andReturn(attributes.get(i)).anyTimes();
		}
		EasyMock.expect(cmd.getAttributes()).andReturn(attributes).anyTimes();
		EasyMock.replay(cmd);
		return cmd;
	}

	/**
	 * Creates an IndexMetaData object with the given name for the given table
	 * with the given columns set.
	 *
	 * @param indexName
	 *            The name of the index.
	 * @param table
	 *            The table for which the index is to be created.
	 * @param columnsSet
	 *            The columns to include in the index.
	 * @return An index meta data object with the given name, table and
	 *         columns.
	 *
	 * @changed OLI 19.12.2011 - Added.
	 */
	public static IndexMetaData createIndexMetaData(String indexName, ClassMetaData table,
			AttributeMetaData... columnsSet) {
		IndexMetaData imd = new DefaultIndexMetaData(indexName, table);
		for (int i = 0; i < columnsSet.length; i++) {
			imd.addColumn(columnsSet[i]);
		}
		return imd;
	}
}
<filename>src/factory/action/authorize.ts /** * 承認アクションファクトリー * @namespace action.authorize */ import * as ActionFactory from '../action'; /** * 承認対象インターフェース */ export type IObject = any; /** * 承認結果インターフェース */ export type IResult = any; /** * 承認目的インターフェース */ export type IPurpose = any; /** * アクション属性インターフェース */ export interface IAttributes<TObject, TResult> extends ActionFactory.IAttributes<TObject, TResult> { purpose: IPurpose; recipient: ActionFactory.IParticipant; } export type IAction<TAttributes extends IAttributes<IObject, IResult>> = ActionFactory.IAction<TAttributes>;
/** * Creates a new plugin instance. * * @param searchPath A List of Strings, containing different package names. * @param externalJars the list of external jars to search * @return A new plugin. * @throws ClassNotFoundException If the class declared was not found. * @throws InstantiationException If the class cannot be instantiated- * @throws IllegalAccessException If the class cannot be accessed. */ public Plugin newPluginInstance( final List< String > searchPath, final List< String > externalJars) throws ReflectiveOperationException { if( m_clazz == null ) { m_clazz = ClassUtil.findClass( searchPath, externalJars ,m_className ); } return ClassUtil.buildInstance( m_clazz ); }
/* * NOTE: Must be called with tty_token held */ static void comwakeup(void *chan) { struct com_s *com; int unit; lwkt_gettoken(&tty_token); callout_reset(&sio_timeout_handle, sio_timeout, comwakeup, NULL); for (unit = 0; unit < sio_numunits; ++unit) { com = com_addr(unit); if (com != NULL && !com->gone && (com->state >= (CS_BUSY | CS_TTGO) || com->poll)) { com_lock(); siointr1(com); com_unlock(); } } if (--sio_timeouts_until_log > 0) { lwkt_reltoken(&tty_token); return; } sio_timeouts_until_log = hz / sio_timeout; for (unit = 0; unit < sio_numunits; ++unit) { int errnum; com = com_addr(unit); if (com == NULL) continue; if (com->gone) continue; for (errnum = 0; errnum < CE_NTYPES; ++errnum) { u_int delta; u_long total; com_lock(); delta = com->delta_error_counts[errnum]; com->delta_error_counts[errnum] = 0; com_unlock(); if (delta == 0) continue; total = com->error_counts[errnum] += delta; log(LOG_ERR, "sio%d: %u more %s%s (total %lu)\n", unit, delta, error_desc[errnum], delta == 1 ? "" : "s", total); } } lwkt_reltoken(&tty_token); }
{-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} -- | This module declares the 2D MaxPooling layer data type. module TensorSafe.Layers.MaxPooling where import Data.Kind (Type) import Data.Map (fromList) import Data.Proxy (Proxy (..)) import GHC.TypeLits (KnownNat, Nat, natVal) import TensorSafe.Compile.Expr ( CNetwork (CNLayer), DLayer (DMaxPooling), ) import TensorSafe.Layer (Layer (..)) -- | A 2D MaxPooling pooling that works for D2 and D3 shapes data MaxPooling :: Nat -> Nat -> Nat -> Nat -> Type where MaxPooling :: MaxPooling kernelRows kernelColumns strideRows strideColumns deriving (Show) instance ( KnownNat kernelRows, KnownNat kernelColumns, KnownNat strideRows, KnownNat strideColumns ) => Layer (MaxPooling kernelRows kernelColumns strideRows strideColumns) where layer = MaxPooling compile _ _ = let kernelRows = natVal (Proxy :: Proxy kernelRows) kernelColumns = natVal (Proxy :: Proxy kernelColumns) strideRows = natVal (Proxy :: Proxy strideRows) strideColumns = natVal (Proxy :: Proxy strideColumns) in CNLayer DMaxPooling ( fromList [ ("poolSize", show [kernelRows, kernelColumns]), ("strides", show [strideRows, strideColumns]) ] )
// makeService returns the desired Service object for a given HTTPSource. func (r *Reconciler) makeService(src *sourcesv1alpha1.HTTPSource) *corev1.Service { return object.NewService(src.Namespace, src.Name, object.WithControllerRef(src.ToOwner()), object.WithSelector(applicationNameLabelKey, src.Name), object.WithServicePort(portName, externalPort, int(r.adapterEnvCfg.Port)), object.WithServicePort(metricsPortName, metricsPort, metricsPort), object.WithLabel(applicationNameLabelKey, src.Name), object.WithLabel(dashboardLabelKey, dashboardLabelValue), ) }
<reponame>selym3/Markov package markov; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Random; public class Sequence<T> implements Markov<T> { protected Map<T, List<T>> chain; protected T last; public Sequence() { chain = new HashMap<T, List<T>>(); last = null; } public Sequence(T... input) { this(); append(input); } @Override public Queue<T> generate(int length) { if (chain.size() == 0 || length == 0) { return new LinkedList<T>(); } Queue<T> out = new LinkedList<T>(); Object[] entries = chain.keySet().toArray(); T first = (T) entries[RANDOM.nextInt(entries.length)]; out.add(first); for (int i = 1; i < length; ++i) { List<T> possiblities = chain.get(first); if (possiblities != null) { first = possiblities.get(RANDOM.nextInt(possiblities.size())); } else { first = null; } out.add(first); } return out; } @Override public Queue<T> generate(int length, int seed) { if (chain.size() == 0 || length == 0) { return new LinkedList<T>(); } Random rn = new Random(seed); Queue<T> out = new LinkedList<T>(); Object[] entries = chain.keySet().toArray(); T first = (T) entries[rn.nextInt(entries.length)]; out.add(first); for (int i = 1; i < length; ++i) { List<T> possiblities = chain.get(first); if (possiblities != null) { first = possiblities.get(rn.nextInt(possiblities.size())); } else { first = null; } out.add(first); } return out; } // links first element to last element inputted @Override public Sequence<T> append(T... input) { if (last != null) { chain.putIfAbsent(last, new ArrayList<T>()); chain.get(last).add(input[0]); } for (int i=1; i < input.length;++i) { chain.putIfAbsent(input[i-1], new ArrayList<>()); chain.get(input[i-1]).add(input[i]); } if (input.length != 0) { last = input[input.length - 1]; } return this; } }
import { domInfoHelper, eventHelper, domManipulationHelper, domTypes } from '../dom/exports'
import { Placement, TriggerBoundyAdjustMode, overlayConstraints, overlayPosition, Overlay } from './overlay'
import { state } from '../stateProvider';

/**
 * Static helper around Overlay instances: creates them inside a container
 * element, recalculates their position, disposes them, and manages a
 * "suppress Enter while the overlay is visible" keyboard guard.
 */
export class overlayHelper {
    // Live overlays keyed by the Blazor component id that owns them, so they
    // can be retrieved during update/dispose.
    static overlayRegistry: { [key: string]: Overlay} = {};

    /**
     * Moves the overlay element into the container element, builds an Overlay
     * for it, registers it, and returns its calculated initial position.
     * Returns null when the overlay element could not be added to the
     * container.
     */
    static addOverlayToContainer(blazorId: string, overlaySelector, triggerSelector, placement: Placement,
        containerSelector: string, triggerBoundyAdjustMode: TriggerBoundyAdjustMode,
        triggerIsWrappedInDiv: boolean, triggerPrefixCls: string,
        verticalOffset: number, horizontalOffset: number, arrowPointAtCenter: boolean,
        overlayTop?: number, overlayLeft?: number
    ): overlayPosition {
        const overlayElement = domInfoHelper.get(overlaySelector) as HTMLDivElement;
        const containerElement = domInfoHelper.get(containerSelector) as HTMLElement;
        const triggerElement = domInfoHelper.get(triggerSelector) as HTMLElement;

        if (!domManipulationHelper.addElementTo(overlaySelector, containerElement)) {
            console.log("Failed to add overlay. Details:", {
                triggerPrefixCls: triggerPrefixCls,
                overlaySelector: overlaySelector,
                containerElement: containerElement
            });
            return null;
        }

        // Explicit coordinates (when supplied) act as a preset that overrides
        // the computed position.
        let overlayPresets: domTypes.position;
        if (overlayTop || overlayLeft) {
            overlayPresets = { x: overlayLeft, y: overlayTop };
        }
        let overlayConstraints: overlayConstraints = {
            verticalOffset: verticalOffset,
            horizontalOffset: horizontalOffset,
            arrowPointAtCenter: arrowPointAtCenter
        };

        let overlay = new Overlay(blazorId, overlayElement, containerElement, triggerElement, placement,
            triggerBoundyAdjustMode, triggerIsWrappedInDiv, triggerPrefixCls, overlayConstraints);

        //register object in store, so it can be retrieved during update/dispose
        this.overlayRegistry[blazorId] = overlay;

        return overlay.calculatePosition(false, true, overlayPresets);
    }

    /**
     * Recalculates the position of a previously registered overlay. If the
     * overlay is not in the registry, it is (re)created from scratch instead.
     */
    static updateOverlayPosition(blazorId: string, overlaySelector, triggerSelector, placement: Placement,
        containerSelector: string, triggerBoundyAdjustMode: TriggerBoundyAdjustMode,
        triggerIsWrappedInDiv: boolean, triggerPrefixCls: string,
        verticalOffset: number, horizontalOffset: number, arrowPointAtCenter: boolean,
        overlayTop?: number, overlayLeft?: number): overlayPosition {
        const overlay = this.overlayRegistry[blazorId];
        if (overlay){
            let overlayPresets: domTypes.position;
            if (overlayTop || overlayLeft) {
                overlayPresets = { x: overlayLeft, y: overlayTop };
            }
            return overlay.calculatePosition(false, false, overlayPresets);
        } else {
            //When page is slow, it may happen that rendering of an overlay may not happen, even if
            //blazor thinks it did happen. In such a case, when overlay object is not found, just try
            //to render it again.
            return overlayHelper.addOverlayToContainer(blazorId, overlaySelector, triggerSelector, placement,
                containerSelector, triggerBoundyAdjustMode, triggerIsWrappedInDiv, triggerPrefixCls,
                verticalOffset, horizontalOffset, arrowPointAtCenter, overlayTop, overlayLeft);
        }
    }

    /**
     * Disposes the overlay owned by the given Blazor id and drops it from the
     * registry.
     */
    static deleteOverlayFromContainer(blazorId: string) {
        const overlay = this.overlayRegistry[blazorId];
        if (overlay) {
            overlay.dispose();
            delete this.overlayRegistry[blazorId];
        }
    }

    /**
     * Registers a keydown handler on the element that suppresses the Enter
     * key while the overlay element is visible (offsetParent !== null).
     */
    static addPreventEnterOnOverlayVisible(element, overlayElement) {
        if (element && overlayElement) {
            let dom: HTMLElement = domInfoHelper.get(element);
            if (dom) {
                // The callback is kept in the shared registry so it can be
                // removed again in removePreventEnterOnOverlayVisible.
                state.eventCallbackRegistry[element.id + "keydown:Enter"] =
                    (e) => eventHelper.preventKeyOnCondition(e, "enter", () => overlayElement.offsetParent !== null);
                dom.addEventListener("keydown", state.eventCallbackRegistry[element.id + "keydown:Enter"], false);
            }
        }
    }

    /**
     * Removes the Enter-suppressing keydown handler added by
     * addPreventEnterOnOverlayVisible.
     */
    static removePreventEnterOnOverlayVisible(element) {
        if (element) {
            let dom: HTMLElement = domInfoHelper.get(element);
            if (dom) {
                dom.removeEventListener("keydown", state.eventCallbackRegistry[element.id + "keydown:Enter"]);
                state.eventCallbackRegistry[element.id + "keydown:Enter"] = null;
            }
        }
    }
}
import java.util.*; public class codeforcw58a { public static void main(String[] args){ Scanner scan=new Scanner(System.in); String palabra = scan.nextLine(); System.out.println(respuesta(palabra)); } public static String respuesta(String palabra) { int ch=0; int co=0; int cl=0; int ce=0; int l; String p; char letra; String res=""; l= palabra.length(); for(int i=0;i<l;i++){ letra=palabra.charAt(i); if(letra=='h'||letra=='o'||letra=='l'||letra=='e'){ if(letra=='h'&& ch==0){ res=res+letra; ce=1; ch=ch+1; } else{} if(letra=='o'&& co==2){ res=res+letra; co=co+1; } else{} if(letra=='e'&& ce==1){ res=res+letra; ce=ce+1; cl=1; } else{} if(letra=='l'&& cl<=2 && cl>0){ res=res+letra; cl=cl+1; co=co +1; } else{} } } if(res.equals("hello")){ res="YES"; } else{ res="NO"; } return res; } }
<reponame>MahdiGimDev/rayen-backend<gh_stars>0 import { Injectable, Logger } from '@nestjs/common'; import { Cron, CronExpression } from '@nestjs/schedule'; import { UsersService } from './main/services/users.service'; @Injectable() export class TasksService { constructor(private userService: UsersService) {} private readonly logger = new Logger(TasksService.name); @Cron(CronExpression.EVERY_1ST_DAY_OF_MONTH_AT_MIDNIGHT) handleCron() { this.userService.updateUserVacations(); this.logger.debug('Updating vacations for all users 1 month'); } /* @Cron(CronExpression.EVERY_1ST_DAY_OF_MONTH_AT_MIDNIGHT) handleCron() { this.userService.updateUserVacations(); this.logger.debug('Updating vacations for all users 1 month'); } */ @Cron(CronExpression.EVERY_6_MONTHS) handleMonthsCron() { this.userService.updateUser6MonthVacations(); this.logger.debug('Updating vacations for all users 6 months'); } }
export default { type: "object", properties: { distination: { type: "string" }, events: { type: "array", items: { type: "object", properties: { replyToken: { type: "string", }, message: { type: "object", properties: { type: { type: "string" }, text: { type: "string" }, }, }, source: { type: "object", properties: { type: { type: "string" }, userId: { type: "string" }, }, }, }, }, }, }, } as const;
def update_max_for_sim(m, init_max, max_allowed): max_for_sim = 200 * m + init_max if max_for_sim < max_allowed: max_for_sim = max_allowed return max_for_sim
Computer modeling of process in earth structures of dam type The article deals with computer modeling of processes in earth structures of dam type, taking into account the nonlinear properties of soil. Large vertical and planned displacements of soil in the body of the dam are formed due to changes in soil density or as a result of the development of plastic shear strains. The application of the von Mises-Botkin and Coulomb hypotheses to the development of plastic strain is investigated. Introduction Worldwide construction of underground and aboveground structures, as well as the development of ground space in seismically active regions and in the areas with developed irrigation systems and high danger of floods, stipulates for a reliable forecast of the actual state of dams, levees, underground and aboveground structures, road and railroad embankments, aircraft landing grounds in emergency situations and (or) during long-term operation. Finite Element Method (FEM) is widely used in the analysis of embankment dams . In the FEM analysis, a correct constitutive model should be identified; it is chosen for each part of the dam to model the stress-strain relationship. The zones of embankment dams have different functions. Because of this, zones usually consist of different types of soil and their stress-strain response can vary greatly. For each dam zone, the parameters entering the chosen constitutive model should have appropriate values. Generally, laboratory and/or field-testing of soil is needed as a basis to evaluate these parameters. However, a great number of dams are outdated and limited information on soil materials of the dam structures is available. Generally, it is difficult to take soil samples for testing, especially from the central impermeable part of the dam, since this can affect the dam operation and its safety. It would be useful to find a way to determine the values of the principal parameters by some nondestructive method. 
The study in determines the slope stability and gives a case study of the Koga embankment dam (Ethiopia). The analysis was conducted using PLAXIS 2D finite element software. The behavior of the body and the base of the dam was described using the Mohr-Coulomb criterion. Based on this study, at the end of the construction process, the resulting safety factor to analyze static stability was 1.6221. For the stationary regime, the water level was fixed at the full reservoir level (2015.25 m). The safety factor obtained for the static stability analysis was 1.6136. The analysis results showed that the safety factor for static calculations was 1.2199. Using the design standards recommended by the US Army Corps of Engineers, the British Dam Association, and the Canadian Dam Association, the slope stability analysis of the Koga embankment dam appeared to be safe under critical loads. The structural stability of an embankment dam under static loading is investigated in ; a geosynthetic lining system is used as a filtration barrier, and the results are compared with the results obtained for the same section of the embankment dam without geosynthetic lining systems. The geotechnical properties of the embankment dam were chosen so that it is stable under static conditions without any geosynthetic lining system. The results of the analysis clearly showed that the geosynthetic lining system increases the stability of the dam sections and provides a better alternative to the seepage control in the embankment dams. In , on the basis of integrated geo-radar and seismotomographic studies, a geomechanical model of an enclosing fill-in hydro-technical structure (a dam) was developed. 
The model was investigated in an elastoplastic case using computer simulation methods and resulted in determining the regularities of deformation and displacement of the structure body, and the formation of a depression curve in the body depending on the properties of its constituent soils and the level of external water load. The data obtained represent the basis for substantiating measures to reduce the risks of local destruction of enclosing dams. The accumulated experience of embankment dams operation shows that the accidents and conditions unacceptable from the point of view of normal operation can occur as a result of the development of the following processes:  large vertical and planned displacements of soil in the dam body due to changes in the soil density or as a result of the development of plastic shear strains. Residual soil displacements can cause the crack formation on the slopes and the crest of the dam, parallel to the axis of the dam with opening up to several tens of centimeter, up to several meters deep; and the appearance of feathering fractures directed at an angle to the axis of the dam which are especially dangerous from the point of view of the anti-seepage element damage;  violation of the monolithic character of anti-seepage element and its contact with the base, the sides of the canyon, and discharge facilities;  erosion of the dam by a flow over the crest due to disruption of discharge facilities operation during a flood or an earthquake, caused by a sharp drop in the crest level as a result of the dam body settlement or loss of stability of its slopes, wave formation in river beds, canals, and reservoirs due to collapse of large masses of soil of coastal slopes. The analysis of accidents and damages shows that the nature of the pressure front violation of the structure can be different. 
It can be abrupt damage, directly related to the impact of the lowering of the dam crest level, either due to soil settlement or due to the loss of stability and the slope collapse along the sliding curve that captures the dam crest. The finite element method is used to solve the problem, taking into account the complex geometry of the region. The study in describes a technology for constructing a finite element representation of a multiply connected three-dimensional domain. The representation of the finite element configuration of the domain is described by a discrete set, which consists of a number of nodes and elements of the finite element grid, which present an ordered set of coordinates of nodes and the number of finite elements. Subdomain merging is based on the coincidence criteria for boundary nodes by determining a simple hierarchy of volumes, surfaces, lines, and points. Relabeling of nodes is performed by the frontal method, in which the nodes located at the outer edges of the structure are used as the initial front. A method to solve a three-dimensional problem of elastoplastic deformations in a transversely isotropic body by the FEM is presented in . 
The process of solving the problem consists of the determination of effective parameters of a transversely isotropic medium; the construction of a finite element grid of the body configuration, including the determination of the local minimum value of the bandwidth of nonzero coefficients of systems of equations by the front method; the determination of the stiffness matrix coefficients and nodal components of the load vector of the equation of state of an individual finite element according to the theory of small elastoplastic deformations for a transversely isotropic medium; the formation of a resolving symmetric band system of equations by summing all the coefficients of the equations of state by summing all the finite elements; the solution of the system 3 of equations of a symmetric band by the square root method; the calculation of the elastoplastic stressstrain state of the body by performing the iterative process of the initial stress method. Materials and methods The identification of the causes of the above factors necessitates a deep analysis of real properties of soil, which depend on compressibility, water permeability, contact shear resistance, and structural-phase deformation, and on the defining relationships between the components of stress and strain tensors. The defining relationships and , determined experimentally, are called the soil models and they directly depend on the nature and rate of external force application and physical and chemical composition of the soil itself, i.e., it is impossible to describe them by a single law of the stresses and strains relationship. 
Therefore, 4 types of regularities that dominate in practical applications are distinguished in soil mechanics as the characteristics: 1) compressibility, when calculating the base settlement; 2) water permeability in the process of predicting the water-saturated soil bases settlement rate; 3) contact resistance, when determining the ultimate strength, stability, and pressure on enclosing facilities; 4) structural-phase deformability in the process of determining soil stresses and strains; when calculating unique structures, they should be solved considering the mutual influence of all characteristic soil properties. For some practical important classes of static problems, it is advisable to jointly solve the equations of groups (3) and (4), obtained from the relations given in where int  and int e are the intensity of stresses and strains; av  and av e are the average stresses and strains. The specific form of functions and directly depends on the real properties of soil . Under a single loading, the relationship between the components of the stress and strain tensors can be written as: where ij  and ij e are the components of stress and strain tensors; the relationship between deformations and displacements is taken in the following form: Equilibrium equations are solved with relations (1)-(4), under the following boundary conditions: and the following conditions of fixing on a part of the surface: When solving the problems of equilibrium of earth structures, the von Mises-Botkin models were tested to determine function  : and the Coulomb-Mohr models to determine function  : ( 23 3 23 13 3 13 12 3 12 33 1 22 2 11 2 33 33 2 22 1 11 2 22 33 2 22 2 An account for the complexity of geometric shape of earth structures under consideration, the FEM with relations (7)-(11) is applied to the solution of problem (5)- (6). 
In [the cited work], nonlinear non-stationary problems of spatial structures of complex configuration are considered by the method of finite differences, taking into account the influence of the rates of the stress and strain intensities and of the average stresses and strains. In the case of the von Mises-Botkin model the specified accuracy is achieved in three iterations, while in the case of the Coulomb-Mohr model, 3-4 times more iterations are required to achieve this accuracy. This is due to the fact that the Coulomb-Mohr models use limit values of soil. Conclusion The article deals with computer modeling of processes in earth structures of dam type, taking into account the nonlinear properties of soil according to the von Mises-Botkin and Coulomb-Mohr models. On the basis of the software developed and the computational experiment conducted, the nature of the stress-strain state redistribution in the Tupolang embankment dam (Uzbekistan) was studied. It was determined that accounting for the nonlinearity effect of the relation between the average stress $\sigma_{av}$ and the average strain $e_{av}$ increases the stress intensity values by 37.4%.
/**
 * Create an ID from this instance's prefix and the zero padded specified value.
 *
 * Hand rolled equivalent to {@code String.format("ex-%010d", value)} optimized to
 * reduce allocation and CPU overhead. Each {@code prefixN} field is assumed to end
 * with N zero characters of padding so the numeric part is always ten wide.
 */
String createId(final long value) {
    final String digits = Long.toString(value);
    // Number of zero characters still required to reach a width of ten;
    // zero or negative means the value already fills the field.
    switch (10 - digits.length()) {
        case 9:
            return prefix9 + digits;
        case 8:
            return prefix8 + digits;
        case 7:
            return prefix7 + digits;
        case 6:
            return prefix6 + digits;
        case 5:
            return prefix5 + digits;
        case 4:
            return prefix4 + digits;
        case 3:
            return prefix3 + digits;
        case 2:
            return prefix2 + digits;
        case 1:
            return prefix1 + digits;
        default:
            return prefix0 + digits;
    }
}
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import {colors} from './colors';
import FlexRow from './FlexRow';
import FlexBox from './FlexBox';
import styled from 'react-emotion';

/**
 * A toolbar.
 */
const Toolbar = styled(FlexRow)(
  (props: {position?: 'bottom' | 'top'; compact?: boolean}) => {
    const anchoredToBottom = props.position === 'bottom';
    const divider = `1px solid ${colors.sectionHeaderBorder}`;
    return {
      backgroundColor: colors.light02,
      // The divider always sits on the edge facing the content area:
      // top-anchored toolbars draw it below, bottom-anchored ones above.
      borderBottom: anchoredToBottom ? 'none' : divider,
      borderTop: anchoredToBottom ? divider : 'none',
      flexShrink: 0,
      height: props.compact ? 28 : 42,
      lineHeight: '32px',
      alignItems: 'center',
      padding: 6,
      width: '100%',
    };
  },
);

export const Spacer = styled(FlexBox)({
  flexGrow: 1,
});

export default Toolbar;
There’s nothing the left likes better than attacking Fox News. Almost all liberal media “analysis” revolves around such activity, without ever noting the outlandishly liberal biases of the traditional outlets that outnumber Fox like the Persians outnumbered the Spartans. Throw in a chance to defend Islam and bash Christians and you get to light up the Internet like a Christmas (or Solstice) tree. That was the case when Lauren Green, religion correspondent for Fox News (the folks who run this website), interviewed the controversial author of the new book “Zealot: The Life and Times of Jesus of Nazareth.” In a Fox News.com Live interview Green dared to ask Reza Aslan, a Muslim who converted to Christianity and then back to Islam, the most obvious of questions: “Now, I want to clarify: You are a Muslim, so why did you write a book about the founder of Christianity?” That bolding is important. It’s how The Washington Post’s Erik Wemple handled it in a piece headlined “Fox News must apologize to Reza Aslan.” Wemple did two pieces attacking Fox on this point, saying they were “stupid and prejudicial questions.” Apparently Wemple needs a refresher course on the Society of Professional Journalists’ Code of Ethics. It states: “The public is entitled to as much information as possible on sources' reliability.” [pullquote] The heck with that. In the liberal media, one dare not ever question the motives of Muslims. How do we know this? Just ask the Post. Two months after 9/11, Wemple’s own paper went to an Islamic expert to answer questions for readers on washingtonpost.com. The paper turned to Anwar Al-Awlaki – a man we later discovered was a terrorist so awful that Obama had him killed on sight by a drone armed with Hellfire missiles. In the video, Al-Awlaki criticized “this association between Islam and terrorism.” Now that’s apology worthy. The Post was really just piling on. 
The Daily Beast called it “the interview that’s been burning up the Internet.” Actually, it was only burning up the left side of the Internet until the traditional media followed that lead. The Huffington Post has written about it around the clock since the interview – with at least six different pieces, including a HuffPo Live segment, and a column calling Green “the Fox News Zealot” and another saying the Q&A was a “wacky interview.” My favorite ridiculous HuffPo headline on this was: “Reza Aslan: Speaking Truth to Power.” Aslan’s so-called truth is that Jesus was not the son of God. In HuffPo land, anyone who is Christian is wrong. That is the core of lefty belief. The rest of the extensive left-wing web – Buzzfeed, Salon, The New Republic, Alternet, etc. – churned the story to make libs fume with anger. That was followed by the traditional media – the Post, New York Times, CNN et al. Aslan whined to Piers Morgan that it was “distasteful” to be asked about his credentials. Morgan, taking a break from his crusade against the Constitution, called the questions “ridiculous.” During the interview, Aslan called his book “an historical biography of a man named Jesus of Nazareth. It seems to separate him from the Christology that arose around him in the generations that followed.” Pretty much proving the point that Christian critics have said of his book – that he was supporting the principle tenet of his faith, that Jesus is not divine. Yet he was shocked and complained some felt he was “attacking their faith.” Since that’s exactly what he was doing. Unsurprisingly, Morgan gave a weak interview, letting Aslan once again get away without significant challenge to his work. The liberal reaction to Green’s interview is a great example of journalists seeing what they choose to see, not reality. The interview wasn’t her best, but that’s not cause for the media feeding frenzy. 
The reason so many are after her is that she raised the uncomfortable question about why a Muslim was writing to deny the existence of the Christian faith. Sadly, most journalists did that so long ago that they fail to recognize the story.
def list_value(self, tree: "Tree") -> "SchemaNode":
    """Convert the given lark tree into a ``list_value`` SchemaNode."""
    converted = lark_to_list_value_node(tree)
    return SchemaNode(type="list_value", value=converted)
<reponame>xubinzheng/BlueCrew
//-------------------------------------------------------------------------------
// Licensed Materials - Property of IBM
// XXXX-XXX (C) Copyright IBM Corp. 2013. All Rights Reserved.
// US Government Users Restricted Rights - Use, duplication or
// disclosure restricted by GSA ADP Schedule Contract with IBM Corp.
//-------------------------------------------------------------------------------
//
// IBMQuery.h
// IBMData iOS SDK

#import <IBMData/IBMData.h>

/**
 The IBMQuery class represents queries for objects stored in the Mobile Data service.

 Create a query either for every object of a class (queryForClass:) or for one
 object identified by its id (queryForObjectId:), optionally narrow it with
 whereKey:equalTo:, then issue it asynchronously with find.
 */
@interface IBMQuery : NSObject

/**---------------------------------------------------------------------------------------
 * @name Getting Query Attributes
 * ---------------------------------------------------------------------------------------
 */

/** @return The className for a queryForClass type query. */
@property (nonatomic, readonly) NSString *className;

/** @return The objectId for a queryForObjectId type query. */
@property (nonatomic, readonly) NSString *objectId;

/**---------------------------------------------------------------------------------------
 * @name Creating Queries
 * ---------------------------------------------------------------------------------------
 */

/**
 Creates and returns a query that will match against all objects with the specified class name.

 @param classname The class name for the objects being sought
 @return (IBMQuery *) query for all objects with a given class name
 */
+(IBMQuery *)queryForClass:(NSString *)classname;

/**
 Creates and returns a query for a specific object (by object id).

 @param objectId The objectId for the object being sought
 @return (IBMQuery *) query for a specific object
 */
+(IBMQuery *)queryForObjectId:(NSString *)objectId;

/**---------------------------------------------------------------------------------------
 * @name Adding a Condition to a Query
 * ---------------------------------------------------------------------------------------
 */

/**
 Specify an equals condition against one of the fields in the object

 The whereKey:equalTo: method is a provisional API. APIs that are marked provisional are
 evolving and might change or be removed in future releases.

 @param key The name of the attribute to compare against
 @param value The value to compare with the attribute value
 @return void
 @warning For MVP we only support the equals condition but more can be added as needed
 */
- (void)whereKey:(NSString *)key equalTo:(id)value;

/**---------------------------------------------------------------------------------------
 * @name Issuing a Query
 * ---------------------------------------------------------------------------------------
 */

/**
 Issue the query to the Mobile Data service. If successful, the result value of the returned
 BFTask contains a reference to an array of objects that match the query conditions.

 @return The BFTask that can be used to obtain the query results once the query is completed.
 */
- (BFTask*)find;

@end
/**
 * This is called to write a bit of data to the Rx characteristic.
 * and will be called again from OnCharacteristicWrite until all
 * of the string has been written. The last call to this function
 * clears mString2Write which indicates to OnCharacteristicWrite
 * that no further calls to this function are needed in which case
 * it calls clearGattBusyFlagAndExecuteNext().
 */
private void writeChunkOfData() {
    int ibegin, iend;

    if (mBluetoothGatt == null) {
        Log.e("bgx_dbg", "writeChunkOfData(): BluetoothGatt is null");
        return;
    }

    // Fast-ack flow control: no transmit credit left, wait for the peer to ack.
    if (mFastAck && mFastAckTxBytes <= 0) {
        Log.d("bgx_fastAck", "FastAck Stall: mFastAckTxBytes = " + mFastAckTxBytes);
        return;
    }

    // Prefer the secondary Rx characteristic when the device exposes one.
    BluetoothGattCharacteristic rxChar;
    if (this.mRxCharacteristic2 != null) {
        rxChar = this.mRxCharacteristic2;
    } else {
        rxChar = this.mRxCharacteristic;
    }

    if (null == rxChar) {
        Log.e("bgx_dbg", "Error, no RxCharacteristic available.");
        // BUG FIX: previously execution fell through after logging and
        // dereferenced the null rxChar below, causing a NullPointerException.
        return;
    }

    synchronized (this.dataWriteSync) {
        if (null != this.mData2Write) {
            ibegin = this.mWriteOffset;
            iend = this.mData2Write.length;

            // Clamp the chunk to the device's maximum write size.
            if (iend - ibegin > this.deviceWriteChunkSize) {
                iend = ibegin + this.deviceWriteChunkSize;
            }

            // Clamp again to the remaining fast-ack transmit credit.
            // (Replaces a single-iteration for-loop that had the same effect;
            // mFastAckTxBytes > 0 is guaranteed by the stall check above.)
            if (mFastAck && (iend - ibegin) > mFastAckTxBytes) {
                iend = ibegin + mFastAckTxBytes;
            }

            rxChar.setValue(Arrays.copyOfRange(this.mData2Write, ibegin, iend));
            boolean writeResult = mBluetoothGatt.writeCharacteristic(rxChar);
            if (writeResult) {
                if (mFastAck) {
                    mFastAckTxBytes -= (iend - ibegin);
                    Log.d("bgx_fastAck", "mFastAckTxBytes: " + mFastAckTxBytes
                            + " (subtracted " + (iend - ibegin) + " bytes)");
                }
                Log.d("bgx_dbg", "Rx Char Write success.");
                if (this.mData2Write.length > iend) {
                    // More data remains; OnCharacteristicWrite will call us again.
                    this.mWriteOffset = iend;
                } else {
                    // Entire buffer written: reset state so the caller knows we're done.
                    this.mWriteOffset = 0;
                    this.mData2Write = null;
                }
            }
        }
    }
}
// New azure mixin client, initialized with useful defaults. func New() (*Mixin, error) { return &Mixin{ Context: context.New(), }, nil }
<filename>packages/core/test/di/injector/circular-deps/init-order.ts
// Shared record of the order in which named initializers ran; the
// circular-deps injector tests presumably inspect (and reset) this
// array to assert initialization order — confirm against the tests.
export const INIT_ORDER: string[] = [];

// Appends `name` to INIT_ORDER. Declared async so it can stand in for
// an asynchronous initialization hook; it does no awaiting itself.
export async function asyncIncrement(name: string) {
  INIT_ORDER.push(name);
}

// Identical behavior to asyncIncrement (also async); kept as a
// separately named hook for the test scenarios that use it.
export async function increment(name: string) {
  INIT_ORDER.push(name);
}
/*
 * __drop_list_execute --
 *     Clear the system info (snapshot and timestamp info) for the named checkpoints on the drop
 *     list.
 */
static int
__drop_list_execute(WT_SESSION_IMPL *session, WT_ITEM *drop_list)
{
    WT_CONFIG dropconf;
    WT_CONFIG_ITEM ckpt_name, unused_val;
    WT_DECL_RET;

    /* The drop list is formatted as a configuration string; walk its keys. */
    __wt_config_init(session, &dropconf, drop_list->data);
    for (;;) {
        ret = __wt_config_next(&dropconf, &ckpt_name, &unused_val);
        if (ret != 0)
            break;
        /* Each key names a checkpoint whose system info is cleared. */
        WT_RET(__wt_meta_sysinfo_clear(session, ckpt_name.str, ckpt_name.len));
    }
    /* Iteration ends with WT_NOTFOUND on success; anything else is a real error. */
    WT_RET_NOTFOUND_OK(ret);
    return (0);
}
<gh_stars>0 package com.daquilema.posfechados.modules.fiscalYear.service; import java.util.Date; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import com.daquilema.posfechados.core.BaseService; import com.daquilema.posfechados.core.ErrorControl; import com.daquilema.posfechados.modules.fiscalYear.entity.FiscalYear; import com.daquilema.posfechados.modules.fiscalYear.repository.FiscalYearRepo; import com.daquilema.posfechados.modules.logErrorExcel.entity.LogErrorExcel; @Service public class FiscalYearService extends BaseService<FiscalYear> { public FiscalYearService() { super(FiscalYear.class); } @Autowired private FiscalYearRepo fiscalYearRepo; public ResponseEntity<?> listYears() { try { return new ResponseEntity<List<FiscalYear>>(fiscalYearRepo.findAll(), HttpStatus.OK); } catch (Exception err) { return new ResponseEntity<ErrorControl>( new ErrorControl(err.getMessage(), HttpStatus.INTERNAL_SERVER_ERROR.value(), true), HttpStatus.INTERNAL_SERVER_ERROR); } } }
<reponame>kireevaa85/behavioral_patterns<gh_stars>0 package ru.kireev.visitor; /** * @author sergey * created on 12.09.18. */ public class CarService implements Visitor { @Override public void visit(Engine item) { System.out.println(item.checkEngine()); } @Override public void visit(Transmission item) { System.out.println(item.refreshOil()); } @Override public void visit(Brake item) { System.out.println(item.replaceBrakePad()); } }
Medication Nonadherence: Implications for patient health outcomes in pharmacy practice The primary objective of this review is (1) to better understand the prevalence and impact of medication nonadherence, (2) to identify risk factors for medication nonadherence, (3) to understand the association between nonadherence and its implications on patient health outcomes in pharmacy practice, and (4) to study interventions designed to improve patient adherence to prescribed medications for medical conditions, considering its impact on both medication adherence and patient health outcomes. Narrative review design by critical analysis of the literature of published paper-based journal articles were manually sorted. Additional references were obtained from citations within the retrieved articles. This narrative review surveyed the findings of the identified articles with data extracted to presents various strategies and resources on medication nonadherence related to patients and healthcare providers. Out of 121 published articles, only 64 articles have been considered according to surveyed identified articles to determine both subjective and objective medication adherence measures. The research in this field needs advances, including improved design of feasible long‐term interventions, objective adherence measures, and sufficient study power to detect improvements in patient health outcomes. Current methods of improving medication adherence for chronic health problems are mostly complex and ineffective so full benefits of treatment cannot be realized. To date, monitoring of patient medication adherence and use of interventions to improve adherence are rare in routine clinical practice. INTRODUCTION The World Health Organisation (WHO) defines adherence as 'an extent to which a person's behaviour (in taking medication, following a diet, and / or executing lifestyle changes) corresponds with agreed recommendations from a health care provider' (Sabaté and Sabaté, 2003). 
While the term adherence is often used interchangeably with compliance which is the extent to which a patient's behaviour matches the prescriber's recommendations (Horne et al., 2005), the latter is less preferred as it reflects poorly on patients' autonomy in treatment decisions. WHO reported that patients in developed countries with chronic diseases who are prescribed self-administered medications adhere to their medication regimens only 50% of the time. Medication nonadherence therefore is recognized as a significant public health issue, has it can lead to considerable morbidity, mortality, and increased health care costs. Adherence to medication has long been recognised as a crucial factor in achieving good therapeutic outcomes while medication nonadherence has been linked to treatment failure (Farmer, 1999;Hassan, et al., 2019;Lam and Fresco, 2015). Sokol et al. in a study among patients with diabetes and hypercholesterolemia, showed that higher medication adherence was significantly associated with lower disease-related medical costs, despite medication cost increase. The rate of hospitalisation was significantly lower among patients who maintained their adherence compared to those with lower adherence (Sokol, et al., 2005). 417 Medication nonadherence is multifactorial, extending beyond the patients' behaviour alone. In order to improve medication adherence rate among patients, it is important to understand the underlying factors that may cause this behaviour so effective strategies can be considered to overcome these barriers. ASSESSMENT OF ADHERENCE Clinicians around the world adopt various approaches to assess medication adherence among patients. Many of these approaches are quantitative, while some are qualitative in nature. However, with the dearth of a "gold standard" among these assessments, combination approaches are often recommended (Lam and Fresco, 2015). 
Assessment of medication adherence can be classified into two major categories (Brown and Bussell, 2011), subjective measurement of adherence, and Objective measurement of adherence. Subjective measurement of adherence Most researchers consider subjective measurements as the least reliable approach. It involves assessment by inquiring the patient or the caregiver about the pattern of medication usage. This method however is still widely used in clinical practice due to ease of administration, low cost and the ability to generate fast response (Lam and Fresco, 2015). Patient-Kept Diaries This method relies on self-reporting where patients record information about their medication taking patterns such as how often they comply to dosing, the number of tablets taken, time of medication administration and relevant information on meal intake. This method is however subject to patient memory and their ability to successfully return the diary to the clinician for review and discussion. Furthermore, patients have the tendency to complete the diary retrospectively which may not reflect their actual medicationtaking behaviour and lead to overestimation of adherence (Lam and Fresco, 2015;Oldenmenger et al., 2007). Patient interview This method involves interviewing patients about their medication-taking behaviour, general knowledge on disease, therapy-related knowledge Including details of medications taken by the patients, and administration timing. This method is subject to inter-assessor variability and evaluation technique. Questionnaires Questionnaires have been developed to overcome the limitations of self-reporting methods in the assessment of medication adherence. There are several validated instruments that are commonly used in practice 8-item Morisky Medication Adherence Scale MMAS-8 is one of the most commonly used tools in clinical practice, developed by (Morisky, et al., 2008). It contains 8 questions, the total scores of which reflect the level of adherence. 
A score of 8 indicates high level of adherence and a score of less than 6 points towards low adherence to medications. It is particularly useful as a screening and monitoring tool in clinical practice to recognise patients at high risk for nonadherence issues. Brief Medical Questionnaire (BMQ) The BMQ, developed in 1999 by Svarstad et al, is unique compared to other adherence assessment tools in that it is able to elicit a patient's barriers to adherence. In particular, this questionnaire screens for 3 crucial issues which are potential nonadherence, belief barriers and recall barriers with respect to patients' medication taking behaviour (Svarstad, et al., 1999). Self-Efficacy for Appropriate Medication Use Scale (SEAMS) The SEAMS was developed to overcome the limitations of existing adherence tools among patients with low literacy level (Risser, et al., 2007). This instrument consists of 21 sets of questions with a three-point response about medication taking behaviour. The higher the score, higher the level of adherence to medications. However, administration of this questionnaire is time consuming given the number of items to be completed Objective measurement of adherence Objective measurements of adherence encompass pill counting, secondary database analysis, electronic monitoring and biochemical measures. Also known as direct measures, these methods are considered better compared to subjective measurements in assessing medication adherence as they reflect physical confirmation that patients have taken the medicine as prescribed. Pill counting 100% The eponymous method involves asking patients to bring their medications at a scheduled appointment and counting the balance pills for a given course of the medication. 
The level of adherence is calculated based on the following formula: (Number of Pills Dispensed -Number of Pills Leftover) in a given time period / Number of Pills Prescribed for the same time period = % Patient is deemed adherent to the medication prescribed if the percentage is equivalent to or more than 80% with the assumption that the medication was truly taken by the patient (Brown and Bussell, 2011). Whilst this zero-cost strategy can be easily applied to almost all pharmaceutical dosage forms, it may however underestimate adherence in patients who collect their medication earlier than they are supposed to. Furthermore, this method is unable to provide information on the actual medication-taking pattern of the patient (Osterberg and Blaschke, 2005). Secondary Database Analysis Analysis of secondary databases such as pharmacy refill records allows quantification of medication adherence by using prescription refilling patterns. Whilst this method of assessment eliminates the "Hawthorne effect", which refers to the inclination of patients to change/ or improve their behaviour due to being assessed, it is only applicable for patients who are on long term medications as the data is derived based on the frequency of refilling. For those with acute illness and prescriptions without refills, this method is not suitable (Fairman and Motheral, 2000). Also, this method relies on the assumption that patients take the medication exactly as prescribed, which inadvertently leads to overestimation of adherence. Electronic Monitoring Adherence-monitoring devices such as the Electronic Medication Packaging (EMP) can be incorporated into the packaging of a prescription medication to gather data on adherence (Lam and Fresco, 2015). 
The conceptual structure of this technology is that whenever the patient opens the container to take medicine, a microprocessor embedded in the container cap will record the time and date of each opening (Diaz et al., 2001), which provides information about their adherence with the assumption that the medication is consumed by the patient. However, this technology is not entirely foolproof. Estimation of adherence from EMP devices may be spurious if patients discard the medication from the container without ingesting them or transfer the medication to another container. Furthermore, EMP devices are cost-intensive which includes hardware and software to retrieve data as well as operational costs. Biochemical measures Biochemical measurements involve determining the concentration of a drug or its metabolites in biological fluids, particularly blood and urine. In some instances, biological markers are given together with the drugs to ascertain the presence of a drug in the blood (Farmer, 1999;Lam and Fresco, 2015). These provide evidence that patients have indeed ingested their medications. Although this method flaunts accurate results, it does not always describe the patient's medicationtaking behaviour. There are chances that patients who are aware that they must undergo testing may take their medication prior to clinic visit in order to appear as adherent to their medication. This is described as the "whitecoat adherence". As most drugs have plasma half-life of 12 hours or less, it may only take 2 to 3 days of regular dosing prior to the blood tests to show a drug concentration within the therapeutic range, while any noncompliance that may have occurred previously goes undetected. Apart from that, these methods require qualified staff and techniques to operate which contribute to the operational cost. Illustrate the summary of medication adherence measurement tools and there applications (Table I). 
GROUND OBSTACLES Nonadherence is multifactorial and cannot be blamed solely on the patient. There are many other obstacles to adherence which must first be acknowledged and recognized. A WHO report by Sabaté E. (2003) linked barriers of adherence to five inter-related factors which includes: 1) socioeconomic status; 2) therapy; 3) patient factors; 4) condition or disease and 5) healthcare systems (Sabaté and Sabaté, 2003). Simpson et al. (2006) found that around a quarter of individuals have poor adherence to the treatment regimen prescribed which is a prominent obstacle to therapeutic outcomes and poses a great challenge to the healthcare providers (Simpson et al., 2006). Socio economic status Race and cultural beliefs The race is not a predictor of medication adherence, irrespective of whether the affiliates of a specific race are living in their country of origin or away as foreigners. Cross-sectional national survey among Medicare beneficiaries ≥65 years of age, (n=14,829) were found Blacks and Hispanics were more likely than whites to report cost-related nonadherence (35.1%, 36.5%, and 26.7%, respectively, p< .001). There were no racial/ethnic differences in nonadherence caused by experiences or self-assessed needs (Gellad, Grenard, and McGlynn, 2009). Age Age has an inconsistent effect on adherence. WHO recommends to assess age independently for each disease, e.g., by patient characteristics and age-related developmental grouping (i.e. children, teenagers, adults and elderly patients) (Sabaté and Sabaté, 2003). Studies conducted in Malaysia by Paraidathathu et al., (2013) and Chew et al., (2015) concluded that adherence to the medication improved with increasing age in diabetes (Ahmad, Ramli, Islahudin, and Paraidathathu, 2013). The latter study reported a medication adherence rate of 64.2% in patients> 60 years old of age, compared to 49.4% in patients <50 years old (p=0.001) (Gellad et al., 2009). 
In contrast, the Sunderland study showed patients in younger age groups (30-40 years old) had high adherence (82%) compared to older patients (75%). These differences might be due to older patients having strong perceptions about disease treatment and medication adverse effects (Khan, et al., 2014). Gender A study in Malaysia (Ahmad et al., 2013) and Hong Kong (Wong, et al., 2010) showed that Chinese female patients were more adherent to antihypertensive and anti-diabetic medications when compared to the Chinese male patients. A finding in the US, however, reported that men were significantly more adherent than women in taking medication for chronic conditions (Manteuffel et al., 2014). It was postulated that women, as primary caregivers, spend less time and energy taking care of themselves than they do taking care of others (Geboers et al., 2015). Social support A study on heart and lung transplant patients (n=304) found adherence improved by 2.6 fold in patients who received family support compared to those who received no support at all (p<0.05). Gellad et al., (2009) in a systematic review found that only four out of seventeen reviewed articles used a well-established tool to measure association between social support and patient adherence. He concluded that social support alone is not a strong, independent factor affecting adherence, as it also involves other factors such as patients, healthcare providers, systems and the disease itself (Gellad, et al., 2007). Therapy Complexity of treatment regimen Treatment complexity highly contributes to patients' nonadherence. Ingersoll and Cohen (2008) studied "regimen factors" on medication adherence for chronic diseases and found that poor adherence was associated with treatment complexity. However, another systematic review in HIV patients found the reduced tendency to report pill burden as a barrier to adherence (Shubber et al., 2016).
Adverse effects of the treatment This refers to a form of intentional nonadherence in which patients stop taking their medication due to the adverse effects experienced, without consulting their healthcare providers. In Italy, a study on HIV patients found that no adherent patients reported 3.6-30% of adverse effects impacting their medication adherence (Ammassari et al., 2001). Patient factors Forgetfulness Forgetfulness, categorised as unintentional nonadherence, is the main barrier to medication adherence. A national survey conducted in the US found that 20% of hypertensive patients have poor adherence to their medication and 46% of them reported forgetfulness as their main reason for nonadherence (Egan, Lackland, and Cutler, 2003). Similarly, a study conducted in Malaysia on hypertensive patients also found that the primary reason for the patient nonadherence was forgetfulness (Ramli, et al., 2012). Lack of understanding and patient perception of the disease Patient behaviours and beliefs about disease and medicines can influence adherence in meaningful and complicated ways (Martin, et al., 2005). A qualitative study done in Malaysia among diabetic Malay patients to assess their perspectives in managing the diseases revealed that the patients thought their disease can be cured and that should not restrict their diet when they are on insulin treatment (Ali and Jusoff, 2009). Therefore, robust strategies need to be implemented to ensure provision of adequate patient education on the disease and its associated treatment. Low health literacy Health literacy can be defined as 'the degree to which people are able to access, understand, appraise, and communicate information to engage with the demands of different health contexts in order to promote and maintain health across the life-course' (Geboers et al., 2015). 
Patients with low literacy levels may experience problems in understanding directions given and may interpret medication instructions differently leading to poor adherence and poor therapeutic outcomes (Jimmy and Jose, 2011). William et al., (1995) in a study comprising of more than 2500 patients found that almost 33% of the patients had minimal or no health literacy regarding their prescriptions, appointment schedules and on how to read an informed consent document (Williams et al., 1995). In contrast, a meta-analysis was done by Geboers et al. (2015) among elderly patients stated that this study provides the reason to feel ambiguous about the presence of robust evidence between health literacy and adherence among elderly patients (Geboers et al., 2015). Condition or disease Severity of the condition Theoretically, patients who suffer from severe diseases or complications tend to have lower adherence to their medications due to the high pill burden. A study by Basco (2009) among bipolar disease patients found that 64% were no adherent to their medication prior to the admission (Basco andSmith, 2009), whereas Brown (2011) reported decrease in medication adherence after catastrophic events like stroke (Brown and Bussell, 2011). A meta-analysis done by DiMatteo et al. (2007) found that most patients with severe conditions (HIV, ESRD, cancer and heart disease) are less adherent to their medication, regardless of objective or subjective health assessment (DiMatteo, Haskard, and Williams, 2007). The reason could be the physical, psychological and other factors that significantly reduce patients' determination towards adherence. Comorbidities Multiple comorbidities may contribute to nonadherence due to the pill burden in order to manage their diseases. A study by Rolnick et al (2013) showed lower adherence in patients with multiple conditions, drug regimen and frequency of dosing. 
Contrasting findings were reported by a Hong Kong study done in hypertensive patients (Rolnick, et al., 2013). Similarly, a study in Malaysia examining adherence in diabetics found that patients with comorbidities reported higher medication adherence compared to patients with no comorbidities (Gellad et al., 2009). Health systems Communication Inadequate communication between providers and patients with chronic illnesses can further complicate patient comprehension of illness, its potential complications, and the importance of medication adherence (Brown and Bussell, 2011). Factors associated with poor communication from a prescriber perspective may include complex medication regimen prescriptions as well as inadequate explanation on the benefits and risks of the prescribed regimens (Osterberg and Blaschke, 2005), It may also include failure to obtain an accurate patient medical history and reduced time span to discuss any issues related to patient wellbeing. The Jimmy and Jose (2011) study on medication adherence found that 40-60% of patients incorrectly interpret what their doctors expect of them 10-80 minutes after consultation (Jimmy and Jose, 2011). Health services Disintegrated healthcare frameworks cause nonadherence. In an overburdened health care system where the patient population is large, lack of staff could lead to multitasking of the prescribers and thus, less time is available for them to engage with patients (Brown and Bussell, 2011). Medications cost and health insurance plan reimbursement Unreasonable drug costs may contribute to poor adherence. Egan et al (2003) reported that 24% of patients who were no adherent to antihypertensive agents said that the medications were too expensive (Egan et al., 2003). 
Another study highlighting the high cost of medication was conducted using pharmacy claim records of 6,236 patients taking statin medication, and reported a significant reduction in adherence as the cost of medication rose (p < 0.01) (Pedan, Varasteh, and Schneeweiss, 2007). Inadequate or non-existent reimbursement by health insurance plans contributes to poor adherence due to potential partial coverage of medical costs, including medicines for eligible patients who may still need to pay a subsidy for health services and care. Additionally, patients who utilize public insurance were found to be four times more nonadherent to their medication and thus are at high risk of poor health outcomes (Dew et al., 2008). ADHERENCE IMPROVEMENT STRATEGIES Medication adherence is a key obstacle in achieving optimum treatment outcomes for patients as target outcomes cannot be obtained if adherence is low. Strategies need to be planned and implemented to enhance medication adherence by considering seven main factors, which are: 1) patient factors, 2) physician or healthcare providers, 3) medication regimens, 4) healthcare systems, 5) individualized care plans, 6) Multisystemic Therapy (MST), and 7) technology-mediated interventions (TMI). Diagram 1 shows the seven factors for adherence improvement strategies. Patient factors The key stakeholder in determining medication adherence is the patient. Hence, they need to be provided with appropriate patient education in adherence improvement plans. A study by Balamurugan et al. evaluated the effectiveness of a diabetes self-management education (DSME) program utilizing an intervention that provided patient education on nutrition and diabetes self-management. The study found that intervention patients registered a 0.45% reduction in HbA1c, and a reduced hospitalisation rate when compared to control (Balamurugan, et al., 2006). 
A systematic review showed that patient education by pharmacists improved adherence particularly when patients were educated about disease conditions, medications and its benefits, possible adverse drug reactions and lifestyle modifications. In this review, five out of eight studies showed significant improvement in patient adherence towards antidiabetic medications (Omran, et al., 2012). Patient education on medication side effects should be prioritized to alleviate patient concern as this may contribute to low adherence rates (Garner, 2010). Poor health literacy also impacts patient adherence. Studies show that encouraging universal precaution strategies and providing verbal counselling together with pictorial aids on medication labels can help to improve medication adherence (Katz, et al., 2006). Physician or healthcare providers Healthcare providers should practice patient-centred care by promoting active involvement of the patient in the decision-making process throughout their health journey. Good communication skills using universal precaution strategies to reinforce patient education are essential to confirm patient understanding. Cultural competency will gain patient trust as healthcare providers will be able to respect different beliefs and attitudes their patients have towards health and medication adherence. Medication regimens Polypharmacy can result in lower adherence to medications as patients struggle with complicated medication regimens (Benner et al., 2009). Deprescribing should be practiced to provide simple, effective and safe therapy to patients as simplification of medication regimens has been linked to an increase in medication adherence (Schroeder, et al., 2004). This is particularly helpful for patients suffering from multiple comorbidities and long-term chronic conditions (Richter, et al., 2003). 
Effective ways of simplifying medication regimens include reducing dose frequency in favour of longer dosing intervals, introducing polypills or combination-therapies, and preventing duplication of therapy when treating a medical condition (Claxton, et al., 2001;Malo et al., 1995;Monedero and Caminero, 2011;Thiele et al., 2012) Health care systems Interprofessional collaboration will encourage time-based efficiency and multidisciplinary input within healthcare systems in providing patient education and promoting medication adherence for patients (Hassan et al., 2019). Patient-centred care Patient-centred care takes into account patient factors such as lifestyle and medication preferences, health attitudes and beliefs with an aim to tailor treatment recommendations and patient education to optimize patient adherence. Research has shown that patient-centred care plans result in higher patient compliance towards medications including repeat prescriptions (Gray et al., 2012). Medication adherence clinics create specialty-focused patient-centred care plans for patients with long-term chronic care conditions. An example of this would be the Medication Therapy Adherence Clinics (MTAC) in Malaysian government hospitals that offer patient education, patient reminders, educational aids and booklets to patients suffering from diabetes, respiratory conditions, or those who may have been prescribed with antiretroviral (ARV) therapy or warfarin. Multisystemic Therapy (MST) Multisystemic therapy refers to intensive family-or community-based programs designed to result in positive changes across society, developed originally with the goal to manage social behavioural problems among at-risk youth. This interventional model has also been explored further to support adolescents suffering from chronic diseases such as HIV, diabetes or asthma. 
A study to determine the effect of MST in treatment of adolescent diabetes in comparison to telephone support services found a significantly greater reduction of HbA1c in the MST-intervention group when compared to the telephone-support group (1.01% vs 0.74%). The MST intervention involved family education focused on diabetes, medication and lifestyle management as well as developing parent communication skills to provide supportive care to their diabetic children. Schools were involved with personnel trained to provide glucose testing for diabetic students and weekly reports to parents while community-based interventions involved setting up support groups. Parents reported an improvement in their children's diabetes management adherence according to the Diabetes Management Scale at 7 months and 12 months (Ellis et al., 2012). This model can also be used in adult patients who require intensive health support. An example is the methadone maintenance therapy which aims to treat and rehabilitate drug addiction with a combination of pharmacology, psychology and sociology therapy with the support of family, peers, community support groups and health care providers (Rusdi, et al., 2008). Technology-mediated interventions (TMI) Technology-mediated intervention (TMI) incorporates elements of digital health to provide patient education and aid patients in selfmanagement of their medical conditions. Digital technology such as telehealth platforms, smart devices, health applications and software, chat messaging or text reminder services have been used increasingly especially in the pandemic era. Mistry et al found that 19 out of 38 studies showed significant adherence outcomes when TMIs were used to improve patient adherence (Mistry et al., 2015). Telephone-based services have been provided successfully to patients with studies showing this method to be convenient, cheap, feasible and applicable for a variety of monitoring and lifestyle modification support services. 
A study using monthly structured telephone interviews to monitor depression symptoms in patients, and provide medication self-management and adherence advice found increased medication adherence in intervention groups using the modified Morisky score (2.7 vs 2.53, p=0.0042) (Gensichen et al., 2009). Text reminders have also been helpful in addressing nonadherence. In Maduka et al., a strategy integrating adherence counselling together with text reminders for HIV patients taking ARV therapy showed a significant difference in medication adherence outcomes between two groups (p=0.022) where intervention group achieved 76.9% of targeted adherence while only 55.8% of the control group achieved this target. Clinically significant outcomes saw a rise in CD4 cell count for the intervention group when compared to control group (p=0.007), proving that adherence counselling with text reminders can help HIV patients to successfully adhere to ARV therapy leading to better clinical outcomes (Maduka and Tobin-West, 2013). Engaging younger patients to develop better medication adherence behaviours can also be explored through the use of video games which were found to be effective in Kato et al where adolescent and younger adult cancer patients were given condition-specific and medication-specific knowledge as well as encouraged to adopt self-care behaviours through gamification strategies. The intervention group showed a higher concentration of anticancer metabolites (p=0.002) and positive outcomes in cancer-related knowledge and selfefficacy when compared to control (Kato, et al., 2008). PHARMACOECONOMICS The purpose of studying pharmacoeconomics is to identify, estimate and compare the cost of medications as well as to consider the risks and benefits of any therapy, service or programme (MTM) (Mauskopf, 2001). Nonadherence not only affects treatment benefits, but also affects financial burden on patients, payers and society (Richter et al., 2003). 
A systematic review by Chiatti et al. found that out of 21 selected studies, 23.8% of the studies (5/21) measured economic burden due to nonadherence. Most adherence-related pharmacoeconomics studies (4/5) demonstrated that nonadherence was related to cost loss. At the same time, the studies concluded that adherence interventions were costeffective (Chiatti et al., 2012). An observational study was done by Truong et al. to evaluate potential cost saving based on cost avoidance by handling four years of Medication Therapy Management (MTM) for pharmacoeconomical clarification of the intervention programme. Medication-related problems (MRPs) were identified by pharmacists, and categorised into indication, effectiveness, safety or adherence. The result showed that the main MRPs detected were subtherapeutic dose (38%), nonadherence (19%) and untreated indication (16%). Correspondingly, detected nonadherence savings cost about $ 25,434-118,535 (Truong et al., 2015). Another study showed that an intensive asthma treatment programme which includes the same attending physician, continuous patient and family members' education and adequate dose physician referral can reduce the cost of the treatment in cost differences evaluated before and after the intervention. The study found that the cost per patient per year for inpatient care before the treatment was $22,999 ± $20.64, but surprisingly reduced to $1107± 1618 after the intervention (p<0.0017) (Levenson, Grammer, Yarnold, and Patterson, 1997). A cohort study was done to evaluate long-term cost and outcome of treating hypertension, comparing the effect of physician-pharmacist intervention and physician management alone. This finding supported the physician-pharmacist programme demonstrated cost-effectiveness especially for high-risk patients (Kulchaitanaroaj et al., 2017). Clinical pharmacists also play a major role in decreasing the cost of care, hospitalization and medications (Dunn et al., 2015). 
Clinical pharmacists also help to improve medication adherence and reduce medication costs by simplifying patient medication regimens. Prescribers should always collaborate with pharmacists to review therapy regimen effectiveness, cost and propensity for adverse drug events to optimise patient adherence. Simplification of medications is especially useful for diseases whose treatment is highly dependent on patient adherence, such as HIV and tuberculosis. Simplification of HIV treatment has shown benefits for life expectancy, quality-adjusted life expectancy and cost of treatment. The lifetime cost was estimated to be lower for the simplification strategy group than for the standard-of-care group, with the difference estimated at $26,500-$72,400 per person (Schackman et al., 2007). ADHERENCE COUNSELING Adherence counselling is not an easy task for pharmacists, since they need to tailor the counselling to the patient's behaviour and real-life barriers. Adherence counselling has been ancillary to many adherence interventions because behaviour and cognition are the fundamental determinants of adherence (Coetzee, et al., 2016). There are skills and models that can be considered in adherence counselling to make sure that the counselling is effective and meaningful. The purpose of adherence counselling is to increase patients' knowledge about their disease and treatment, enhance their motivation and build patients' self-efficacy. The information-motivation-behavioural (IMB) model is a method of informing the patient of a positive behaviour they need to adopt, motivating the patient to adopt the behaviour, and coaching the patient to execute the behaviour in their life. The outcome of the IMB model is patients who have confidence in their life in any occasion or situation (Lin and Scott, 2012). Rubak et al. conducted a systematic review to evaluate the effectiveness of motivational interviewing in different areas of disease and to identify factors shaping outcomes. 
Seventy-two randomised controlled trials (RCTs) were included in the analysis, of which 39% (28/72) were for the treatment of alcohol addiction. Motivational interviewing was also implemented in the counselling of diabetes and asthma (3/72), smoking cessation (12/72), weight loss or physical activity (10/72) and psychiatric or addiction problems (19/72). Out of these studies, 73% (53/72) showed a significant effect on treatment. The median counselling duration was 60 minutes, with 81% (26/32) of the 60-minute counselling sessions showing a significant effect. The total number of counselling encounters was also examined to find the optimum effect of the counselling: 87% (13/15) of the studies with more than five encounters demonstrated a better effect. In addition, the median follow-up duration was 12 months, with 81% (26/32) of the studies with a 12-month follow-up period showing a better effect (Rubak, et al., 2005). Continuing training should be done to train pharmacists and other health care providers in these motivational interviewing skills in order to produce good counsellors. Discharge counselling is part of the clinical pharmacist's job scope and has been implemented to reduce the incidence of post-discharge adverse events by enhancing medication adherence. It has been found that about 23% of discharged patients experience post-discharge adverse events, mainly due to newly prescribed medications. An observational study was done to evaluate the effect of discharge counselling on medication adherence; the study showed that the adherence rate increased significantly from 51% to 66.7% (p<0.01) between the observational period and after the intervention. The percentage of unfilled medications also reduced significantly from 50.2% to 32.5% (p<10-7). This finding highlights that counselling before discharge is very important (Leguelinel-Blache et al., 2015). 
LIMITATIONS This is not a comprehensive review on all the existent medication adherence measures. Rather it is focused on the different types available and the most commonly used in different settings. The types of setting and population in the studies that are used as examples vary in different measures which can make comparisons cumbersome. If researchers and healthcare professionals are looking for measures for a specific or rare condition, they should refer to studies that have a clearer validation. This review is limited to researchers and health professionals conducting studies in English language IMPLICATION AND DIRECTION FOR FUTURE RESEARCH There are ongoing public health reforms worldwide to minimize unnecessary healthcare expenditure and maximise public health outcomes. Improving medication adherence is a significant outcome in clinical practice and research. The lack of a universal guideline on medication adherence measures provides room for research on which measure, or which combination of measures, is the most appropriate for different target populations and health problems. Meanwhile, research on improving the currently available measures and/or on the development of new ways to measure and uncover reasons behind medication nonadherence should also be further explored. CONCLUSION Adherence to medication has been recognized as a crucial factor to achieve therapeutic outcomes. Poor to nonadherence is viewed as a prominent obstacle to patient therapeutic outcomes, poses a great challenge to healthcare providers, is found to reduce the quality of life and results in additional healthcare costs. Nonadherence cannot be blamed solely on a patient. It is multifactorial and thus, in order to increase adherence, various obstacles to adherence must therefore be acknowledged and recognized. 
Despite the fact that patient education is the ultimate way to increase adherence, utilization of adherence aids, strong motivation and moral support are also shown to improve medication adherence. Health care providers should be able to recognize possible intervention that is practical for implementation to improve medication adherence within their capabilities. It should be a multidisciplinary approach that is accomplished with collaborative support of all key stakeholders involved in medication use.
O32: TRANSPARENCY IN SURGICAL RANDOMISED CONTROLLED TRIALS: CROSS-SECTIONAL, OBSERVATIONAL STUDY Randomised controlled trials (RCTs) often provide the scientific basis on which commissioning and treatment decisions are made. It is essential that their results and methods are reported transparently. The aim of this study was to explore transparency with respect to trial registration, disclosure of funding sources, conflicts of interest (COI), and data sharing. This was a cross-sectional review of surgical RCTs. Data were extracted from RCTs in ten high-impact journals published in the years 2009, 2012, 2015, and 2018. Outcomes of interest were the incidence of reported trial registration, disclosure of funding sources, disclosure of investigator COI, and presence of a statement of data sharing plans. A total of 475 RCTs were eligible for analysis. Trial registration was present in 73 (67%) studies in 2009, 137 (84%) in 2012, 111 (89%) in 2015 and 110 (93%) in 2018. Funding statements were provided in 55%, 65%, 69.4%, and 75.4% of manuscripts, respectively. Conflicts of interest statements were provided in 49.5%, 89.1%, 94.6%, and 98.3% of manuscripts, respectively. Data sharing statements were present in only 15 (3.2%) RCTs. Eleven of these were in studies published most recently in 2018. Trial registration, presence of funding statements, and disclosure of personal conflicts of interest in surgical RCTs have improved rapidly over the last 10 years. In contrast, disclosure of data sharing plans is exceptionally low. This may contribute to research waste and represents an essential target for improvement.
"""Contains the tools needed to reproduce experiments specific to QM9.""" import os import time import pandas as pd import numpy as np from xGPR.xGP_Regression import xGPRegression from xGPR.data_handling.dataset_builder import build_offline_sequence_dataset from .core_exp_funcs import get_qm9_train_dataset, get_target_qm9_files def molfit(start_dir, pretransform_dir = None): """Fits the model for QM9.""" os.chdir(start_dir) logpath = os.path.abspath(os.path.join("final_results", "molfit.txt")) hparam_df = pd.read_csv(os.path.join("final_results", "moltune.txt")) for i in range(hparam_df.shape[0]): preset_hparams = np.asarray([float(f) for f in hparam_df.iloc[i,2].split("_")]) kernel = hparam_df.iloc[i,1] dataset_specifier = hparam_df.iloc[i,0] target_dir = os.path.join(start_dir, "benchmark_evals", "chemdata", "full_soap") split_pts = [] os.chdir(target_dir) qm9_model = xGPRegression(training_rffs = 12, fitting_rffs = 16384, device = "gpu", kernel_choice = kernel, verbose = True, kernel_specific_params = {"split_points":split_pts, "polydegree":2}) for data_type in ["u298_atom", "h298_atom", "u0_atom", "zpve", "cv", "g298_atom"]: start_time = time.time() train_xfiles, train_yfiles = get_target_qm9_files("train", data_type) os.chdir("..") test_xfiles, test_yfiles = get_target_qm9_files("test", data_type) os.chdir(target_dir) train_dset = build_offline_sequence_dataset(train_xfiles, train_yfiles, skip_safety_checks=True) for fitting_rff in [16384, 32768, 65536]: qm9_model.fitting_rffs = fitting_rff pre_dset = qm9_model.pretransform_data(train_dset, pretransform_dir = pretransform_dir, preset_hyperparams = preset_hparams) preconditioner, _ = qm9_model.build_preconditioner(pre_dset, max_rank = 3500, preset_hyperparams = preset_hparams, method="srht_2") qm9_model.fit(pre_dset, preconditioner=preconditioner, mode="cg", preset_hyperparams = preset_hparams, suppress_var = True, tol=1e-9, max_iter = 1000) end_time = time.time() fitting_wclock = end_time - start_time 
print(f"Fitting wallclock time: {fitting_wclock}", flush=True) all_preds, all_y = [], [] for xfile, yfile in zip(test_xfiles, test_yfiles): xdata, ydata = np.load(xfile), np.load(yfile) preds = qm9_model.predict(xdata, get_var = False, chunk_size=100) all_preds.append(preds) all_y.append(ydata) all_preds = np.concatenate(all_preds) all_y = np.concatenate(all_y) mae = np.mean(np.abs(all_y - all_preds)) print(f"MAE for data type {data_type} on {dataset_specifier} is {mae}", flush=True) with open(logpath, "a+", encoding="utf8") as output_file: output_file.write(f"{dataset_specifier},{kernel},{hparam_df.iloc[i,2]}," f"{fitting_rff},{fitting_wclock},{mae},{data_type}\n") pre_dset.delete_dataset_files()
<reponame>iqqmuT/flipper
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import {BaseDevice} from 'flipper';
import {Crash, shouldShowiOSCrashNotification} from '../index';
import {parseCrashLog, parsePath} from '../index';
import {TestUtils} from 'flipper-plugin';
import {getPluginKey} from 'flipper';
import * as CrashReporterPlugin from '../index';

// Builds a Crash fixture with a stringified notification id and the current
// date; used to seed plugin state in the tests below.
function getCrash(
  id: number,
  callstack: string,
  name: string,
  reason: string,
): Crash {
  return {
    notificationID: id.toString(),
    callstack: callstack,
    reason: reason,
    name: name,
    date: new Date(),
  };
}

// Field-by-field equality check for two Crash objects. Dates are compared
// at day granularity (toDateString) so sub-second clock drift between
// fixture creation and assertion does not fail the test.
function assertCrash(crash: Crash, expectedCrash: Crash) {
  const {notificationID, callstack, reason, name, date} = crash;
  expect(notificationID).toEqual(expectedCrash.notificationID);
  expect(callstack).toEqual(expectedCrash.callstack);
  expect(reason).toEqual(expectedCrash.reason);
  expect(name).toEqual(expectedCrash.name);
  expect(date.toDateString()).toEqual(expectedCrash.date.toDateString());
}

// iOS log containing both "Exception Type:" and "Date/Time:" markers:
// parseCrashLog should extract the reason and the timestamp.
test('test the parsing of the date and crash info for the log which matches the predefined regex', () => {
  const log =
    'Blaa Blaaa \n Blaa Blaaa \n Exception Type: SIGSEGV \n Blaa Blaa \n Blaa Blaa Date/Time: 2019-03-21 12:07:00.861 +0000 \n Blaa balaaa';
  const crash = parseCrashLog(log, 'iOS', null);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('SIGSEGV');
  expect(crash.name).toEqual('SIGSEGV');
  expect(crash.date).toEqual(new Date('2019-03-21 12:07:00.861'));
});

// Exception type present but no "Date/Time:" line: the date stays undefined.
test('test the parsing of the reason for crash when log matches the crash regex, but there is no mention of date', () => {
  const log =
    'Blaa Blaaa \n Blaa Blaaa \n Exception Type: SIGSEGV \n Blaa Blaa \n Blaa Blaa';
  const crash = parseCrashLog(log, 'iOS', undefined);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('SIGSEGV');
  expect(crash.name).toEqual('SIGSEGV');
  expect(crash.date).toBeUndefined();
});

// Alphanumeric log with no recognizable markers falls back to the generic
// "Cannot figure out the cause" reason/name.
test('test the parsing of the crash log when log does not match the predefined regex but is alphanumeric', () => {
  const log = 'Blaa Blaaa \n Blaa Blaaa \n Blaa Blaaa';
  const crash = parseCrashLog(log, 'iOS', undefined);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('Cannot figure out the cause');
});

// Non-ASCII exception type is not matched by the iOS regex -> generic cause.
test('test the parsing of the reason for crash when log does not match the predefined regex contains unicode character', () => {
  const log =
    'Blaa Blaaa \n Blaa Blaaa \n Exception Type: 🍕🐬 \n Blaa Blaa \n Blaa Blaa';
  const crash = parseCrashLog(log, 'iOS', undefined);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('Cannot figure out the cause');
  expect(crash.date).toBeUndefined();
});

// Empty log: callstack is preserved verbatim, cause falls back to generic.
test('test the parsing of the reason for crash when log is empty', () => {
  const log = '';
  const crash = parseCrashLog(log, 'iOS', undefined);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('Cannot figure out the cause');
  expect(crash.date).toBeUndefined();
});

// Well-formed Android "FATAL EXCEPTION" log: name is the first line, reason
// is the exception line, and the caller-supplied date is passed through.
test('test the parsing of the Android crash log for the proper android crash format', () => {
  const log =
    'FATAL EXCEPTION: main\nProcess: com.facebook.flipper.sample, PID: 27026\njava.lang.IndexOutOfBoundsException: Index: 190, Size: 0\n\tat java.util.ArrayList.get(ArrayList.java:437)\n\tat com.facebook.flipper.sample.RootComponentSpec.hitGetRequest(RootComponentSpec.java:72)\n\tat com.facebook.flipper.sample.RootComponent.hitGetRequest(RootComponent.java:46)\n';
  const date = new Date();
  const crash = parseCrashLog(log, 'Android', date);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual(
    'java.lang.IndexOutOfBoundsException: Index: 190, Size: 0',
  );
  expect(crash.name).toEqual('FATAL EXCEPTION: main');
  expect(crash.date).toEqual(date);
});
// Android log with no recognizable format and no date -> generic cause,
// undefined date.
test('test the parsing of the Android crash log for the unknown crash format and no date', () => {
  const log = 'Blaa Blaa Blaa';
  const crash = parseCrashLog(log, 'Android', undefined);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('Cannot figure out the cause');
  expect(crash.date).toBeUndefined();
});

// Multi-line Android log without an exception line: the first line (trailing
// space included) becomes the crash name, the reason stays generic.
test('test the parsing of the Android crash log for the partial format matching the crash format', () => {
  const log = 'First Line Break \n Blaa Blaa \n Blaa Blaa ';
  const crash = parseCrashLog(log, 'Android', null);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('First Line Break ');
});

// An Android-formatted log parsed with os='iOS' must NOT match: the iOS
// parser only understands the "Exception Type:" style.
test('test the parsing of the Android crash log with os being iOS', () => {
  const log =
    'FATAL EXCEPTION: main\nProcess: com.facebook.flipper.sample, PID: 27026\njava.lang.IndexOutOfBoundsException: Index: 190, Size: 0\n\tat java.util.ArrayList.get(ArrayList.java:437)\n\tat com.facebook.flipper.sample.RootComponentSpec.hitGetRequest(RootComponentSpec.java:72)\n\tat com.facebook.flipper.sample.RootComponent.hitGetRequest(RootComponent.java:46)\n';
  const crash = parseCrashLog(log, 'iOS', null);
  expect(crash.callstack).toEqual(log);
  expect(crash.reason).toEqual('Cannot figure out the cause');
  expect(crash.name).toEqual('Cannot figure out the cause');
});

// getPluginKey: with no selected app the device serial becomes the prefix.
test('test the getter of pluginKey with proper input', () => {
  const device = new BaseDevice('serial', 'emulator', 'test device', 'iOS');
  const pluginKey = getPluginKey(null, device, 'CrashReporter');
  expect(pluginKey).toEqual('serial#CrashReporter');
});

// getPluginKey: neither app nor device -> 'unknown' prefix.
test('test the getter of pluginKey with undefined input', () => {
  const pluginKey = getPluginKey(null, null, 'CrashReporter');
  expect(pluginKey).toEqual('unknown#CrashReporter');
});

// getPluginKey: a selected app alone is a sufficient prefix.
test('test the getter of pluginKey with defined selected app', () => {
  const pluginKey = getPluginKey('selectedApp', null, 'CrashReporter');
  expect(pluginKey).toEqual('selectedApp#CrashReporter');
});

// getPluginKey: the selected app takes precedence over the device serial.
test('test the getter of pluginKey with defined selected app and defined base device', () => {
  const device = new BaseDevice('serial', 'emulator', 'test device', 'iOS');
  const pluginKey = getPluginKey('selectedApp', device, 'CrashReporter');
  expect(pluginKey).toEqual('selectedApp#CrashReporter');
});

// A freshly started plugin exports an empty crash list.
test('test defaultPersistedState of CrashReporterPlugin', () => {
  expect(
    TestUtils.startDevicePlugin(CrashReporterPlugin).exportState(),
  ).toEqual({crashes: []});
});

// reportCrash appends the crash to the exported state.
test('test helper setdefaultPersistedState function', () => {
  const crash = getCrash(0, 'callstack', 'crash0', 'crash0');
  const plugin = TestUtils.startDevicePlugin(CrashReporterPlugin);
  plugin.instance.reportCrash(crash);
  expect(plugin.exportState()).toEqual({crashes: [crash]});
});

// Two reported crashes accumulate in order; the second one is the last
// element of the crashes atom.
test('test getNewPersistedStateFromCrashLog for non-empty defaultPersistedState and defined pluginState', () => {
  const crash = getCrash(0, 'callstack', 'crash0', 'crash0');
  const plugin = TestUtils.startDevicePlugin(CrashReporterPlugin);
  plugin.instance.reportCrash(crash);
  const pluginStateCrash = getCrash(1, 'callstack', 'crash1', 'crash1');
  plugin.instance.reportCrash(pluginStateCrash);
  const crashes = plugin.instance.crashes.get();
  expect(crashes).toBeDefined();
  expect(crashes.length).toEqual(2);
  expect(crashes[1]).toEqual(pluginStateCrash);
});

// An unparseable log still produces a crash entry, with the generic cause
// and the next sequential notification id.
test('test getNewPersistedStateFromCrashLog for non-empty defaultPersistedState and defined pluginState and improper crash log', () => {
  const plugin = TestUtils.startDevicePlugin(CrashReporterPlugin);
  const pluginStateCrash = getCrash(0, 'callstack', 'crash1', 'crash1');
  plugin.instance.reportCrash(pluginStateCrash);
  const content = 'Blaa Blaaa \n Blaa Blaaa';
  plugin.instance.reportCrash(parseCrashLog(content, 'iOS', null));
  const crashes = plugin.instance.crashes.get();
  expect(crashes.length).toEqual(2);
  assertCrash(crashes[0], pluginStateCrash);
  assertCrash(
    crashes[1],
    getCrash(
      1,
      content,
      'Cannot figure out the cause',
      'Cannot figure out the cause',
    ),
  );
});

// An unsupported OS makes parseCrashLog throw, and no crash is recorded.
test('test getNewPersistedStateFromCrashLog when os is undefined', () => {
  const plugin = TestUtils.startDevicePlugin(CrashReporterPlugin);
  const content = 'Blaa Blaaa \n Blaa Blaaa';
  expect(() => {
    plugin.instance.reportCrash(parseCrashLog(content, undefined as any, null));
  }).toThrowErrorMatchingInlineSnapshot(`"Unsupported OS"`);
  const crashes = plugin.instance.crashes.get();
  expect(crashes.length).toEqual(0);
});

// parsePath extracts the app binary path following the "Path:" marker.
test('test parsing of path when inputs are correct', () => {
  const content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-15DEV1CE-1D/AppName.app/AppName \n Blaa Blaa \n Blaa Blaa';
  const id = parsePath(content);
  expect(id).toEqual('path/to/simulator/TH1S-15DEV1CE-1D/AppName.app/AppName');
});

// Spaces, underscores and percent-escapes in the app name must survive
// path extraction unchanged.
test('test parsing of path when path has special characters in it', () => {
  let content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-15DEV1CE-1D/App Name.app/App Name \n Blaa Blaa \n Blaa Blaa';
  let id = parsePath(content);
  expect(id).toEqual(
    'path/to/simulator/TH1S-15DEV1CE-1D/App Name.app/App Name',
  );
  content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-15DEV1CE-1D/App_Name.app/App_Name \n Blaa Blaa \n Blaa Blaa';
  id = parsePath(content);
  expect(id).toEqual(
    'path/to/simulator/TH1S-15DEV1CE-1D/App_Name.app/App_Name',
  );
  content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-15DEV1CE-1D/App%20Name.app/App%20Name \n Blaa Blaa \n Blaa Blaa';
  id = parsePath(content);
  expect(id).toEqual(
    'path/to/simulator/TH1S-15DEV1CE-1D/App%20Name.app/App%20Name',
  );
});

// No "Path:" marker anywhere -> parsePath yields null.
test('test parsing of path when a regex is not present', () => {
  const content = 'Blaa Blaaa \n Blaa Blaaa \n Blaa Blaa \n Blaa Blaa';
  const id = parsePath(content);
  expect(id).toEqual(null);
});

// Notification should be shown when the crash path contains this device's
// serial.
test('test shouldShowCrashNotification function for all correct inputs', () => {
  const device = new BaseDevice(
    'TH1S-15DEV1CE-1D',
    'emulator',
    'test device',
    'iOS',
  );
  const content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-15DEV1CE-1D/App Name.app/App Name \n Blaa Blaa \n Blaa Blaa';
  const shouldShowNotification = shouldShowiOSCrashNotification(
    device.serial,
    content,
  );
  expect(shouldShowNotification).toEqual(true);
});

// Mismatched serial in the crash path -> no notification for this device.
test('test shouldShowiOSCrashNotification function for all correct inputs but incorrect id', () => {
  const device = new BaseDevice(
    'TH1S-15DEV1CE-1D',
    'emulator',
    'test device',
    'iOS',
  );
  const content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-1598DEV1CE-2D/App Name.app/App Name \n Blaa Blaa \n Blaa Blaa';
  const shouldShowNotification = shouldShowiOSCrashNotification(
    device.serial,
    content,
  );
  expect(shouldShowNotification).toEqual(false);
});

// No device serial available -> never notify.
test('test shouldShowiOSCrashNotification function for undefined device', () => {
  const content =
    'Blaa Blaaa \n Blaa Blaaa \n Path: path/to/simulator/TH1S-1598DEV1CE-2D/App Name.app/App Name \n Blaa Blaa \n Blaa Blaa';
  const shouldShowNotification = shouldShowiOSCrashNotification(
    null as any,
    content,
  );
  expect(shouldShowNotification).toEqual(false);
});

// End-to-end check against a realistic simulator crash report: only the
// device whose serial appears in the Path line gets the notification.
test('only crashes from the correct device are picked up', () => {
  const serial = 'AC9482A2-26A4-404F-A179-A9FB60B077F6';
  const crash = `Process: Sample [87361] Path: /Users/USER/Library/Developer/CoreSimulator/Devices/AC9482A2-26A4-404F-A179-A9FB60B077F6/data/Containers/Bundle/Application/9BF91EF9-F915-4745-BE91-EBA397451850/Sample.app/Sample Identifier: Sample Version: 1.0 (1) Code Type: X86-64 (Native) Parent Process: launchd_sim [70150] Responsible: SimulatorTrampoline [1246] User ID: 501`;
  expect(shouldShowiOSCrashNotification(serial, crash)).toBe(true);
  // wrong serial
  expect(
    shouldShowiOSCrashNotification(
      'XC9482A2-26A4-404F-A179-A9FB60B077F6',
      crash,
    ),
  ).toBe(false);
});
def to_str(self, show_traceback: bool) -> str:
    """Serialize this object as pretty-printed JSON.

    Args:
        show_traceback: when False, traceback information is stripped
            from the serialized dictionary before dumping.

    Returns:
        A JSON string rendered with 2-space indentation.
    """
    payload = self.to_dict()
    if not show_traceback:
        # remove_traceback edits the dict in place (its return value is unused).
        remove_traceback(payload)
    return json.dumps(payload, indent=2)
""" Input/output for objects API---includes functions for storing and retrieving lists of words. Functions: get_word_filename, store_words, retrieve_words """ from .objects import max_word_size, max_word_length, Word from word_explorer.io import store_data, retrieve_data def get_word_filename(list_type, size=None, length=None, extra_suffix=""): if length is not None: file_name = "words_up_to_length" + str(length) else: file_name = "words_up_to_size" + str(size) return file_name + "_" + list_type + "_" + extra_suffix + ".txt" def store_words(word_list, list_type="dow", extra_suffix=""): if list_type == "all": length = max_word_length(word_list) size = None else: size = max_word_size(word_list) length = None file_name = get_word_filename( list_type, size, length, extra_suffix=extra_suffix) store_data(word_list, file_name) def retrieve_words(list_type, file_name=None, size=None, length=None, optimize=False, include_empty_word=True, ascending_order=None): if file_name is None: file_name = get_word_filename(list_type, size, length) double_occurrence = False if list_type == "all" else True if ascending_order is None: ascending_order = True if list_type == "ao" else False word_list = [] for line in retrieve_data(file_name, add_output_dir=False): if "," in line: letters = re.findall(r"\d+", line) digit_strings = [str(i) for i in range(1, 10)] for i, letter in enumerate(letters): if letter not in digit_strings: letters[i] = chr(int(letter) + 87) word = Word("".join(letters), ascending_order=ascending_order, optimize=optimize) else: word = Word(line.strip(), double_occurrence=double_occurrence, ascending_order=ascending_order, optimize=optimize) word_list.append(word) if "" not in word_list and include_empty_word: empty_word = Word("", double_occurrence=double_occurrence, ascending_order=ascending_order, optimize=optimize) word_list = [empty_word] + word_list return word_list
s=input() arr=[] if s=="{}": print("0") else: for i in s[1:len(s):3]: arr.append(i) c = len(set(arr)) print(c)
Heathens United Against Racism (HUAR) has many intelligent, strong individuals that see the necessity to speak up about what they feel are the biggest problems within Heathenry. Here they will use their own words to express their main concerns, which gives those who identify as Heathen some very heavy topics to consider and commit to solving. HUAR is comprised of members that consider themselves Heathen, and members that do not, and we believe both points of view are extremely valuable, as they provide a broad perspective to consider. Just as much as these people felt a responsibility to respond to the current state of Heathenry, it is the responsibility of all Heathens to make sure these messages are heard, and that these concerns do not get ignored, or set on the back-burner by those who are in leadership of Heathen organizations. SHARON KNIGHT “I have had an uneasy relationship with Heathenry for about 12 years now. Its power is undeniable. Its Gods and its inner landscapes are both beautiful and terrible. And yet, its willingness to accept racism as a defining feature among some of its practitioners is utterly abhorrent to me. It makes me ashamed to call myself Heathen. So I often don’t. The Blood and Soil nonsense needs to be abolished. It is hurtful, exclusionary, and completely fallacious. We humans have been nomadic since forever. We’ve been intermingling with one another since we dwelt in caves, so the thought that there has ever been any sort of racial purity is ridiculous. Also, Gods have called to people across cultural boundaries for as long as we have record of people experiencing Gods. Who are we to second guess the choices of Gods? It is laughable to think this is our decision to make. Aside from the absurdity in such thinking, this attitude in no way demonstrates the hospitality that Heathens boast of. The kind of Heathenry espoused by Stephen McNallen makes my skin crawl. I want as far away from it as I can get. 
However, I am unwilling to cede the ground to such views and let them poison something so breathtakingly beautiful. So I stay, that I may be part of defining a Heathenry I can be proud of. I don’t know Ryan Smith well. I don’t know Steven Abell at all. But to suggest that Ryan is too extreme in his condemnation of racism while Stephen McNallen is a friendly fellow to chat with is everything that is wrong with Heathenry. Personally I think an aggressive stance against racism is called for. People are being killed over it. If you don’t like Ryan’s methods then come up with your own. But do something, because racism is poisoning the well. I am glad HUAR exists. I wish it didn’t need to. The fact that some felt a need to create a group to demarcate those of us willing to stand against racism is telling indeed. The day we don’t need a special group, because the word “Heathen” itself already includes within it a bold stance against racism, is the day I will wear the banner of Heathen proudly. I can’t stop people being racists. But I don’t have to tolerate racism, or welcome it at any events I produce, sponsor, or attend. Racism is not welcome in my sphere of influence in any way. Standing up for true Heathen hospitality by welcoming and defending ANY person who is genuinely drawn to Heathen traditions is the kind of Heathenry I want to be a part of, and one I will help to create.” SIERRA DAWN “I would consider myself an intentionally practicing Heathen since 2007, and in that time I have watched the problem of racism grow and spread throughout the community. This is absolutely the biggest issue, and needs to continue to be addressed amongst Heathens. It’s a hard one in this case, because a lot of people aren’t blatant about it. Few have neither the stupidity nor the bravery to stand up and scream, “White power for Odin”, because they understand the back lash they would soon be suffering. 
So they’re sneaky about it, attempting to hide their belief in cultural superiority by stating that they’re only trying to keep the blood line of their ancestors pure, or that it is the right of those descended from Europeans to practice ‘their’ religion separately from those of other nationalities. By urging people without Northern European descent to ‘drink from their own well’, as McNallen puts it, and shunning or shaming those who do not, these people are promoting the complete segregation of religious practices based on race, and possibly making people who are not of the “proper” descent feel unwelcomed. Those who may be new to Heathenry may get the wrong idea that all Heathens have this view. There is no reason that Heathenry should not be inclusive to all, and there are many of us who feel this way. This is a growing problem that is turning into a continuous battle. Perhaps we, as inclusive Heathens, cannot stamp out every angry individual who is trying to give Heathenry a bad name with their racist beliefs, but what we can do is continue to speak out against those individuals and their collectives. In raising our voices, the people we can reach are the ones new to Heathenry who are seeking community. We have a chance to warn people of the groups that perpetuate racism. The more articles, blogs and pieces of writing we put out, the stronger the chance that people will come across us first and become aware of this divide in heathenry before they stumble into a racist kindred and learn to believe that ‘that’s just how this religion is.’ ” NAN EDWARDS BOYSTER “I am the leader of a kindred. I do not, here, speak for that kindred, but only for myself. It is important to make that clear up front. Heathenry has issues. 
I love my religion and the many, many positives things about it, but it has a reputation I wish it didn’t: a religion of white supremacy, an intolerant faith, a religion that hates, and a religion that has more in common with fundamentalist Christianity than most Pagan faiths. Right off the bat, we have the issue of racism, which we can’t hide from. Steven McNallan and his ilk make it impossible. Those who make apologies for, and persist in treating such people like valued members of the community don’t help. There is no room in Heathenry for the coddling of racism to any degree, or in any form if we ever want to move beyond that stereotype and ill reputation. Period. Then there is the Loki thing. Some of us treat him like he’s Satan. Except—we aren’t Christians and the Lore is not infallible. We make those who are called by Odin’s blood brother unwelcome. How is this hospitality? Can we not have a rational conversation about this? Odin does magic and Seidr and Spae, as does Freya. Why do some of treat those who also as if they are some kind of pariah? Then there is this relatively new urge in some circles to separate the men from the women in what seems to be some strange kind of misogyny. In a faith that already seems to smack of “bro-ishness,” do we need this? Can we not discuss these issues calmly and come to rational agreements? Or do we simply not care enough to try? Where will we draw the lines? Heathenry has issues, but they CAN be dealt with, if we’re willing to do so honestly. Are you? SOPHIA FATE-CHANGER MARTINEZ “The world of Heathen politics is an overlapping chasm of chaos. As a former member of the AFA, The Troth, current member of an inclusive kindred, and a member of HUAR, I have personally examined and experienced the full spectrum of Heathen ideologies. While I am a member of a kindred, and of HUAR, my statements only represent my own personal beliefs on this subject. My following conclusions are based on my experiences. 
There are the obvious problems in Heathenry, such as racism, and all other imaginable forms of bigotry that reflect the problems in broader culture beyond the scope of Heathenry. Heathenry is not the original source of oppressive thought process, but is often used as a tool to justify these negative behaviors and thoughts. It is an extreme problem that Stephen McNallen of the Asatru Folk Assembly (AFA) has perpetuated racism in the many ways he tries to disguise it for something else, and an even bigger problem that he targets audiences that are easily influenced through prison ministry, and those who are innocently drawn to Heathenry because of their lineage. Having Northern European lineage is a perfectly fine and respectable reason to pursue this noble path, it is when people start believing that only people of Northern European lineage are worthy of walking down it. It seems logical that if an ancient way of life is to be revived in modern times from scratch, that it wouldn’t turn away potential members for an absurd reason like heritage. Of course, membership numbers aren’t the only concern, or the most important reason to fight against racism. It is simply the most honorable and ethical choice to do so. Moving to recent bustling in the community, Steve Abell makes his position on racism quite clear. He believes Ryan Smith, an original founder of HUAR, is a worse problem to Heathenry than racism is. While no one in HUAR could care less of Abell’s opinion of Smith (including Smith himself), it cannot be ignored that Abell downplays the very real, and critical issue of racism. A group called HUA-Racism wouldn’t need to exist if there wasn’t a problem with racism, and The Troth may have never branched away from the AFA had there never been a problem with racism. It is antithetical to the very purpose of The Troth’s existence for their Steersman to be throwing that reality on the back-burner. 
The problems that have stemmed from Abell’s recent statement go deeper into the loose policies of The Troth. I know many members, and have witnessed other members that I don’t know, make their grievances about Abell’s opinions known to no avail. These people are good people, with questionable leadership. This makes it evident that Troth members have no power to make or influence necessary changes, and that all of that power sits at the top of The Troth within the Rede and above them, the Steer. This is a problem for a couple of reasons. The first, is that this is an example to members and potential members that this type of behavior is not only tolerable, but acceptable. Second, it provides a gateway for future leaders of The Troth to do and say whatever they please in the name of The Troth. The Troth leadership has recently made it clear that criticism from non-members is just as valuable as the opinions of actual members, which is not at all. Not only is this a good way to turn away potential members, but it is a sign that The Troth does not recognize its roll in representing Heathenry as a whole. This representation is not just a responsibility to its members, it is a responsibility to all Heathens. At present, this is what I believe to be one of the biggest problems in Heathenry, that The Troth is not taking accountability for how its actions and inactions effect the rest of us. It is extremely frustrating that my own opinions have gone ignored as a non-member who would actually like to see The Troth succeed. Despite my complaints, I believe that if The Troth could recognize and correct its faults, they would not only gain membership, but they would stop losing members. Seeing them succeed and go forward in a positive direction (and perhaps strengthening their anti-discrimination statement), would not only be a wonderful move to revive their organization, but would improve the image of Heathenry altogether. Everybody wins that way. 
As it is currently, The Troth and the AFA are both representing Heathenry, especially here in the U.S., and they are both poor examples that I currently feel ashamed to be associated with.” BEQUI MARIE “As a non-Heathen, and looking at the challenges currently plaguing Heathenry that are rooted in racism and bigotry, the parallels to Christian insecurity, arrogance, pomposity, bigotry and patriarchy are stunning. It’s not just that this “makes Heathenry look bad”. It’s not an image thing in sum. It is that it runs smack into identity for Heathens. Many have accepted Stephen McNallen’s leadership as a principled and educated guide to, and protection of, Heathenry. Until most recently, it seems many Heathens were willing to allow his bigotry to slide based on his leadership, and his ability to network with important individuals and groups outside Heathenry. It’s unclear how many Heathens understand how McNallen has led so many down an odious path, but there are a couple of issues that are noteworthy now that he’s come to a more direct acknowledgement of his biases. His theory of “metagenetics” has provided many with the idea that their heritage, “racial purity”, and “spirituality” are inborn; innate. The manner in which he’s appropriated Jungian theory on the subconscious, other religions and his applications of pseudoscience, have given his believers the structure upon which they base their belief. Few understand the underlying deception or folly here, but I’ll leave the appropriation of Buddhist principles to Buddhists, and the pragmatics behind DNA science for another time. Carl Jung was a product of his environment, and he of all people knew this. In developing his cognitive and developmental theories, Jung never presented these as anything beyond what they were; he was a psychiatrist who understood the need for evidence and development beyond current theoretical frameworks. 
In breaking from his mentor, he argued accordingly but he never left the framework that provided the basis from which his own beliefs could be challenged. He merely defended them. That process in psychiatry and psychoanalysis, is inherently mocked and deliberately left on the cutting room floor as McNallen appropriates his theory, leading many to understand that there’s a substitute for the term “species” that grounds the fundamentals in understanding collective unconscious. It’s a deliberate act that, taken in any other context, would be forgiven perhaps, or addressed as the fundamental error that it is, on any paper or assessment presented by a neophyte in the field of psychoanalysis. Anyone who learned from Jung–who didn’t simply use one of his ideas in a manner which would conflate at least a healthy body of his work, or his history in the field, can understand the problem here. What we know now about how the brain works would suggest that Jung might reorient some of his thinking-we know now for instance, that “genetic” is a limiting base upon which to formulate all ideas about cognition, and development, or even illness. We know that in certain instances, part of the brain can die or be permanently harmed, and another will compensate, often performing its function. There’s brain science that McNallen misses when he shares his ideas of what Jung intended to convey in his developmental models. Jung knew of his limits and saw these more appropriately as boundaries from which his ideas could be launched; anyone who has read The Red Book might find this reference familiar; anyone familiar with his work on dreaming would find this familiar. 
McNallen approaches the “spirituality” of blood born “inheritance” from a framework that theoretically, could have justified Jung’s own bigotry and misogyny–but the difference between the two personalities is here: Jung knew the science of his day; he would have, as a trained professional, honored the process and he would likely have developed an entirely new source of ideas had he been born in contemporary time. Jung understood the importance of developmental theory as it’s anchored to science in psychiatry, and he would have learned from what is currently understood in and provided by neuroscience on both brain development and cognition. These of course are boundaries, not limits. Jung was many things, a psychoanalyst and spiritualist, but he was not a Nazi. McNallen relies upon a misinterpretation of Jung’s idea as it is dated and within its cultural context, to sell Heathens on “racial purity” that lies in their heritage in spite of currently understood progress in DNA science and brain function. Jung would have been appalled. And it’s that cultural context that leads me to comment on the other thing I find notable. Jung tried function outside the context of cultural and sometimes even religious patriarchy which he often voiced a personal dependence on, and discomfort with these concepts. McNallen’s effort appears to mirror a patriarchal structure and context that many who have been raised as Christian find very familiar. Most Christians have some passing familiarity with bigotry and how the Christian Bible teaches and justifies it; many argue that the Old Testament and its bigotry is absent from the New, and others argue that this is not entirely so. 
Many in more recent times are encountering this core as they either embrace or attempt to divest themselves of these teachings-the Mormon church recently abandoned its teachings against blacks, more churches/individuals have either spoken strongly against or in favor of rape, or the LGBT community–all using the varying themes, books and/or verses from Scripture or the Bible to do so. Many more progressive Christians argue the distinct paths taken by those who follow Jesus Christ as savior, and Paul, as even a counter to Jesus. Few argue outside the context of a top-down theology created by their “God” but they argue about whose interpretation is more valid, speaking to the history of various influences and interests in who reports what God said, wanted, or did. McNallen’s theories on “metagenics”, their development and those who argue in favor, follow what is a familiar behavioral pattern, looking at American Heathentry from the outside in. Ideas from varying sources have been taken in whole or parts, and appropriated or reformed- those coming from other belief systems- from psychology and pseudoscience, have been used as adjuncts in order create a “heritage” based on myth making. In effect, this has been his effort to gather “heritage” from the mists of time, and “collective” or “spiritual genetic” history, one virtually destroyed by a monolith of a religion that relied upon its varying storytellers, interpretations and sociopolitical norms to justify genocide, in order to ensure that anything which countered its theology, its theosophy, its means or justifications, was subverted or destroyed. 
Again, this background speaks to behavior that is openly acknowledged and debated in Christianity, whose base is taken not only from the teachings of Jesus, but from other previously existing sources, as all Pagans are aware, McNallen isn’t proposing and he hasn’t done anything novel in having deliberately created ideology that codifies embracing a “spiritual purity” and its incumbent “racial” construct, presumably superior and unique, lauded and placed in hierarchy above all others. It appears most of Heathenry outside the USA is flummoxed, many corners angered by the insertion of racial and political overtones throughout the presumptively “spiritual”, and that includes those insights shared by Heathens familiar with Jung and how his work is understood outside the confines of how psychiatry is practiced in the USA. American Heathens who practice their beliefs in accordance with what McNallen teaches seem to many outsiders like myself, dependent on and increasingly rooted in a construct that to us, is acutely representative of a patriarchal and monotheistic context which seeks the destruction of diversity in order to preserve and maintain its “purity” via race. I’m left to wonder how for some this doesn’t at least feel a bit familiar; how they could miss the cues. A reliance on something such as “metagenics” over the recovery and understanding of the Havamal is something that, as I discuss my continued support of Heathenry, can’t and will not answer to. Each Heathen will do as I know they must- I wouldn’t dream of intruding. But knowing the difference between answering a call to Heathen gods and their teachings, and clubbing someone over the head with a manufactured “heritage” as any insecure, bigoted monotheist might do with their sacred holy book, there will come a time when American Heathens will be more openly understood to either have recovered what was lost and rebuilt from true heritage and spirituality-from all of its sources and their diversity. 
Or they’ll simply be understood and perceived to “be” what some are doing now, answering the call of an increasingly isolationist, tribal and rigid “prophet” who preaches the consequence of enmity and suffering to those he fears, as he seeks increasing influence using fear, and circular myths which seem grounded in little else. The growing, loud consensus from American Heathens who are speaking against bigotry speaks for itself. From what I have gathered, although the claim is made that each individual must answer to what they must in Heathenry, McNallen has functioned as an important guide for so long, that he should not be questioned. But, American Heathens are questioning. They are denouncing racism. Heathens are their deeds. And McNallen’s response has been consistent with his bigoted history-one of fear mongering, denunciations and appeals for “solidarity” behind his stance. From the outside looking in, this seems akin to a backward shaming kind of thing for those presumably capable of individuating from old models predicated on monotheism and its continuity. As I learn more about Heathenry and continue to argue for its right to travel on its own terms out of the confines of marginalized “minority” religions in the USA, I’ll support its continued growth and agree to consign the behavior exhibited by McNallen and his followers to what a lot of people I know perceive what Heathenry is not, and that is right-wing Christianity. My task as I see it, is to help people understand that the minority within which insists it’s got a “spiritual source” in “metagenics” is being taken for a collective fool by what is commonly understood as a “false prophet” in Christian terms, and that this is where any similarity really ends, as far as Heathenry might go. There’s no room for, and I’ve never once heard any Heathen argue for boilerplate Christian behavioral influences in Heathenry.” HRAFNCRAIG “I’ll be honest. I don’t get into the nitty-gritty of modern Heathenry. 
I don’t have the time nor energy to delve into Reddit forums, Facebook comments, email threads, etc. where most of this stuff seems to be taking place. But some things are simply too obvious to ignore, even for someone like me. So here are three big problems, at least according to me. 1: Racism I’m fortunate to belong to a kindred that welcomes people. By this, I mean that we welcome people of all heritages and cultures, genders, sexual orientations, economic classes, spiritual backgrounds and spectrums, and more. As long as someone is a good person, as long as they’re polite and respectful, they can hang out with us. This was very important to me, because the last thing I wanted to do was join a group that was racist or exclusionary. I was a solitary Heathen for many years before I joined a national group. Doing research was tricky because none of the main US groups said they were “racist” but a little digging pointed me in the direction of The Troth. I’m glad that I joined and I remain firm in belief that The Troth will welcome everyone who wishes to join, then continue to make everyone feel like they belong. I believe that racism and folkism have no place in Heathenry. There is simply no precedent in history nor the lore to support such exclusion. Yet there are many Heathens who channel their ignorance and fear through our faith to further their hateful causes. I was drawn to Heathenry in a big way because my family came to the US from Sweden and Norway, and I wanted to learn more about my ancestral culture and ways. However, if I wanted to join a “Scandinavians Only” club I’d join the Sons of Norway or something. I’m grateful that since joining The Troth and my kindred I’ve met Heathens of many different cultures, genders, and identities. What have we had in common? A dedication to the wights, disir, alfar, goddesses, and gods. A desire to explore the lore, to learn the runes, and to grow as people. 
We’ve held blots and sumbels together, ate, drank, and sung together. We’ve even had disagreements together. In other words, we’ve been HEATHENS together, and nobody will ever be able to tell me otherwise. I am committed to making Heathenry welcoming to anyone who heeds the call of the gods and goddesses. And I will strive to uphold hospitality to all who show themselves worthy, not by the color of their skin, but by their words and deeds. 2: The Internet The Heathen Talk podcast recently had an episode called “Face-to-Face Heathenry.” In it, the hosts discuss the pros and cons of online Heathenry. They also discuss the advantages of real human interactions. It’s so easy to make a rash, hurtful comment from the safety of our screens and monitors. I’ve seen it on email lists, Facebook posts, YouTube comments, and I hear it’s even worse in places like Reddit, et al. It’s so easy to polarize and divide ourselves, and it’s something that we seem to be very good at. While I have no problem with a lively and respectful debate over important issues, let’s not forget that we’re humans. Living, breathing, feeling, humans. If you have a chance to sit down and discuss something, person to person, do it. If you can’t do that, why not try to contact the person directly before dragging something out into public? If both those fail, and that person continues to be a harmful or negative presence in our community, then by all means call them out. But first, let’s try to deal with each other respectfully. 3: Myopia I’ve known several people who have attempted to join in Heathenry only to be scared off by other Heathens telling them “you’re doing it wrong.” There’s an eagerness to criticize and ostracize, even within the universalist community. Someone sidles over from Wicca – they’re doing it wrong! Someone performs the Hammer Rite – they’re doing it wrong! Someone says they want to explore the spiritual side of Heathenry – they’re doing it wrong! 
Your interpretation of this
<gh_stars>0 package com.yangdq.java.designpattern.abstractfactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * 抽象工厂哦死 * https://java-design-patterns.com/patterns/abstract-factory/ */ public class App { public static Logger logger = LoggerFactory.getLogger(App.class); private Army army; private Castle castle; private King king; public static class FactoryMaker { public enum KingdomType { ELF, ORC } public static KingdomFactory makeFactory(KingdomType type) { switch (type) { case ELF -> { return new ElfKingdomFactory(); } case ORC -> { return new OrcKingdomFactory(); } default -> { throw new IllegalArgumentException("KingdomType not supported"); } } } } public static void main(String[] args) { var app = new App(); logger.info("ELF Kingdom"); app.createKindom(FactoryMaker.makeFactory(FactoryMaker.KingdomType.ELF)); logger.info(app.getArmy().getDescription()); logger.info(app.getKing().getDescription()); logger.info(app.getCastle().getDescription()); logger.info("Orc Kingdom"); app.createKindom(FactoryMaker.makeFactory(FactoryMaker.KingdomType.ORC)); logger.info(app.getArmy().getDescription()); logger.info(app.getKing().getDescription()); logger.info(app.getCastle().getDescription()); } private void createKindom(KingdomFactory kingdomFactory) { setArmy(kingdomFactory.createArmy()); setKing(kingdomFactory.createKing()); setCastle(kingdomFactory.createCastle()); } public Army getArmy() { return army; } public void setArmy(Army army) { this.army = army; } public Castle getCastle() { return castle; } public void setCastle(Castle castle) { this.castle = castle; } public King getKing() { return king; } public void setKing(King king) { this.king = king; } }
def _qc_version(self) -> str: mqc_ids = [ item.id for item in self.collection.data.filter( type="data:multiqc", status="OK", entity__isnull=False, ordering="id", fields=["id", "entity__id"], ) ] if not mqc_ids: raise ValueError( f"Collection {self.collection.name} has no samples with MultiQC data!" ) return str(hash(tuple(mqc_ids)))
Correlated responses on litter size traits and survival traits after two-stage selection for ovulation rate and litter size in rabbits. Farmer profit depends on the number of slaughter rabbits. The improvement of litter size (LS) at birth by two-stage selection for ovulation rate (OR) and LS could modify survival rate from birth to slaughter. This study aimed to estimate direct and correlated response on LS traits and peri- and postnatal survival traits in the OR_LS rabbit line selected first only for OR (first period) and then for OR and LS using independent culling levels (second period). The studied traits were OR, LS measured as number of total born, number of kits born alive (NBA) and dead (NBD), and number of kits at weaning (NW) and young rabbits at slaughter (NS). Prenatal survival (LS/OR) and survival at birth (NBA/LS), at weaning (NW/NBA) and at slaughter (NS/NW) were also studied. Data were analysed using Bayesian inference methods. Heritabilities for LS traits were low, 0.07 for NBA, NW and NS. Survival traits had low values of heritability 0.07, 0.03 and 0.03 for NBA/LS, NW/NBA and NS/NW, respectively. After six generations of selection by OR (first period), a small increase in NBD and a slight decrease in NBA/LS were found. However, no correlated responses on NW/NBA and NS/NW were observed. After 11 generations of two-stage selection for OR and LS (second period), correlated responses on NBA, NW and NS were 0.12, 0.12 and 0.11 kits per generation, respectively, whereas no substantial modifications on NBA/LS, NW/NBA and NS/NW were found. In conclusion, two-stage selection improves the number of young rabbits at slaughter without modifying survival from birth to slaughter.
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.rest;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.service.SiteService;
import org.dspace.rest.common.MetadataSchema;
import org.dspace.rest.exceptions.ContextException;
import org.dspace.usage.UsageEvent;
import org.dspace.rest.common.MetadataField;

/**
 * Class which provides read methods over the metadata registry.
 *
 * @author <NAME>, Georgetown University
 *
 * GET /registries/schema - Return the list of schemas in the registry
 * GET /registries/schema/{schema_prefix} - Returns the specified schema
 * GET /registries/schema/{schema_prefix}/metadata-fields/{element} - Returns the metadata field within a schema with an unqualified element name
 * GET /registries/schema/{schema_prefix}/metadata-fields/{element}/{qualifier} - Returns the metadata field within a schema with a qualified element name
 * POST /registries/schema/ - Add a schema to the schema registry
 * POST /registries/schema/{schema_prefix}/metadata-fields - Add a metadata field to the specified schema
 * GET /registries/metadata-fields/{field_id} - Return the specified metadata field
 * PUT /registries/metadata-fields/{field_id} - Update the specified metadata field
 * DELETE /registries/metadata-fields/{field_id} - Delete the specified metadata field from the metadata field registry
 * DELETE /registries/schema/{schema_id} - Delete the specified schema from the schema registry
 *
 * Note: intentionally not providing since there is no date to update other than the namespace
 * PUT /registries/schema/{schema_id}
 *
 * NOTE(review): the GET endpoints accept the query parameter "xforwarderfor"
 * (sic) while the write endpoints use "xforwardedfor". The misspelling is kept
 * as-is because it is part of the published REST interface.
 */
@Path("/registries")
public class MetadataRegistryResource extends Resource
{
    protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
    protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
    protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
    protected SiteService siteService = ContentServiceFactory.getInstance().getSiteService();

    private static Logger log = Logger.getLogger(MetadataRegistryResource.class);

    /**
     * Return all metadata registry items in DSpace.
     *
     * @param expand
     *            String in which is what you want to add to returned instance
     *            of metadata schema. Options are: "all", "fields". Default value "fields".
     * @return Return array of metadata schemas.
     * @throws WebApplicationException
     *             It can be caused by creating context or while was problem
     *             with reading schema from database(SQLException).
     */
    @GET
    @Path("/schema")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataSchema[] getSchemas(@QueryParam("expand") @DefaultValue("fields") String expand,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwarderfor") String xforwarderfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Reading all metadata schemas.");
        org.dspace.core.Context context = null;
        ArrayList<MetadataSchema> metadataSchemas = null;

        try
        {
            context = createContext();

            List<org.dspace.content.MetadataSchema> schemas = metadataSchemaService.findAll(context);
            metadataSchemas = new ArrayList<MetadataSchema>();
            for(org.dspace.content.MetadataSchema schema: schemas)
            {
                metadataSchemas.add(new MetadataSchema(schema, expand, context));
            }

            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not read metadata schemas, SQLException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not read metadata schemas, ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.trace("All metadata schemas successfully read.");
        return metadataSchemas.toArray(new MetadataSchema[0]);
    }

    /**
     * Returns metadata schema with basic properties. If you want more, use expand
     * parameter or method for metadata fields.
     *
     * @param schemaPrefix
     *            Prefix for schema in DSpace.
     * @param expand
     *            String in which is what you want to add to returned instance
     *            of metadata schema. Options are: "all", "fields". Default value "fields".
     * @param headers
     *            If you want to access to metadata schema under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return instance of org.dspace.rest.common.MetadataSchema.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading. Also if id/prefix of schema is incorrect
     *             or logged user into context has no permission to read.
     */
    @GET
    @Path("/schema/{schema_prefix}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataSchema getSchema(@PathParam("schema_prefix") String schemaPrefix,
            @QueryParam("expand") @DefaultValue("fields") String expand,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwarderfor") String xforwarderfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Reading metadata schemas.");
        org.dspace.core.Context context = null;
        MetadataSchema metadataSchema = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix);

            // FIX: check for existence BEFORE constructing the REST model.
            // The original wrapped the (possibly null) schema first, which
            // threw a NullPointerException for unknown prefixes instead of
            // reporting the lookup failure.
            if (schema == null)
            {
                processException(String.format("Schema not found for index %s", schemaPrefix), context);
            }

            metadataSchema = new MetadataSchema(schema, expand, context);
            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not read metadata schema, SQLException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not read metadata schema, ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.trace("Metadata schemas successfully read.");
        return metadataSchema;
    }

    /**
     * Returns metadata field with basic properties.
     *
     * @param schemaPrefix
     *            Prefix for schema in DSpace.
     * @param element
     *            Unqualified element name for field in the metadata registry.
     * @param expand
     *            String in which is what you want to add to returned instance
     *            of the metadata field. Options are: "all", "parentSchema". Default value "".
     * @param headers
     *            If you want to access to community under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return instance of org.dspace.rest.common.MetadataField.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading. Also if id of field is incorrect
     *             or logged user into context has no permission to read.
     */
    @GET
    @Path("/schema/{schema_prefix}/metadata-fields/{element}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataField getMetadataFieldUnqualified(@PathParam("schema_prefix") String schemaPrefix,
            @PathParam("element") String element,
            @QueryParam("expand") String expand,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwarderfor") String xforwarderfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        // Delegates to the qualified variant with an empty qualifier.
        return getMetadataFieldQualified(schemaPrefix, element, "", expand, user_ip, user_agent, xforwarderfor, headers, request);
    }

    /**
     * Returns metadata field with basic properties.
     *
     * @param schemaPrefix
     *            Prefix for schema in DSpace.
     * @param element
     *            Element name for field in the metadata registry.
     * @param qualifier
     *            Element name qualifier for field in the metadata registry.
     * @param expand
     *            String in which is what you want to add to returned instance
     *            of the metadata field. Options are: "all", "parentSchema". Default value "".
     * @param headers
     *            If you want to access to community under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return instance of org.dspace.rest.common.MetadataField.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading. Also if id of field is incorrect
     *             or logged user into context has no permission to read.
     */
    @GET
    @Path("/schema/{schema_prefix}/metadata-fields/{element}/{qualifier}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataField getMetadataFieldQualified(@PathParam("schema_prefix") String schemaPrefix,
            @PathParam("element") String element,
            @PathParam("qualifier") @DefaultValue("") String qualifier,
            @QueryParam("expand") String expand,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwarderfor") String xforwarderfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Reading metadata field.");
        org.dspace.core.Context context = null;
        MetadataField metadataField = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix);

            if (schema == null)
            {
                log.error(String.format("Schema not found for prefix %s", schemaPrefix));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            org.dspace.content.MetadataField field = metadataFieldService.findByElement(context, schema, element, qualifier);
            if (field == null)
            {
                log.error(String.format("Field %s.%s.%s not found", schemaPrefix, element, qualifier));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            metadataField = new MetadataField(schema, field, expand, context);
            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not read metadata field, SQLException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not read metadata field, ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.trace("Metadata field successfully read.");
        return metadataField;
    }

    /**
     * Returns metadata field with basic properties.
     *
     * @param fieldId
     *            Id of metadata field in DSpace.
     * @param expand
     *            String in which is what you want to add to returned instance
     *            of the metadata field. Options are: "all", "parentSchema".
     *            Default value "parentSchema".
     * @param headers
     *            If you want to access to community under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return instance of org.dspace.rest.common.MetadataField.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading. Also if id of field is incorrect
     *             or logged user into context has no permission to read.
     */
    @GET
    @Path("/metadata-fields/{field_id}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataField getMetadataField(@PathParam("field_id") Integer fieldId,
            @QueryParam("expand") @DefaultValue("parentSchema") String expand,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwarderfor") String xforwarderfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Reading metadata field.");
        org.dspace.core.Context context = null;
        MetadataField metadataField = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataField field = metadataFieldService.find(context, fieldId);
            if (field == null)
            {
                log.error(String.format("Metadata Field %d not found", fieldId));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            org.dspace.content.MetadataSchema schema = field.getMetadataSchema();
            if (schema == null)
            {
                // FIX: original message was garbled ("... not found ... not found").
                log.error(String.format("Parent schema not found for metadata field %d", fieldId));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            metadataField = new MetadataField(schema, field, expand, context);
            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not read metadata field, SQLException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not read metadata field, ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.trace("Metadata field successfully read.");
        return metadataField;
    }

    /**
     * Create schema in the schema registry. Creating a schema is restricted to admin users.
     *
     * @param schema
     *            Schema that will be added to the metadata registry.
     * @param headers
     *            If you want to access to schema under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return response 200 if was everything all right. Otherwise 400
     *         when id of community was incorrect or 401 if was problem with
     *         permission to write into collection.
     *         Returns the schema (schemaId), if was all ok.
     * @throws WebApplicationException
     *             It can be thrown by SQLException, AuthorizeException and
     *             ContextException.
     */
    @POST
    @Path("/schema")
    @Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataSchema createSchema(MetadataSchema schema,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Creating a schema.");
        org.dspace.core.Context context = null;
        MetadataSchema retSchema = null;

        try
        {
            context = createContext();

            if (!authorizeService.isAdmin(context))
            {
                context.abort();
                String user = "anonymous";
                if (context.getCurrentUser() != null)
                {
                    user = context.getCurrentUser().getEmail();
                }
                log.error("User(" + user + ") does not have permission to create a metadata schema!");
                throw new WebApplicationException(Response.Status.UNAUTHORIZED);
            }

            log.debug(String.format("Admin user creating schema with namespace %s and prefix %s", schema.getNamespace(), schema.getPrefix()));
            org.dspace.content.MetadataSchema dspaceSchema = metadataSchemaService.create(context, schema.getNamespace(), schema.getPrefix());
            log.debug("Creating return object.");
            retSchema = new MetadataSchema(dspaceSchema, "", context);

            writeStats(siteService.findSite(context), UsageEvent.Action.CREATE, user_ip, user_agent, xforwardedfor,
                    headers, request, context);

            context.complete();
            // FIX: added separator — the original concatenation produced
            // e.g. "Schema createddc" in the log.
            log.info("Schema created: " + retSchema.getPrefix());
        }
        catch (SQLException e)
        {
            processException("Could not create new metadata schema, SQLException. Message: " + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not create new metadata schema, ContextException. Message: " + e.getMessage(), context);
        }
        catch (AuthorizeException e)
        {
            processException("Could not create new metadata schema, AuthorizeException. Message: " + e.getMessage(), context);
        }
        catch (NonUniqueMetadataException e)
        {
            processException("Could not create new metadata schema, NonUniqueMetadataException. Message: " + e.getMessage(), context);
        }
        catch (Exception e)
        {
            processException("Could not create new metadata schema, Exception. Class: " + e.getClass(), context);
        }
        finally
        {
            processFinally(context);
        }

        return retSchema;
    }

    /**
     * Create a new metadata field within a schema.
     * Creating a metadata field is restricted to admin users.
     *
     * @param field
     *            Field that will be added to the metadata registry for a schema.
     * @param headers
     *            If you want to access to schema under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return response 200 if was everything all right. Otherwise 400
     *         when id of community was incorrect or 401 if was problem with
     *         permission to write into collection.
     *         Returns the field (with fieldId), if was all ok.
     * @throws WebApplicationException
     *             It can be thrown by SQLException, AuthorizeException and
     *             ContextException.
     */
    @POST
    @Path("/schema/{schema_prefix}/metadata-fields")
    @Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public MetadataField createMetadataField(@PathParam("schema_prefix") String schemaPrefix,
            MetadataField field,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info(String.format("Creating metadataField within schema %s.", schemaPrefix));
        org.dspace.core.Context context = null;
        MetadataField retField = null;

        try
        {
            context = createContext();

            if (!authorizeService.isAdmin(context))
            {
                context.abort();
                String user = "anonymous";
                if (context.getCurrentUser() != null)
                {
                    user = context.getCurrentUser().getEmail();
                }
                log.error("User(" + user + ") does not have permission to create a metadata field!");
                throw new WebApplicationException(Response.Status.UNAUTHORIZED);
            }

            org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix);
            if (schema == null)
            {
                log.error(String.format("Schema not found for prefix %s", schemaPrefix));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            org.dspace.content.MetadataField dspaceField = metadataFieldService.create(context, schema, field.getElement(), field.getQualifier(), field.getDescription());
            writeStats(siteService.findSite(context), UsageEvent.Action.CREATE, user_ip, user_agent, xforwardedfor,
                    headers, request, context);

            retField = new MetadataField(schema, dspaceField, "", context);
            context.complete();
            // FIX: added separator in the log concatenation.
            log.info("Metadata field created within schema: " + retField.getName());
        }
        catch (SQLException e)
        {
            processException("Could not create new metadata field, SQLException. Message: " + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not create new metadata field, ContextException. Message: " + e.getMessage(), context);
        }
        catch (AuthorizeException e)
        {
            processException("Could not create new metadata field, AuthorizeException. Message: " + e.getMessage(), context);
        }
        catch (NonUniqueMetadataException e)
        {
            processException("Could not create new metadata field, NonUniqueMetadataException. Message: " + e.getMessage(), context);
        }
        catch (Exception e)
        {
            processException("Could not create new metadata field, Exception. Message: " + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        return retField;
    }

    //@PUT
    //@Path("/schema/{schema_prefix}")
    //Assumption - there are no meaningful fields to update for a schema

    /**
     * Update metadata field. Replace all information about community except the id and the containing schema.
     *
     * @param fieldId
     *            Id of the field in the DSpace metdata registry.
     * @param field
     *            Instance of the metadata field which will replace actual metadata field in
     *            DSpace.
     * @param headers
     *            If you want to access to metadata field under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Response 200 if was all ok. Otherwise 400 if was id incorrect or
     *         401 if logged user has no permission to update the metadata field.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading or writing. Or problem with writing to
     *             community caused by authorization.
     */
    @PUT
    @Path("/metadata-fields/{field_id}")
    @Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public Response updateMetadataField(@PathParam("field_id") Integer fieldId,
            MetadataField field, @QueryParam("userIP") String user_ip,
            @QueryParam("userAgent") String user_agent, @QueryParam("xforwardedfor") String xforwardedfor,
            @Context HttpHeaders headers, @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Updating metadata field(id=" + fieldId + ").");
        org.dspace.core.Context context = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataField dspaceField = metadataFieldService.find(context, fieldId);
            // FIX: the original checked the request body ("field") for null
            // instead of the database lookup result, so an unknown fieldId
            // produced a NullPointerException rather than a 404.
            if (dspaceField == null)
            {
                log.error(String.format("Metadata Field %d not found", fieldId));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            writeStats(siteService.findSite(context), UsageEvent.Action.UPDATE, user_ip, user_agent,
                    xforwardedfor, headers, request, context);

            dspaceField.setElement(field.getElement());
            dspaceField.setQualifier(field.getQualifier());
            dspaceField.setScopeNote(field.getDescription());
            metadataFieldService.update(context, dspaceField);

            context.complete();
        }
        catch (SQLException e)
        {
            // FIX: message previously mislabelled this catch as AuthorizeException.
            processException("Could not update metadata field(id=" + fieldId + "), SQLException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not update metadata field(id=" + fieldId + "), ContextException Message:" + e, context);
        }
        catch (AuthorizeException e)
        {
            processException("Could not update metadata field(id=" + fieldId + "), AuthorizeException. Message:" + e, context);
        }
        catch (NonUniqueMetadataException e)
        {
            processException("Could not update metadata field(id=" + fieldId + "), NonUniqueMetadataException. Message:" + e, context);
        }
        catch (IOException e)
        {
            processException("Could not update metadata field(id=" + fieldId + "), IOException. Message:" + e, context);
        }
        finally
        {
            processFinally(context);
        }

        log.info("Metadata Field(id=" + fieldId + ") has been successfully updated.");
        return Response.ok().build();
    }

    /**
     * Delete metadata field from the DSpace metadata registry
     *
     * @param fieldId
     *            Id of the metadata field in DSpace.
     * @param headers
     *            If you want to access to metadata field under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return response code OK(200) if was everything all right.
     *         Otherwise return NOT_FOUND(404) if was id of metadata field is incorrect.
     *         Or (UNAUTHORIZED)401 if was problem with permission to metadata field.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading or deleting. Or problem with deleting
     *             metadata field caused by IOException or authorization.
     */
    @DELETE
    @Path("/metadata-fields/{field_id}")
    public Response deleteMetadataField(@PathParam("field_id") Integer fieldId,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Deleting metadata field(id=" + fieldId + ").");
        org.dspace.core.Context context = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataField dspaceField = metadataFieldService.find(context, fieldId);
            if (dspaceField == null)
            {
                log.error(String.format("Metadata Field %d not found", fieldId));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            writeStats(siteService.findSite(context), UsageEvent.Action.DELETE, user_ip, user_agent,
                    xforwardedfor, headers, request, context);

            metadataFieldService.delete(context, dspaceField);
            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not delete metadata field(id=" + fieldId + "), SQLException. Message:" + e, context);
        }
        catch (AuthorizeException e)
        {
            processException("Could not delete metadata field(id=" + fieldId + "), AuthorizeException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not delete metadata field(id=" + fieldId + "), ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.info("Metadata field(id=" + fieldId + ") was successfully deleted.");
        return Response.status(Response.Status.OK).build();
    }

    /**
     * Delete metadata schema from the DSpace metadata registry
     *
     * @param schemaId
     *            Id of the metadata schema in DSpace.
     * @param headers
     *            If you want to access to metadata schema under logged user into
     *            context. In headers must be set header "rest-dspace-token"
     *            with passed token from login method.
     * @return Return response code OK(200) if was everything all right.
     *         Otherwise return NOT_FOUND(404) if was id of metadata schema is incorrect.
     *         Or (UNAUTHORIZED)401 if was problem with permission to metadata schema.
     * @throws WebApplicationException
     *             It is throw when was problem with creating context or problem
     *             with database reading or deleting. Or problem with deleting
     *             metadata schema caused by IOException or authorization.
     */
    @DELETE
    @Path("/schema/{schema_id}")
    public Response deleteSchema(@PathParam("schema_id") Integer schemaId,
            @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent,
            @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers,
            @Context HttpServletRequest request) throws WebApplicationException
    {
        log.info("Deleting metadata schema(id=" + schemaId + ").");
        org.dspace.core.Context context = null;

        try
        {
            context = createContext();

            org.dspace.content.MetadataSchema dspaceSchema = metadataSchemaService.find(context, schemaId);
            if (dspaceSchema == null)
            {
                log.error(String.format("Metadata Schema %d not found", schemaId));
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }

            writeStats(siteService.findSite(context), UsageEvent.Action.DELETE, user_ip, user_agent,
                    xforwardedfor, headers, request, context);

            metadataSchemaService.delete(context, dspaceSchema);
            context.complete();
        }
        catch (SQLException e)
        {
            processException("Could not delete metadata schema(id=" + schemaId + "), SQLException. Message:" + e, context);
        }
        catch (AuthorizeException e)
        {
            processException("Could not delete metadata schema(id=" + schemaId + "), AuthorizeException. Message:" + e, context);
        }
        catch (ContextException e)
        {
            processException("Could not delete metadata schema(id=" + schemaId + "), ContextException. Message:" + e.getMessage(), context);
        }
        finally
        {
            processFinally(context);
        }

        log.info("Metadata schema(id=" + schemaId + ") was successfully deleted.");
        return Response.status(Response.Status.OK).build();
    }
}
Emulsion-Based Intradermal Delivery of Melittin in Rats Bee venom (BV) has long been used as a traditional medicine. The aim of the present study was to formulate a BV emulsion with good rheological properties for dermal application and investigate the effect of formulation on the permeation of melittin through dermatomed rat skin. A formulated emulsion containing 1% (w/v) BV was prepared. The emulsion was compared with distilled water (DW) and 25% (w/v) N-methyl-2-pyrrolidone (NMP) in DW. Permeation of melittin from aqueous solution through the dermatomed murine skin was evaluated using the Franz diffusion cells. Samples of receptor cells withdrawn at pre-determined time intervals were measured for melittin amount. After the permeation study, the same skin was used for melittin extraction. In addition, a known amount of melittin (5 μg/mL) was added to stratum corneum, epidermis, and dermis of the rat skin, and the amount of melittin was measured at pre-determined time points. The measurement of melittin from all samples was done with HPLC-MS/MS. No melittin was detected in the receptor phase at all time points in emulsion, DW, or NMP groups. When the amount of melittin was further analyzed in stratum corneum, epidermis, and dermis from the permeation study, melittin was still not detected. In an additional experiment, the amount of melittin added to all skin matrices was corrected against the amount of melittin recovered. While the total amount of melittin was retained in the stratum corneum, less than 10% of melittin remained in epidermis and dermis within 15 and 30 min, respectively. Skin microporation with BV emulsion facilitates the penetration of melittin across the stratum corneum into epidermis and dermis, where emulsified melittin could have been metabolized by locally-occurring enzymes. Introduction Bee venom (BV) has long been used in East Asian countries as a traditional medicine. 
The venoms of bees are complex mixtures of biologically-active proteins and peptides, such as phospholipases, hyaluronidase, phosphatase, α-glucosidase, serotonin, histamine, dopamine, noradrenaline, and adrenaline. In addition, melittin, apamin, and mast cell degranulating peptide are also found in BV. A recent review article on bee venoms for their potential therapeutic and biotechnological applications in biomedicine focuses on two major peptides-namely, melittin and apamin . Both melittin and apamin have a broad spectrum of therapeutic applications. Topical application of cosmetics containing purified BV has been reported to be effective in the treatment of humans with acne vulgaris . Another study conducted by An et al. reported that BV has a potential anti-bacterial effect against inflammatory skin disease. In this context, Propionibacterium acnes was intradermally injected into the ears of mice. Following the injection, BV mixed with Vaseline was applied to the skin surface of the ear. Histological observation revealed that the P. acnes injection induced a considerable increase in the number of infiltrated inflammatory cells and inflammatory cytokines. By contrast, the BV-treated ears showed noticeably reduced ear thickness. Moreover, heat-killed P. acnes increased the secretion of cytokines in human keratinocytes and monocytes, which was reversed by BV treatment . Further studies have demonstrated that BV application induced a significant anti-inflammatory response via inhibition of inflammatory mediators, similar to what is achieved by treatment with non-steroidal anti-inflammatory drugs. Han et al. have shown that BV treatment has anti-inflammatory effects in the skin and a rapid cicatrizing effect on wounds in rats. Intraperitoneal administration of BV inhibited the degranulation of mast cells and the production of pro-inflammatory cytokines in compound 48/80-treated mouse skin tissues . 
BV also inhibited the compound 48/80-induced activation of nuclear factor kappa B (NF-κB), which regulates pro-inflammatory cytokine expression. Lee et al. demonstrated that BV and melittin mediated the anti-inflammatory effect via NF-κB signaling, confirming that activation of the p38 pathway was important in the activation of cytokines during inflammatory reactions. Many topical BV-based skincare cosmetic products are available in the market. For example, BV serum treatment clinically improved facial wrinkles by decreasing total wrinkle area, total wrinkle count, and average wrinkle depth . Furthermore, BV serum was found to be effective in the treatment of mild-to-moderate acne vulgaris . However, topical formulations or products of melittin-the most studied and prevalent substance in BV-are presently not available in the market. Moreover, low permeability of melittin across the stratum corneum due to its high molecular weight as well as strong binding to the lipid bilayers in the skin makes its use as a key ingredient in skin care products difficult. An ideal cosmetic skincare product must pass through the stratum corneum and reach the viable epidermis and dermis. Among various strategies to improve the penetration of melittin across the stratum corneum, an emulsification method was used in the current study. The overall objective of this study was to formulate a BV-based emulsion and access delivery of melittin through dermatomed murine skin using Franz diffusion cells. Skin was then treated with tape stripping to create skin matrices to extract melittin. Finally, another dermatomed rat skin was separated into stratum corneum, epidermis, and dermis before the melittin was applied to them, and retention of melittin in skin matrices was measured. In Vitro Skin Permeation Study Permeation of melittin from aqueous solution through dermatomed murine skin was evaluated. 
No melittin was detected in the receptor phase at all time points, even after 36 h in all test groups (Table 1). Extraction of Melittin from Skin The amount of melittin was further analyzed in stratum corneum (tapes 0-3) and underlying layers (viable epidermis and dermis) using the tape stripping technique. Melittin was still not detected in all skin matrices of all test groups, while melittin remaining in tapes 0-1 of all test groups was almost negligible ( Table 2). This finding, as well as data from skin permeation study, indicated the ability of melittin in the form of emulsion to permeate across the skin-probably due to the chemical interaction of emulsified melittin with the lipid bilayers of the skin. It may also be possible that any small amount of emulsified melittin when permeated could have been metabolized by the degradative action of locally occurring enzymes in the skin or have been below the limit of detection of the HPLC-MS/MS method. An additional melittin recovery study was conducted to confirm this implication. Extraction Recovery Study Further experiment was carried out using stratum corneum, epidermis, and dermis treated by melittin, because extraction of melittin from dermatomed murine skin following the in vitro skin permeability study was not found to be practically feasible. As shown in Figure 1, the amount of melittin added to all skin matrices was corrected against the amount of melittin recovered. The extraction efficiency of less than 10% for melittin from viable epidermis and dermis was observed in 15 and 30 min, respectively. However, the total amount of melittin retained in the stratum corneum was found to be 100%, confirming the presence of no enzymes for melittin metabolism in the stratum corneum. Although the trend that the amount of melittin gradually reduced from the outer layer to the deeper layers was observed, the metabolizing capacity of melittin was more efficient in epidermis than dermis. 
In fact, most biotransformation reactions in the rat skin occur predominantly in the epidermis due to both phase I and II reactions . Discussion The main purpose of the present study was to evaluate the emulsification effect of BV for dermal application by measuring the amount of melittin remaining from permeation process in the different skin layers. This is the first experimental study to determine the effects of BV-containing emulsion on skin permeation. In this research, we found that emulsified melittin when permeated could have been metabolized by the degradative action of locally-occurring enzymes, at least in the rat skin. The mechanism by which BV exerts its therapeutic action on skin disorders has mostly been related to anti-inflammatory activity. When skin bacteria were incubated with BV, the bacteriostatic action of BV was demonstrated which was further supported by its anti-inflammatory activity against skin bacteria through suppression of the secretion of pro-inflammatory cytokines . This in vitro action of antimicrobial property of BV study was further translated into an in vivo study of human subjects with acne vulgaris or wrinkles . Repeated exposure to UV irradiation induces an elevated secretion of matrix-degrading enzymes called matrix metalloproteinases (MMPs). With human dermal fibroblasts, BV markedly reduced UV-induced MMP protein levels compared with those of UV-irradiated control . This was attributed to the presence of anti-melanogenic property of BV by inhibiting tyrosinase-related proteins . When wounded mice were treated topically with BV, increased collagen protein synthesis was demonstrated which might be related to increased proliferation and migration of human epidermal keratinocytes . BV as a cosmetic ingredient may be useful as a topical agent for promoting skin regeneration or treatment of certain epidermal conditions. 
The topical application of BV can be well tolerated in the human skin because it exhibited no dermal irritation potential in animal studies . Despite the widespread use of BV, topical formulations or products of melittin are not available in the market due to its low permeability across the skin layers. This study showed an enhancement in the delivery of melittin from aqueous solution to deeper skin layers of epidermis and dermis. Furthermore, melittin in the form of emulsion was not detected in the receptor phase in the in vitro permeation study using dermatomed rat skin. In conclusion, bee venom emulsion formulated using 1% BV and triethanolamine as a base solubilizing oils and other ingredients was found to be rheologically stable and acceptable for topical application. The permeability of melittin from the solution to the deeper skin layers for an ultimate metabolism was enhanced with the use of emulsification. As epidermis and dermis are the ideal sites for the biological effects of melittin, enhancement in its intradermal delivery mediated by emulsification seems to have a bright future for its use as an anti-photoaging agent in cosmetic products. 
Purified Honeybee Venom Collection Bee venom added to cosmetic emulsion was from the experimental colonies of natural honey bees (Apis mellifera L.) that were maintained at the National Institute of Agricultural Science, Korea. Bee venom was collected with a bee venom collector (Chungjin Biotech, Ansan, Korea) in a sterile manner under strict laboratory conditions. In brief, the bee venom collector was placed on the hive, and the bees were given enough electric shock to cause them to sting a glass plate from which dried bee venom was later scraped off. The collected venom was diluted in cold sterile water and then centrifuged at 10,000× g for 5 min at 4 • C to discard residues from the supernatant. Purified bee venom was lyophilized by freeze dryer and refrigerated at 4 • C for later use. Preparation of Skin Samples The animal protocols for this study were approved by the boards of the Catholic University of Daegu and Daegu Catholic University Medical Center (Daegu, Korea). Male Sprague-Dawley rats (230 ± 20 g, Samtako Bio Korea Co., Ltd. Osan, Korea) were used to obtain the skin sections. The animals were euthanized by high concentration of diethyl ether and the dorsal hair of rats was shaved with electric clipper (Model 808, Daito Electric Co., Osaka, Japan). Full-thickness skin (5 × 5 cm 2 ) was prepared by excising the dorsum of rats, cleaned thoroughly using distilled water and stored at −20 • C until further use. It was then dermatomed using a Dermatome to obtain skin pieces, approximately 0.6 mm thick. The dermatomed skin was then cut into approximate sizes (5 × 5 cm 2 ) for mounting on the vertical Franz diffusion cells (Bioneer, Hørsholm, Denmark). In Vitro Skin Permeation Study Permeation experiment of melittin through dermatomed murine skin was performed with a system employing Franz diffusion cells. The temperature in the receptor phase was maintained at 32 ± 0.5 • C with an external constant temperature circulating water bath. 
Skin was mounted on a receptor phase (12 mL) with the dermis facing the receptor and stratum corneum towards the donor phase with effective permeation area of 1.766 cm 2 . The receptor and donor phases were filled with phosphate-buffered saline (PBS) solution, and the receptor fluid was continuously stirred with a magnetic bar at 600 rpm to maintain homogeneity. After 1 h equilibration, the solution in the receptor phase was replaced with fresh PBS, and emulsion was applied on the skin in the donor phase. The donor phase was then covered with a parafilm to avoid any evaporation process. The samples of the receptor cell (0.2 mL) were withdrawn at pre-determined time points (1,2,3,4,5,6,8,10,12,24, and 36 h after DW, 25% NMP, and emulsion application) and replaced with equal volume of PBS buffer to keep a constant volume. Samples were analyzed using HPLC-MS/MS (Waters, Minneapolis, MA, USA). Extraction of Melittin from Skin After the permeation study, donor emulsion was removed first, and the skin was then cleaned properly with PBS buffer. To measure the amount of melittin retained in the skin layers, stratum corneum was separated from the underlying epidermis and dermis using tape stripping. Adhesive tapes were applied onto the permeation area of the skin, one by one, pressed manually with a finger, removed quickly with forceps, and collected in six-well plates. Tapes 0-3 were collected separately to measure the amount of melittin remaining in stratum corneum. The remaining viable epidermis and dermis were cut into small pieces using surgical scissors. Skin pieces were weighed, and 25 mg of skin was placed in separate vials. The vials were centrifuged at 20,000× g for 20 min at room temperature so as to ensure that all the skin pieces were at the bottom of the vial and then kept overnight in an incubator at 37 • C. Ethanol (500 µL) was then added to each vial and vortexed to remove any emulsion present on the skin surface. 
The skin pieces were then removed and placed individually from each vial in a six-well plate. Methanol (2 mL) was added to each well. The plates were kept on the shaker at 150 rpm for 4 h. Samples were then filtered through 0.22 µm syringe filters and analyzed by HPLC-MS/MS . Extraction Recovery Study The protocol as described in the skin sample preparation was followed. Dermatomed murine skin was tape-stripped to separate the stratum corneum from epidermis and dermis. Stratum corneum, epidermis, and dermis were cut into small pieces, and placed in six-well plate. Melittin standard solution (5 µg/mL) was added into respective vials containing the weighed amount of skin. The vials were kept at room temperature to ensure that all the skin pieces were in complete contact with melittin to maximize its absorption into skin layers. The amount of 25 µL of samples were withdrawn at pre-determined time intervals (0, 5, 10, 15, 20, 25, and 30 min; for epidermis, it was up to 15 min), and acetonitrile (100 µL) was added to stop the enzymatic reaction and precipitate the protein in the skin. Each vial was vortexed and centrifuged at 4000× g for 10 min. The supernatant was used to analyze the amount of melittin using HPLC-MS/MS. Quantitative Analysis HPLC-MS/MS was used for the quantitative estimation of melittin. A Waters 2690 HPLC system (Waters, Minneapolis, MA, USA) coupled with a binary pump, an autosampler, and triple quadrupole mass spectrometer equipped with a turbo electrospray ionization source was used. The separation was carried out using Halo C18 column (2.1 × 50 mm, 2.7 µm) from Advanced materials technology (Wilmington, DE, USA). The chromatographic conditions included a flow rate of 0.3 mL/min, injection volume of 5 µL, column temperature of 30 °C, solvent A of MeCN containing 0.1% formic acid, solvent B of H2O containing 0.1% formic acid, and a gradient elution of 95% for 1 min, 95-30% B in 3.5 min, 30-95% B in 10 min. 
Mass spectrometric analysis was conducted in positive ion mode under the following settings: gas temperature 320 °C, curtain gas 4 psi, collision gas 25 psi, DP 31 V, FP 370 V, EP 3.5 V, CE 61 V and CXP 2 V. Quantification of melittin was estimated by scanning the following multiple-reaction monitoring transition: melittin (m/z 570.1→86.2, dwell time 300 ms). Conclusions Skin microporation with BV emulsion facilitates the skin permeability of melittin which may be due to enzyme activity.
// AppendToLog appends text to a log file. func AppendToLog(filename, text string) { f, err := Fopen(filename, "a") if err != nil { fmt.Printf("Failed to open to log file:%s, error:%s\n", filename, err) return } defer f.Close() if _, err = f.WriteString(text); err != nil { fmt.Printf("Failed to write to log file:%s error:%s\n", filename, err) return } }
// // Connection is marked deleted, add it to the ConnectionSet. // void XmppServer::InsertDeletedConnection(XmppServerConnection *connection) { CHECK_CONCURRENCY("bgp::Config"); assert(connection->IsDeleted()); ConnectionSet::iterator it; bool result; tie(it, result) = deleted_connection_set_.insert(connection); assert(result); }
Design, Progress and Challenges of a Double-Blind Trial of Warfarin versus Aspirin for Symptomatic Intracranial Arterial Stenosis Background and Relevance: Atherosclerotic stenosis of the major intracranial arteries is an important cause of transient ischemic attack (TIA) or stroke. Of the 900,000 patients who suffer a TIA or stroke each year in the USA, intracranial stenosis is responsible for approximately 10%, i.e. 90,000 patients. There have been no prospective trials evaluating antithrombotic therapies for preventing recurrent vascular events in these patients. The main objective of this trial is to compare warfarin with aspirin (1,300 mg/day) for preventing stroke (ischemic and hemorrhagic) and vascular death in patients presenting with TIA or stroke caused by stenosis of a major intracranial artery. Study Design: Prospective, randomized, double-blind, multicenter trial. The sample size required will be 403 patients per group, based on stroke and vascular death rates of 33% per 3 years in the aspirin group vs. 22% per 3 years in the warfarin group, a p value of 0.05, power of 80%, a 24% rate of ‘withdrawal of therapy’, and a 1% rate of ‘lost to follow-up’. Conduct of Trial: Patients with TIA or nondisabling stroke caused by ≧50% stenosis of a major intracranial artery documented by catheter angiography are randomized to warfarin or aspirin. Patients are contacted monthly by phone and examined every 4 months until a common termination date. Mean follow-up in the study is expected to be 3 years. Conclusion: This study will determine whether warfarin or aspirin is superior for patients with symptomatic intracranial arterial stenosis. Furthermore, it will identify patients whose rate of ischemic stroke in the territory of the stenotic intracranial artery on best medical therapy is sufficiently high to justify a subsequent trial comparing intracranial angioplasty/stenting with best medical therapy in this subset of patients.
/** * Binary editor mode options panel. * * @version 0.2.0 2018/12/20 * @author ExBin Project (https://exbin.org) */ @ParametersAreNonnullByDefault public class ModePanelEx extends javax.swing.JPanel { private ExtCodeArea codeArea; public ModePanelEx() { initComponents(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { viewModeScrollModeLabel = new javax.swing.JLabel(); viewModeComboBox = new javax.swing.JComboBox<>(); showNonprintableCharactersCheckBox = new javax.swing.JCheckBox(); codeTypeLabel = new javax.swing.JLabel(); codeTypeComboBox = new javax.swing.JComboBox<>(); positionCodeTypeLabel = new javax.swing.JLabel(); positionCodeTypeComboBox = new javax.swing.JComboBox<>(); hexCharactersModeLabel = new javax.swing.JLabel(); hexCharactersModeComboBox = new javax.swing.JComboBox<>(); antialiasingLabel = new javax.swing.JLabel(); antialiasingComboBox = new javax.swing.JComboBox<>(); editModeLabel = new javax.swing.JLabel(); editModeComboBox = new javax.swing.JComboBox<>(); fontPanel = new javax.swing.JPanel(); fontFamilyLabel = new javax.swing.JLabel(); fontFamilyComboBox = new javax.swing.JComboBox<>(); fontSizeLabel = new javax.swing.JLabel(); fontSizeComboBox = new javax.swing.JComboBox<>(); charsetLabel = new javax.swing.JLabel(); charsetComboBox = new javax.swing.JComboBox<>(); borderTypeLabel = new javax.swing.JLabel(); borderTypeComboBox = new javax.swing.JComboBox<>(); viewModeScrollModeLabel.setText("View Mode"); viewModeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "DUAL", "HEXADECIMAL", "PREVIEW" })); viewModeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { 
viewModeComboBoxActionPerformed(evt); } }); showNonprintableCharactersCheckBox.setText("Show Nonprintable Characters"); showNonprintableCharactersCheckBox.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { showNonprintableCharactersCheckBoxItemStateChanged(evt); } }); codeTypeLabel.setText("Code Type"); codeTypeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "BINARY", "OCTAL", "DECIMAL", "HEXADECIMAL" })); codeTypeComboBox.setSelectedIndex(3); codeTypeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { codeTypeComboBoxActionPerformed(evt); } }); positionCodeTypeLabel.setText("Position Code Type"); positionCodeTypeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "OCTAL", "DECIMAL", "HEXADECIMAL" })); positionCodeTypeComboBox.setSelectedIndex(2); positionCodeTypeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { positionCodeTypeComboBoxActionPerformed(evt); } }); hexCharactersModeLabel.setText("Hex Characters Mode"); hexCharactersModeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "LOWER", "UPPER" })); hexCharactersModeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { hexCharactersModeComboBoxActionPerformed(evt); } }); antialiasingLabel.setText("Character Antialiasing"); antialiasingComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "OFF", "AUTO", "DEFAULT", "BASIC", "GASP", "LCD_HRGB", "LCD_HBGR", "LCD_VRGB", "LCD_VBGR" })); antialiasingComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { antialiasingComboBoxActionPerformed(evt); } }); editModeLabel.setText("Edit Mode"); editModeComboBox.setModel(new 
javax.swing.DefaultComboBoxModel<>(new String[] { "READ_ONLY", "EXPANDING", "CAPPED", "INPLACE" })); editModeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { editModeComboBoxActionPerformed(evt); } }); fontPanel.setBorder(javax.swing.BorderFactory.createTitledBorder("Font")); fontFamilyLabel.setText("Font Family"); fontFamilyComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "DIALOG", "MONOSPACE", "SERIF" })); fontFamilyComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { fontFamilyComboBoxActionPerformed(evt); } }); fontSizeLabel.setText("Font Size"); fontSizeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "8", "9", "10", "12", "14", "18", "22" })); fontSizeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { fontSizeComboBoxActionPerformed(evt); } }); javax.swing.GroupLayout fontPanelLayout = new javax.swing.GroupLayout(fontPanel); fontPanel.setLayout(fontPanelLayout); fontPanelLayout.setHorizontalGroup( fontPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(fontPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(fontPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(fontFamilyComboBox, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(fontSizeComboBox, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(fontPanelLayout.createSequentialGroup() .addGroup(fontPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(fontFamilyLabel) .addComponent(fontSizeLabel)) .addGap(0, 0, Short.MAX_VALUE))) .addContainerGap()) ); fontPanelLayout.setVerticalGroup( fontPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) 
.addGroup(fontPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(fontFamilyLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(fontFamilyComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(fontSizeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(fontSizeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); charsetLabel.setText("Charset"); charsetComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "UTF-8", "UTF-16", "UTF-16BE", "US-ASCII", "IBM852", "ISO-8859-1" })); charsetComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { charsetComboBoxActionPerformed(evt); } }); borderTypeLabel.setText("Border Type"); borderTypeComboBox.setModel(new javax.swing.DefaultComboBoxModel<>(new String[] { "NONE", "EMPTY BORDER", "MARGIN BORDER", "BEVEL BORDER - RAISED", "BEVEL BORDER - LOWERED", "ETCHED BORDER - RAISED", "ETCHED BORDER - LOWERED", "LINE BORDER" })); borderTypeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { borderTypeComboBoxActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(borderTypeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(charsetComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(fontPanel, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(editModeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(antialiasingComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(hexCharactersModeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(viewModeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(showNonprintableCharactersCheckBox, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(positionCodeTypeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(viewModeScrollModeLabel, javax.swing.GroupLayout.Alignment.LEADING) .addComponent(codeTypeLabel, javax.swing.GroupLayout.Alignment.LEADING)) .addGap(0, 0, Short.MAX_VALUE)) .addComponent(codeTypeComboBox, javax.swing.GroupLayout.Alignment.LEADING, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) .addGroup(layout.createSequentialGroup() 
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(hexCharactersModeLabel) .addComponent(antialiasingLabel) .addComponent(editModeLabel) .addComponent(charsetLabel) .addComponent(borderTypeLabel) .addComponent(positionCodeTypeLabel)) .addGap(0, 0, Short.MAX_VALUE)))) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addComponent(viewModeScrollModeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(viewModeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(showNonprintableCharactersCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(codeTypeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(codeTypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(positionCodeTypeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(positionCodeTypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(hexCharactersModeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(hexCharactersModeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(antialiasingLabel) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(antialiasingComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(editModeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(editModeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(fontPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(charsetLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(charsetComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(borderTypeLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(borderTypeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents private void showNonprintableCharactersCheckBoxItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_showNonprintableCharactersCheckBoxItemStateChanged codeArea.setShowUnprintables(showNonprintableCharactersCheckBox.isSelected()); }//GEN-LAST:event_showNonprintableCharactersCheckBoxItemStateChanged private void codeTypeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_codeTypeComboBoxActionPerformed 
        // Apply the selected code type (e.g. binary/octal/decimal/hex) to the code area.
        codeArea.setCodeType(CodeType.values()[codeTypeComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_codeTypeComboBoxActionPerformed

    // Maps the edit-mode combo selection onto the code area by enum ordinal.
    private void editModeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_editModeComboBoxActionPerformed
        ((EditModeCapable) codeArea).setEditMode(EditMode.values()[editModeComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_editModeComboBoxActionPerformed

    // Switches the code-area font family (dialog/monospaced/serif) while keeping
    // the currently configured font size.
    private void fontFamilyComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_fontFamilyComboBoxActionPerformed
        Font codeFont = codeArea.getCodeFont();
        int size = codeFont.getSize();
        switch (fontFamilyComboBox.getSelectedIndex()) {
            case 0: {
                codeArea.setCodeFont(new Font(Font.DIALOG, Font.PLAIN, size));
                break;
            }
            case 1: {
                codeArea.setCodeFont(new Font(Font.MONOSPACED, Font.PLAIN, size));
                break;
            }
            case 2: {
                codeArea.setCodeFont(new Font(Font.SERIF, Font.PLAIN, size));
                break;
            }
        }
    }//GEN-LAST:event_fontFamilyComboBoxActionPerformed

    // Derives a new font from the current one using the size picked in the combo.
    // Note: the combo items are strings, hence the parse via Integer.valueOf.
    private void fontSizeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_fontSizeComboBoxActionPerformed
        Font font = codeArea.getCodeFont();
        Font derivedFont = font.deriveFont(Font.PLAIN, Integer.valueOf((String) fontSizeComboBox.getSelectedItem()));
        codeArea.setCodeFont(derivedFont);
    }//GEN-LAST:event_fontSizeComboBoxActionPerformed

    // Maps the view-mode combo selection onto the code area by enum ordinal.
    private void viewModeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewModeComboBoxActionPerformed
        ((ViewModeCapable) codeArea).setViewMode(CodeAreaViewMode.values()[viewModeComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_viewModeComboBoxActionPerformed

    // Maps the antialiasing combo selection onto the code area by enum ordinal.
    private void antialiasingComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_antialiasingComboBoxActionPerformed
        ((AntialiasingCapable) codeArea).setAntialiasingMode(AntialiasingMode.values()[antialiasingComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_antialiasingComboBoxActionPerformed

    // Applies the charset chosen by name in the combo to the code area.
    private void charsetComboBoxActionPerformed(java.awt.event.ActionEvent evt)
    {//GEN-FIRST:event_charsetComboBoxActionPerformed
        ((CharsetCapable) codeArea).setCharset(Charset.forName((String) charsetComboBox.getSelectedItem()));
    }//GEN-LAST:event_charsetComboBoxActionPerformed

    // Swaps the component border for the one matching the combo index
    // (see getBorderByType below for the index-to-border mapping).
    private void borderTypeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_borderTypeComboBoxActionPerformed
        codeArea.setBorder(getBorderByType(borderTypeComboBox.getSelectedIndex()));
    }//GEN-LAST:event_borderTypeComboBoxActionPerformed

    // Maps the hex upper/lower-case combo selection onto the code area by enum ordinal.
    private void hexCharactersModeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_hexCharactersModeComboBoxActionPerformed
        ((CodeCharactersCaseCapable) codeArea).setCodeCharactersCase(CodeCharactersCase.values()[hexCharactersModeComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_hexCharactersModeComboBoxActionPerformed

    // Maps the position code-type combo selection onto the code area by enum ordinal.
    private void positionCodeTypeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_positionCodeTypeComboBoxActionPerformed
        codeArea.setPositionCodeType(PositionCodeType.values()[positionCodeTypeComboBox.getSelectedIndex()]);
    }//GEN-LAST:event_positionCodeTypeComboBoxActionPerformed

    /**
     * Binds this options panel to a code area and synchronizes every control
     * with the code area's current state (each combo index is the ordinal of
     * the corresponding enum value — the combos must therefore list entries
     * in enum declaration order).
     *
     * @param codeArea the code area this panel will configure
     */
    public void setCodeArea(ExtCodeArea codeArea) {
        this.codeArea = codeArea;

        viewModeComboBox.setSelectedIndex(codeArea.getViewMode().ordinal());
        showNonprintableCharactersCheckBox.setSelected(codeArea.isShowUnprintables());
        codeTypeComboBox.setSelectedIndex(codeArea.getCodeType().ordinal());
        positionCodeTypeComboBox.setSelectedIndex(codeArea.getPositionCodeType().ordinal());
        hexCharactersModeComboBox.setSelectedIndex(((CodeCharactersCaseCapable) codeArea).getCodeCharactersCase().ordinal());
        antialiasingComboBox.setSelectedIndex(((AntialiasingCapable) codeArea).getAntialiasingMode().ordinal());
        editModeComboBox.setSelectedIndex(codeArea.getEditMode().ordinal());
    }

    /**
     * Test method for this panel.
     *
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        final JFrame frame = new JFrame("Panel");
        frame.setSize(1000, 600);
        frame.add(new ModePanelEx());
        frame.setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        frame.setVisible(true);
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JComboBox<String> antialiasingComboBox;
    private javax.swing.JLabel antialiasingLabel;
    private javax.swing.JComboBox<String> borderTypeComboBox;
    private javax.swing.JLabel borderTypeLabel;
    private javax.swing.JComboBox<String> charsetComboBox;
    private javax.swing.JLabel charsetLabel;
    private javax.swing.JComboBox<String> codeTypeComboBox;
    private javax.swing.JLabel codeTypeLabel;
    private javax.swing.JComboBox<String> editModeComboBox;
    private javax.swing.JLabel editModeLabel;
    private javax.swing.JComboBox<String> fontFamilyComboBox;
    private javax.swing.JLabel fontFamilyLabel;
    private javax.swing.JPanel fontPanel;
    private javax.swing.JComboBox<String> fontSizeComboBox;
    private javax.swing.JLabel fontSizeLabel;
    private javax.swing.JComboBox<String> hexCharactersModeComboBox;
    private javax.swing.JLabel hexCharactersModeLabel;
    private javax.swing.JComboBox<String> positionCodeTypeComboBox;
    private javax.swing.JLabel positionCodeTypeLabel;
    private javax.swing.JCheckBox showNonprintableCharactersCheckBox;
    private javax.swing.JComboBox<String> viewModeComboBox;
    private javax.swing.JLabel viewModeScrollModeLabel;
    // End of variables declaration//GEN-END:variables

    /**
     * Translates a border-type combo index into a concrete Swing border.
     * Index 0 and any index above 7 yield {@code null} (no border).
     *
     * @param borderTypeIndex index selected in the border-type combo
     * @return the matching border, or {@code null}
     */
    @Nullable
    private Border getBorderByType(int borderTypeIndex) {
        switch (borderTypeIndex) {
            case 0: {
                return null;
            }
            case 1: {
                return new EmptyBorder(5, 5, 5, 5);
            }
            case 2: {
                return new BasicBorders.MarginBorder();
            }
            case 3: {
                return new BevelBorder(BevelBorder.RAISED);
            }
            case 4: {
                return new BevelBorder(BevelBorder.LOWERED);
            }
            case 5: {
                return new EtchedBorder(EtchedBorder.RAISED);
            }
            case 6: {
                return new EtchedBorder(EtchedBorder.LOWERED);
            }
            case 7: {
                return new LineBorder(Color.BLACK);
            }
        }
        return null;
    }
}
<filename>collector/cluster_nodes_response.go package collector type clusterNodesResponse struct { ClusterName string `json:"cluster_name"` Nodes map[string]ClusterNodesResponseNode } type ClusterNodesResponseNode struct { Name string `json:"name"` EphemeralID string `json:"ephemeral_id"` TransportAddress string `json:"transport_address"` Attributes ClusterNodesResponseAttributes `json:"attributes"` } type ClusterNodesResponseAttributes struct { MlMaxOpenJobs string `json:"ml.max_open_jobs"` RackID string `json:"rack_id"` MlEnabled string `json:"ml.enabled"` }
// This gets called BEFORE the service has been bound. @Override public IBinder onBind(final Intent intent) { initMediaSession(); return new Binder(); }
package com.dpmn.meshnet;

import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;

import java.util.Random;

import com.dpmn.meshnet.model.Node;

/**
 * Single screen of the app: owns the mesh {@link Node} and displays simple
 * peer/frame counters that the node refreshes via {@link #refreshPeers()} and
 * {@link #refreshFrames()}.
 */
public class MainActivity extends AppCompatActivity {

    private TextView peersTextView;
    private TextView framesTextView;

    // Mesh node: created in onCreate, started/stopped with the activity lifecycle.
    Node node;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        peersTextView = (TextView) findViewById(R.id.peersTextView);
        framesTextView = (TextView) findViewById(R.id.framesTextView);

        node = new Node(this);
    }

    @Override
    protected void onStart() {
        super.onStart();
        node.start();
    }

    @Override
    protected void onStop() {
        super.onStop();
        if (node != null)
            node.stop();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    private static boolean started = false;

    /**
     * Click handler (presumably wired via android:onClick in the layout —
     * confirm): floods the mesh with test traffic, one empty frame followed
     * by 2000 random 1 KiB frames.
     *
     * @param view the clicked view (unused)
     */
    public void sendFrames(View view) {
        /*if(!started) {
            started = true;
            node = new Node(this);
            node.start();
            return;
        }*/
        node.broadcastFrame(new byte[1]);

        // Reuse a single Random instead of constructing one per iteration
        // (the original allocated and re-seeded a new Random 2000 times).
        final Random random = new Random();
        for (int i = 0; i < 2000; ++i) {
            byte[] frameData = new byte[1024];
            random.nextBytes(frameData);
            node.broadcastFrame(frameData);
        }
        /*for(int i = 0; i < 100; ++i) {
            byte[] frameData = new byte[100 * 1024];
            new Random().nextBytes(frameData);
            node.broadcastFrame(frameData);
        }*/
    }

    // Updates the peer-count label from the node's current link list.
    public void refreshPeers() {
        peersTextView.setText(node.getLinks().size() + " connected");
    }

    // Updates the frame-count label from the node's counter.
    public void refreshFrames() {
        framesTextView.setText(node.getFramesCount() + " frames");
    }
} // MainActivity
<filename>lib/property/changeSymbol.spec.ts import { createChangeSymbolCaller } from './changeSymbol' import { stubbedWeb3, stubbedSendTx } from '../utils/for-test' describe('changeSymbol.spec.ts', () => { describe('createChangeSymbolCaller', () => { it('call success', async () => { const expected = true const nextSymbol = 'next' const contract = { methods: { // eslint-disable-next-line @typescript-eslint/no-unused-vars changeSymbol: (nextSymbol: string) => ({ send: jest.fn().mockImplementation(async () => stubbedSendTx()), }), }, } // eslint-disable-next-line @typescript-eslint/no-explicit-any const caller = createChangeSymbolCaller(contract as any, stubbedWeb3) const result = await caller(nextSymbol) expect(result).toEqual(expected) }) it('call failure', async () => { const nextSymbol = 'next' const contract = { methods: { // eslint-disable-next-line @typescript-eslint/no-unused-vars changeSymbol: (nextSymbol: string) => ({ send: jest .fn() .mockImplementation(async () => stubbedSendTx(undefined, true)), }), }, } // eslint-disable-next-line @typescript-eslint/no-explicit-any const caller = createChangeSymbolCaller(contract as any, stubbedWeb3) const result = await caller(nextSymbol).catch((err) => err) expect(result).toBeInstanceOf(Error) }) }) })
Oral Mucosal Ulceration Caused by the Topical Application of a Concentrated Propolis Extract Propolis is a resinous mixture that is collected by honey bees from tree buds, sap flow, and other botanical sources. Propolis has been extensively used in medicine, dentistry, and cosmetics; however, unwanted effects have been reported. This paper reports a case of oral mucosal burn in a 50-year-old patient, who used an overnight application of concentrated propolis to overcome a throbbing pain in the right upper posterior mucosa. The patient was otherwise healthy and was not receiving any medication. She presented with painful shallow multiple irregular ulcers measuring 0.3–1 cm in diameter that were located on the right buccal mucosa and hard palate mucosa, in addition to the gingival mucosa surrounding tooth 17. Propolis-induced oral mucosal burn was diagnosed. The ulcer cleared after the prescription of tetracycline mouthwash, accompanied with Doloneurobion. The patient was further treated with carbamazepine to address the persistent throbbing pain in the affected area, which was suspected to be trigeminal neuralgia. This report provides another alert to clinicians about the potential adverse effects of propolis use for the treatment of oral diseases, despite its natural origin. Introduction Propolis is a Greek word that literally means "in front of the city, " and it is sometimes referred to as bee glue. It is collected by honey bees to construct their hives and serves as waterproof and protection material against invaders . The chemical analysis of propolis has revealed at least 300 compounds as its constituents . It is a complex mixture containing resinous and balsamic compounds (55%) as its major constituents. The remaining constituents are beeswax (30%), essential oils (10%), bee pollen (5%), and organic compounds (5%; phenolic, esters, and flavonoids) . These components are collected from tree buds, sap flow, and other botanical sources. 
The location of plants, climate, and environmental conditions have an important role in determining the ratio and concentration of the components of propolis. For many years, propolis has been considered a traditional herbal medicine that heals various diseases. Propolis has been extensively used in medicine, dentistry, and cosmetics. In vitro and in vivo animal studies of propolis have inferred a number of its biological activities. For example, it exhibits astringent, antiseptic, anesthetic, anti-inflammatory, antibiotic, antifungal, antiviral, antioxidant, immunomodulator, and antineoplastic activities. However, clinical studies of propolis for oral diseases in humans remain limited. Despite the benefits of using propolis in medicine and dentistry, allergic reactions due to propolis have also been reported. A recent study described 22 cases of oral lesions induced by the use of propolis, indicating that the improper use of propolis may have serious adverse effects on the oral mucosa. Although several published reports have described adverse reactions to propolis, we recently documented a new case related to its use. Here, we report a case of a woman who developed oral mucosal ulcers after the topical use of concentrated propolis on a painful dental area. A 50-year-old woman presented to our clinic complaining of a painful lesion located on her upper right buccal mucosa, in addition to the palatal mucosa. She had experienced throbbing pain in the mucosal tissue around tooth 17 a few days earlier. She reported the self-application of a cotton roll that had been damped in propolis to relieve the pain in the mucosal area. The cotton roll was left in contact with the mucosa overnight. She noticed the eruption of a painful oral lesion the next morning, which caused difficulty in eating. The eruption of the lesion was not accompanied by any systemic symptoms, and no other body areas were involved. She also reported the daily consumption of propolis diluted in her drinking water.
A review of her medical history revealed an allergic history to chloramphenicol and occasional gastric pain. Otherwise, the patient was healthy and was not under any medication. A clinical examination revealed multiple shallow and irregular ulcerations on the right buccal mucosa, the right hard palate mucosa, and the gingival area surrounding tooth 17. The size of the ulcers ranged from 0.3 to 1.5 cm in diameter ( Figure 1). Tooth 17 was in the middle of endodontic treatment for wide and deep caries lesion, and the radiograph showed no periapical lesion (Figure 2). Palpation and percussion of tooth 17 were within normal limits. However, the patient considered having tooth 17 extracted. Furthermore, enlarged and painful submandibular lymph nodes were noted on palpation and were possibly the result of inflammation related to tooth 17. A working diagnosis of propolis-induced mucosal burns was made. The patient was advised to discontinue propolis use and was prescribed tetracycline mouthwash three times daily for 3 days and Doloneurobion twice daily for 7 days to manage the pain. She was advised to make a followup consultation after 5 days. Case Presentation On the followup consultation, extra oral examination indicated a normal appearance of the affected area. Tooth 17 had been extracted by a different department, because the patient believed that it was the cause of the throbbing pain. Clinically, there was a healing extraction socket of tooth 17 and healing of the ulcerated area that was seen as the erythematous area. Pain related to the postulcerated area had mainly resolved; however, she reported persistent throbbing pain in the area where tooth 17 had been extracted. No submandibular lymphadenopathy was observed during this visit. Our department suspected trigeminal neuralgia as the cause of the throbbing pain. 
The patient was prescribed a gauze mucosal compress with 0.05% chlorhexidine gluconate three times daily for 3 days to heal the oral mucosa, in addition to 100 mg carbamazepine twice daily for 5 days. The patient was asked to return for a followup consultation in 5 days. On the final consultation, the erythematous area was completely healed, and the rest of the mucosa appeared normal, with the socket of tooth 17 healing after extraction ( Figure 3). The patient reported no pain related in the postulcerated area. However, the throbbing pain in the area of tooth 17 was noted as a "funny feeling. " A 100 mg dose of carbamazepine was prescribed twice daily for 2 weeks. Case Reports in Dentistry 3 Figure 3: The previously ulcerated oral mucosa healed after 5 days of treatment with tetracycline mouthwash. The ulcer was declared to be healed at this visit, and the patient was scheduled for a followup consultation 2 weeks later for the further evaluation of the suspected trigeminal neuralgia. Discussion The various biological benefits of propolis have resulted in it being widely used in medicine, including dentistry . Many in vitro and in vivo studies on propolis have been completed, with several clinical trials on humans showing its beneficial use as an active ingredient for the treatment of eosinophilic ulcers, as an antimicrobial for gingivitis patients, as a component of pulp capping materials, and as an antifungal for patients with denture stomatitis . However, adverse reactions to propolis have also been reported and described in the published literature . A 50-year-old female patient with an oral lesion due to the topical application of propolis was described in this case report. After careful analysis of the nature of the lesion, our patient was diagnosed as having oral mucosal burns following direct contact with concentrated propolis. 
The patient in this case report decided to compress the mucosa with a cotton roll damped with concentrated propolis to ease a throbbing pain in the affected area overnight. The eruption of the lesion in the contact area happened approximately 8 hours after the application of propolis, without any systemic symptoms. The high concentration of the ethanol component (50-70%) in the propolis extract might be the cause of the mucosal burn. The propolis extract had been subjected to a series of extraction processes that use highly concentrated alcohols before it is made publicly available. This high alcohol component might have caused the damage to the oral mucosa in this patient. In addition, keeping a cotton roll in the mouth for several hours might have also caused trauma to the oral mucosa. The ulcer completely healed after 12 days, following the cessation of propolis use and the prescription of appropriate antibiotics with anticollagenolytic effects, in parallel with antiseptics and supportive measures. Although cases of allergic reactions to the topical application of propolis have been reported, we did not suspect that this was the case for our patient. Our patient had a long history of propolis use in her daily life and had been adding propolis to her drinking water, with no adverse effects. Many reports state that the median time for lesions related to allergy to propolis occurs after 2.5 days (range: 0-15 days). In contrast, our patient developed the ulcer approximately 8 hours after propolis application. Studies on the allergic potential of propolis have revealed that it should not be used as a topical product due to its high sensitizing characteristics. The minor constituents in propolis, such as 3-methyl-2-butenyl caffeate and phenyl-ethyl caffeate, are major allergens, in addition to benzyl-salicylate and benzylcinnamate. We did not order a patch test to check for a possible allergic reaction of our patient to propolis.
Therefore, we could not confirm whether the oral ulceration was due to a propolis allergy within 8 hours of exposure . However, possible allergic contact mucositis that was facilitated by injury to oral mucosa could be postulated. This case report provides another alert to clinicians about the potential adverse effect of propolis when used to treat oral diseases, as some propolis applications may have serious negative effects. Although there is an increasing global trend in the use of propolis for medication, the important discovery of its beneficial roles should be in parallel with research undertaken to specifically define its application in many areas of dentistry. Careful consideration should be given before using propolis to treat oral diseases, as many clinical complications might arise, despite its natural origin .
The United States presidential election of 1908 was the 31st quadrennial presidential election, held on Tuesday, November 3, 1908. Secretary of War and Republican Party nominee William Howard Taft defeated three-time Democratic nominee William Jennings Bryan. Popular incumbent President Theodore Roosevelt honored his promise not to seek a third term, and persuaded his close friend, Taft, to become his successor. With Roosevelt's support, Taft won the presidential nomination of the 1908 Republican National Convention on the first ballot. Having lost the 1904 election badly, the Democratic Party re-nominated Bryan, who had been defeated in 1896 and 1900 by Republican William McKinley. Despite his two previous defeats and the waning of the Free Silver issue, Bryan remained extremely popular among the more liberal and populist elements of the Democratic Party. Bryan ran a vigorous campaign against the nation's business elite, but the Democrat suffered the worst loss of his three presidential campaigns. Taft won 51.6% of the popular vote and carried most states outside of the Solid South. Taft's triumph gave Republicans their fourth straight presidential election victory. Two third party candidates, Eugene V. Debs of the Socialist Party and Eugene W. Chafin of the Prohibition Party, each took over 1% of the popular vote. Nominations [ edit ] Republican Party nomination [ edit ] Republican candidates: The Republican nomination contest marked the introduction of the presidential preference primary. The idea of the primary to nominate candidates was sponsored by anti-machine politicians such as New York Governor Charles Evans Hughes and Senator Albert B. Cummins. The first state to hold a presidential primary to select delegates to a national convention was Florida in 1904, when Democratic Party voters held a primary among uninstructed candidates for delegate. 
Early in 1908, the only two Republican contenders running nationwide campaigns for the presidential nomination were Secretary of War William Howard Taft and Governor Joseph B. Foraker, both of Ohio. In the nomination contest, four states held primaries to select national convention delegates. In Ohio, the state Republican Party held a primary on February 11. Candidates pledged to Taft were printed on the ballot in a Taft column, and candidates pledged to Foraker were printed in a column under his name. Taft won a resounding victory in Ohio. The three states holding primaries to select delegates without the preference component were split: California chose a slate of delegates that supported Taft; Wisconsin elected a slate that supported Wisconsin Senator Robert M. La Follette, Sr., and Pennsylvania elected a slate that supported its Senator Philander C. Knox. The 1908 Republican Convention was held in Chicago between June 16 and 19. William Howard Taft was nominated with 702 votes to 68 for Knox, 67 for Hughes, 58 for Cannon, 40 for Fairbanks, 25 for La Follette, 16 for Foraker, 3 for President Roosevelt, and one abstention.[2] [3] Representative James S. Sherman from New York received the vice-presidential nomination. [4] Democratic Party nomination [ edit ] As the 1908 election approached, William Jennings Bryan was the front-runner for the Democratic presidential nomination. Bryan's most formidable challenger for the nomination was Minnesota Governor John Albert Johnson. Johnson's rags-to-riches story, honesty, reformist credentials, and ability to win in a heavily Republican state made him popular within the Democratic Party. In March, the Minnesota Democratic State Convention endorsed Johnson for president. By the end of June, however, Bryan had amassed more than the requisite two-thirds of the delegates needed for nomination. The 1908 Democratic National Convention was held in Denver between July 7 and 10. 
Johnson, aware of the fact that Bryan's nomination was a foregone conclusion, released his delegates, thereby allowing Bryan to win the nomination on the first ballot.[5] Bryan left the choice of vice-president to the delegates. John W. Kern from Indiana was unanimously declared the candidate for vice-president without a formal ballot after the names of Charles A. Towne, Archibald McNeil, and Clark Howell were withdrawn from consideration. Kern was a former state senator (1893-1897) and two-time gubernatorial candidate (1900 and 1904). In response to nomination of Bryan and Kern, The New York Times disparagingly pointed out that the Democratic national ticket was consistent because "a man twice defeated for the Presidency was at the head of it, and a man twice defeated for governor of his state was at the tail of it."[5] Others [ edit ] Independence Party nomination [ edit ] Independence candidates: Disappointed with his performance in the 1904 Democratic presidential nomination campaign, and disillusioned as to his chances of successfully attaining it in 1908, William Randolph Hearst decided to run instead on the ticket of a third party of his own making. Originally borne from the Municipal Ownership League, a vehicle for Hearst's ultimately unsuccessful bid for the mayoralty of New York in 1905, it was Hearst's intention to fuse it with the remnants of the Populist Party led by Thomas Watson, a former Representative from Georgia who had been its presidential nominee in 1904. However, these intentions were dashed when every candidate that the Independence Party put forth in elections held in New York was elected except Hearst himself, despite an endorsement by the Democratic Party. Devastated, Hearst declared his intention never again to be a candidate. While Hearst would no longer be the nominee, he fully intended to exercise influence at Independence Party's convention; the platform itself was in large part a statement of his own views. 
With its candidates nominated, the party's purpose was changed from being a path for Hearst's presidential ambitions to being an instrument of his wrath. Through the influence of his papers and generous financial donations, Hearst hoped that the Independence ticket would draw away votes from William Jennings Bryan and lead to his defeat against Taft, a personal vendetta for Bryan failing to support his own bid for the Presidency in 1904. [6] Socialist Party nomination [ edit ] Socialist candidates: Eugene Debs had originally hoped that Bill Haywood, who had attained a national profile from being put on trial for the murder of Frank Steunenberg, of which he was acquitted, would run for the Socialist nomination for president. At this time however the Socialist Party was fracturing between its radical and more moderate elements, and Debs was deemed the only candidate capable of keeping the party unified. He was overwhelmingly nominated for the presidency on the first ballot, with Benjamin Hanford again named as his running-mate. General election [ edit ] Campaign [ edit ] The aging and balding "Boy Orator of the Platte " delivers a speech. With the Free Silver issue no longer dominant, Bryan campaigned on a progressive platform attacking "government by privilege." His campaign slogan, "Shall the People Rule?", was featured on numerous posters and campaign memorabilia. However, Taft undercut Bryan's liberal support by accepting some of his reformist ideas, and Roosevelt's progressive policies blurred the distinctions between the parties. Republicans also used the slogan "Vote for Taft now, you can vote for Bryan anytime," a sarcastic reference to Bryan's two failed previous presidential campaigns. The Socialist candidate, Eugene Debs, embarked on an ambitious whistle-stop tour aboard a train nicknamed the Red Special, giving speeches regarding the Socialist cause across the country. 
The exertion of the tour exhausted Debs, and at certain points his brother Theodore - who bore a great resemblance to Eugene - substituted for him to allow the candidate to rest.[7] Businessmen continued to support the Republican Party, and Bryan failed to secure the support of labor. As a result, Bryan ended up with the worst of his three defeats in the national popular vote. He lost almost all the northern states to Taft and the popular vote by 8 percentage points. This would be Bryan's last campaign for the presidency, although he would remain a popular figure within the Democratic Party and in 1912 would play a key role in securing the presidential nomination for Woodrow Wilson. Charles W. Bryan, William's brother, would become the (losing) Democratic nominee for Vice President in 1924. Results [ edit ] [8] Results by county explicitly indicating the percentage for the winning candidate. Shades of red are for Taft (Republican), shades of blue are for Bryan (Democratic), shades of green are for "Other(s)" (Non-Democratic/Non-Republican), grey indicates zero recorded votes, and white indicates territories not elevated to statehood. Roosevelt handing over his policies to his political protégé, William H. Taft. Forty-six states participated, as Oklahoma had joined the Union less than a year before. Bryan won forty-eight counties in the new state of Oklahoma. The most important increase in number of counties carried by Bryan was in the West South Central section, in part due to the vote of newly admitted Oklahoma.[9] Of the 2,858 counties making returns, Taft won in 1,494 (52.27%) while Bryan carried 1,355 (47.41%). Nine (0.31%) counties recorded more votes cast for "Other(s)" than either of the two-party candidates, whilst twenty-eight counties (0.97%) recorded zero votes due to being inhabited either by Native Americans who would not gain full citizenship for sixteen years, or by disenfranchised southern African-Americans. 
Taft had a majority in 1,325 counties while Bryan had a majority in 1,204 counties. By carrying 1,355 counties, Bryan won more counties than he had in 1900 (1,340), but he did not reach or surpass the number of counties he had won in 1896 (1,559). While Bryan won more counties than McKinley in 1896, Bryan failed to carry more counties than the Republican candidate in 1900 or 1904. Compared with his strength in previous elections, however, Bryan carried 69 counties in 1908 which had not been Democratic in either 1896 or 1900.[10] Bryan increased the area carried by Democrats in every part of the country except New England and the South. He doubled the number of Democratic counties in Wisconsin and won more counties in Indiana than were carried by plurality vote by the Democrats in any election in the Fourth Party System except 1912. He made decided gains in Missouri and in his home state of Nebraska,[9] besides achieving notable victories in Colorado and Nevada. However, in four Western states (Washington, Oregon, Wyoming, and North Dakota), there was not one Democratic county. This was true likewise of Michigan, Delaware, and each of the New England states. The total vote increased greatly, by more than a million vis-à-vis 1904. Each party shared in the increase, but whereas Taft had nearly fifty thousand more than Theodore Roosevelt, Bryan had nearly 1,500,000 more votes than Alton Parker had garnered, and more than in either of his previous campaigns. It was noticeable that the "other" vote was only about seven thousand less than four years earlier. The "other" vote was a plurality in nine counties in the states of Georgia and Texas. The size of the vote cast for the defeated Bryan in 1908 is clear evidence of perhaps the most striking feature of the American presidential vote. 
In this third attempt at the presidency, and in an election following one in which the nominee of his party polled only five million votes, Bryan had heavy support in every section of the country, and in every state. Moreover, nearly two-thirds of the vote cast for Bryan was from the fifteen states of the (Northeastern) Mid-Atlantic, East North Central, and West North Central sections, in which the Democratic candidate carried only one state (Nebraska). Despite all conclusions as to predominant sentiment in the different sections and its economic, social, and political causes, there was a national vote cast for Bryan, and it was urban as well as rural; it was eastern, western, southern, and northern. Everywhere the Democratic Party was the minority party, and it was not hopeless, nor was it helpless. It was the agency for the expression of the opposition of almost six and a half million voters.[10] This was the last election in which the Republicans won California, Idaho, Kansas, Missouri, Montana, New Hampshire, North Dakota, Ohio and Wyoming until 1920. As of 2017, this is the last of only two elections when Kansas[11] and Nebraska[12] have not voted for the same candidate.[a] The election of 1908 was the last election in which a Republican won the presidency without winning Nebraska. It is also the last election in which a Republican won without Nevada and/or Colorado until 2016. Source (Popular Vote): Leip, David. "1908 Presidential Election Results". Dave Leip's Atlas of U.S. Presidential Elections . Source (Electoral Vote): "Electoral College Box Scores 1789–1996". National Archives and Records Administration . 
Popular vote Taft 51.57% Bryan 43.04% Debs 2.83% Chafin 1.71% Others 0.85% Electoral vote Taft 66.46% Bryan 33.54% Geography of results [ edit ] Results by county, shaded according to winning candidate's percentage of the vote Cartographic gallery [ edit ] Map of presidential election results by county Map of Republican presidential election results by county Map of Democratic presidential election results by county Map of "other" presidential election results by county Cartogram of presidential election results by county Cartogram of Republican presidential election results by county Cartogram of Democratic presidential election results by county Cartogram of "other" presidential election results by county Results by state [ edit ] [13] States won by Taft/Sherman States won by Bryan/Kern Close states [ edit ] Margin of victory less than 1% (34 electoral votes): Missouri, 0.09% Maryland, 0.25% Margin of victory less than 5% (46 electoral votes): Colorado, 1.12% Indiana, 1.49% Nebraska, 1.54% Kentucky, 1.71% Nevada, 1.78% Montana, 4.37% Oklahoma, 4.66% Margin of victory between 5% and 10% (60 electoral votes): Delaware, 6.16% Tennessee, 6.86% Ohio, 6.20% North Carolina, 8.73% Kansas, 9.58% Statistics [ edit ] Counties with Highest Percent of Vote (Republican) Counties with Highest Percent of Vote (Democratic) Counties with Highest Percent of Vote (Other) Campaign memorabilia [ edit ] Taft-Sherman postcard Collier's magazine cover Humorous postcard John Johnson ribbon Taft-Sherman postcard with U.S. Capitol Bryan-Kern postcard with U.S. Capitol See also [ edit ] Notes [ edit ] References [ edit ] Further reading [ edit ]
<gh_stars>1-10 declare type Port = chrome.runtime.Port; export class ChromeRuntimePort implements Port { sender?: chrome.runtime.MessageSender | undefined; onDisconnect: chrome.runtime.PortDisconnectEvent; onMessage: chrome.runtime.PortMessageEvent; onMessageHandlers = new Set<(message: any, port: Port) => void>(); constructor(readonly name: string) { this.onMessage = { addListener: (onMessageHandler: (message: any, port: Port) => void) => { this.onMessageHandlers.add(onMessageHandler); }, removeListener: (onMessageHandler: (message: any, port: Port) => void) => { this.onMessageHandlers.delete(onMessageHandler); } } as any; this.onDisconnect = { addListener: (onDisconnectHandler: (port: Port) => void) => { }, removeListener: (onDisconnectHandler: (port: Port) => void) => { } } as any; document.documentElement!.addEventListener('message-from-extension', e => { if (e instanceof CustomEvent && e.detail) { this.onMessageHandlers.forEach(handler => handler(JSON.parse(e.detail), this)); } }); } public postMessage<TMessage>(message: TMessage) { document.documentElement!.dispatchEvent( new CustomEvent('message-from-portal', { detail: JSON.stringify(message) })); } public disconnect() { } }
def format_words(words, max_rows=10, max_cols=8, sep=" "):
    """Lay out *words* as a text grid of at most ``max_rows`` lines.

    Words are joined ``max_cols`` per line with ``sep``. If the grid would
    exceed ``max_rows`` lines, only the first ``max_rows - 1`` lines are
    kept and a final ``...(N more)`` line reports how many words were cut.

    Args:
        words: sequence of strings to lay out.
        max_rows: maximum number of output lines (including the ellipsis line).
        max_cols: number of words per line.
        sep: separator placed between words on a line.

    Returns:
        The formatted multi-line string (empty string for no words).
    """
    rows = []
    for start in range(0, len(words), max_cols):
        rows.append(sep.join(words[start:start + max_cols]))

    if len(rows) <= max_rows:
        return "\n".join(rows)

    # Too tall: keep max_rows - 1 full rows and summarize the remainder.
    kept = rows[:max_rows - 1]
    n_missing = len(words) - max_cols * len(kept)
    return "\n".join(kept) + f"\n...({n_missing} more)"
# Reads five integers from stdin and prints two numbers separated by spaces.
# Looks like a competitive-programming solution: a1/a2 appear to be counts of
# two player groups, k1/k2 their per-player card limits, and n the number of
# cards — the outputs being the minimum and maximum number of players that
# must be sent off. TODO(review): confirm against the original problem.
a1 = int(input())
a2 = int(input())
k1 = int(input())
k2 = int(input())
n = int(input())
# Minimum: a1*(k1-1) + a2*(k2-1) cards can be absorbed with nobody reaching
# their limit; only the excess beyond that forces send-offs.
if n <= a1*(k1-1)+a2*(k2-1):
    print(0,end=" ")
elif n > a1*(k1-1)+a2*(k2-1):
    print(min(n-a1*(k1-1)-a2*(k2-1),a1+a2),end=" ")
# Maximum: greedily exhaust the group with the smaller limit first, then
# spend the remaining cards on the other group.
if k1 <= k2:
    print(min(n//k1,a1)+max(n-min(n//k1,a1)*k1,0)//k2,end=" ")
elif k2 < k1:
    print(min(n//k2,a2)+max(n-min(n//k2,a2)*k2,0)//k1,end=" ")
/// Swaps out the DMA buffers.
///
/// NOTE: For the CVE-2018-6242 exploit, usage of the high
/// buffer effectively reduces the amount of writes necessary.
///
/// If the requested buffer (`high`) is already the active one, this is a
/// no-op. Otherwise a 0x1000-byte zero-fill write is issued before the
/// internal buffers are swapped — presumably to flush/align the outgoing
/// buffer before switching; TODO(review): confirm against `write`'s
/// semantics, which are defined elsewhere in this impl.
pub fn switch_dma_buffer(&mut self, high: bool) -> Result<()> {
    if self.is_dma_buffer_high != high {
        // Propagates any write error before the swap takes place.
        self.write(&[0x00; 0x1000])?;
        self.swap_dma_buffers();
    }
    Ok(())
}
def update_window_handle(self):
    """Refresh the cached native window handle.

    Re-queries ``self.get_window_handle()`` and stores the result in
    ``self._hwnd`` so later calls use the current handle.
    """
    # Fixed: dropped the stray trailing semicolon (un-Pythonic, flagged by linters).
    self._hwnd = self.get_window_handle()
<gh_stars>10-100
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------

from typing import Any, Dict

from .kusto_engine import KustoEngine
from .constants import ConnStrKeys, Schema


class AriaEngine(KustoEngine):
    """Kusto engine specialized for the Aria cluster.

    Differs from the base :class:`KustoEngine` only in its URI schema name,
    its default cluster endpoint, and the connection-string key combinations
    it accepts; all behavior is inherited.
    """

    # Constants
    # ---------
    _URI_SCHEMA_NAME = Schema.ARIA  # no spaces, underscores, and hyphen-minus, because they are ignored in parser
    _ALT_URI_SCHEMA_NAMES = [_URI_SCHEMA_NAME]  # accepted schema-name aliases (only the canonical one here)
    _RESERVED_CLUSTER_NAME = _URI_SCHEMA_NAME
    _DEFAULT_CLUSTER_NAME = "https://kusto.aria.microsoft.com"

    # Legacy validation table: each entry is one complete, valid set of
    # connection-string keys (client-secret, certificate, device code,
    # username/password, and anonymous flavors).
    _VALID_KEYS_COMBINATIONS = [
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CLIENTID, ConnStrKeys.CLIENTSECRET],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CLIENTID, ConnStrKeys.CERTIFICATE, ConnStrKeys.CERTIFICATE_THUMBPRINT],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CLIENTID, ConnStrKeys.CODE],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CODE],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CLIENTID, ConnStrKeys.USERNAME, ConnStrKeys.PASSWORD],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.USERNAME, ConnStrKeys.PASSWORD],
        [ConnStrKeys.DATABASE, ConnStrKeys.ALIAS, ConnStrKeys.ANONYMOUS],
    ]

    # Newer validation table: "must" keys are required, "extra" keys may
    # additionally appear, "optional" keys are always allowed.
    _VALID_KEYS_COMBINATIONS_NEW = [
        {
            "must": [ConnStrKeys.DATABASE, ConnStrKeys.CLIENTID, ConnStrKeys.CLIENTSECRET],
            "extra": [],
            "optional": [ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL]
        },
        {
            "must": [ConnStrKeys.DATABASE, ConnStrKeys.CLIENTID, ConnStrKeys.CERTIFICATE, ConnStrKeys.CERTIFICATE_THUMBPRINT],
            "extra": [],
            "optional": [ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL]
        },
        {
            "must": [ConnStrKeys.DATABASE, ConnStrKeys.CODE],
            "extra": [ConnStrKeys.CLIENTID, ConnStrKeys.USERNAME],
            "optional": [ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL]
        },
        {
            "must": [ConnStrKeys.DATABASE, ConnStrKeys.USERNAME, ConnStrKeys.PASSWORD],
            "extra": [ConnStrKeys.CLIENTID],
            "optional": [ConnStrKeys.ALIAS, ConnStrKeys.TENANT, ConnStrKeys.AAD_URL, ConnStrKeys.CLIENTID]
        },
        {
            "must": [ConnStrKeys.DATABASE, ConnStrKeys.ANONYMOUS],
            "extra": [],
            "optional": [ConnStrKeys.ALIAS]
        }
    ]

    # Class methods
    # -------------

    # Instance methods
    # ----------------

    def __init__(self, conn_str:str, user_ns:Dict[str,Any], current:KustoEngine=None, conn_class=None, **options)->Any:
        # Aria adds no construction logic of its own; the base engine does all
        # the parsing/validation driven by the class constants above.
        super(AriaEngine, self).__init__(conn_str, user_ns, current, conn_class, **options)
<filename>pkg/utils/build/sbom.go package build import ( "encoding/json" "fmt" "os" "github.com/openshift/library-go/pkg/image/reference" "github.com/openshift/oc/pkg/cli/image/extract" "github.com/openshift/oc/pkg/cli/image/imagesource" "k8s.io/cli-runtime/pkg/genericclioptions" "k8s.io/klog" ) func GetParsedSbomFilesContentFromImage(image string) (*SbomPurl, *SbomCyclonedx, error) { dockerImageRef, err := reference.Parse(image) if err != nil { return nil, nil, fmt.Errorf("cannot parse docker pull spec (image) %s, error: %+v", image, err) } tmpDir, err := os.MkdirTemp(os.TempDir(), "sbom") if err != nil { return nil, nil, fmt.Errorf("error when creating a temp directory for extracting files: %+v", err) } klog.Infof("extracting contents of container image %s to dir: %s", image, tmpDir) eMapping := extract.Mapping{ ImageRef: imagesource.TypedImageReference{Type: "docker", Ref: dockerImageRef}, To: tmpDir, } e := extract.NewExtractOptions(genericclioptions.IOStreams{Out: os.Stdout, ErrOut: os.Stderr}) e.Mappings = []extract.Mapping{eMapping} if err := e.Run(); err != nil { return nil, nil, fmt.Errorf("error: %+v", err) } purl, err := getSbomPurlContent(tmpDir) if err != nil { return nil, nil, fmt.Errorf("failed to get sbom purl content: %+v", err) } cyclonedx, err := getSbomCyclonedxContent(tmpDir) if err != nil { return nil, nil, fmt.Errorf("failed to get sbom cyclonedx content: %+v", err) } return purl, cyclonedx, nil } type SbomPurl struct { ImageContents struct { Dependencies []struct { Purl string `json:"purl"` } `json:"dependencies"` } `json:"image_contents"` } type SbomCyclonedx struct { Components []struct { Name string `json:"name"` } `json:"components"` } func getSbomPurlContent(rootDir string) (*SbomPurl, error) { sbomPurlFilePath := rootDir + "/root/buildinfo/content_manifests/sbom-purl.json" file, err := os.Stat(sbomPurlFilePath) if err != nil { return nil, fmt.Errorf("sbom file not found in path %s", sbomPurlFilePath) } if file.Size() == 0 { return nil, 
fmt.Errorf("sbom file %s is empty", sbomPurlFilePath) } b, err := os.ReadFile(sbomPurlFilePath) if err != nil { return nil, fmt.Errorf("error when reading sbom file %s: %v", sbomPurlFilePath, err) } sbom := &SbomPurl{} if err := json.Unmarshal(b, sbom); err != nil { return nil, fmt.Errorf("error when parsing sbom PURL json: %v", err) } return sbom, nil } func getSbomCyclonedxContent(rootDir string) (*SbomCyclonedx, error) { sbomCyclonedxFilePath := rootDir + "/root/buildinfo/content_manifests/sbom-cyclonedx.json" file, err := os.Stat(sbomCyclonedxFilePath) if err != nil { return nil, fmt.Errorf("sbom file not found in path %s", sbomCyclonedxFilePath) } if file.Size() == 0 { return nil, fmt.Errorf("sbom file %s is empty", sbomCyclonedxFilePath) } b, err := os.ReadFile(sbomCyclonedxFilePath) if err != nil { return nil, fmt.Errorf("error when reading sbom file %s: %v", sbomCyclonedxFilePath, err) } sbom := &SbomCyclonedx{} if err := json.Unmarshal(b, sbom); err != nil { return nil, fmt.Errorf("error when parsing sbom CycloneDX json: %v", err) } return sbom, nil }
The existence of the book is well-known; its contents legendary. But apart from a few off-kilter snapshots posted to Flickr in 2006, images of the document itself were scarce. So when Niko Skourtis, Jesse Reed and Hamish Smyth found the 1970 manual in a locker beneath a pile of dirty clothes in the Pentagram basement, they did the world a favor and posted its pages, PDF by PDF, on a new website. "The subway rider should be given only information at the point of decision," proclaimed the designers. "Never before. Never after." The 180-page binder, the key to the system's iconic design choices, outlines a meticulous vision of signage intended not merely to look good -- though it does -- but to simplify navigation of the subterranean labyrinth. In its attention to passenger behavior, the manual goes above and beyond what most of us would term graphic design. Late one night last August, three Pentagram designers rummaging through the design firm's basement archives found the Rosetta Stone of New York subway graphics: the original Standards Manual, designed by Bob Noorda and Massimo Vignelli in the late 1960s. "None of us had ever seen anything like that before," Skourtis says. "In its full glory." Nearly fifty years ago, the New York City Transit Authority hired Bob Noorda's Unimark International to develop a uniform system of signage for the subway system. Since the subway comprised what had been three separate systems -- the IND, the IRT, and the BMT -- the stations and trains were cluttered with conflicting, confusing instructions. Noorda and Vignelli made it look easy. They gave the system its sans-serif typeface (the sign-makers would not or could not yet print Helvetica, so the designers settled initially for Standard Medium), its color-coded disks, and its pared-down, modular signage. A wonder of precision, the Standards Manual even addresses the heights of conductors with and without hats.
"It's pretty much a design bible as far as standards manuals go," Skourtis says. Some things have changed in the last fifty years. Standard Medium has been (rather famously) replaced by Helvetica. In an effort to discourage graffiti, signs now feature white lettering on black, as opposed to Unimark's prescribed black-on-white. The surest sign of the book's age? A mock-up sign warning readers: "Clerk not required to accept bills over 5 dollars." But the graphic language of the subway is nearly the same, and a flip through the book relays its remarkable aesthetic coherence. (If anything, the current system of signage is a slight under-design of the Unimark model. Noorda wanted every station peppered with "Directories," alphabetical listings of stations showing, in symbols, exactly how to get from here to there.) The original document offers some remarkably crisp images of the subway's vernacular, like this diagram of how to construct the system's most commonly used symbol: There are, the Manual stresses, six and only six ways to use these arrows. The designers even included some examples of ways not to use these symbols (at the bottom of the page). One imagines that the designers would also have forbidden the now-common U-turn arrow, if they had foreseen its use. But the Standards Manual's most famous contribution is surely the vibrant, colored disks for trains, today the stuff of t-shirts and posters: And unlike Vignelli's diagrammatic 1972 subway map, which has achieved immortality in the design world since it was banished in 1979, these symbols have garnered the affection of strap-hangers and typographers alike. The reaction to the published manual, Skourtis said, has been huge. "We've gotten a gigantic response -- the site crashed two or three times in the first week," he said. "We got a ton of emails. People were so happy to see the thing in its entirety."
/**
 * Sanity tests for {@code RegressionEvaluation} as driven through DL4J
 * networks (MultiLayerNetwork and ComputationGraph), plus direct eval with
 * per-output masking and time-series splitting.
 *
 * @author Alex Black
 */
public class RegressionEvalTest extends BaseDL4JTest {

    @Test
    public void testRegressionEvalMethods() {
        //Basic sanity check
        // Zero-initialized weights + tanh mean every prediction is exactly 0.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.ZERO).list()
                        .layer(0, new OutputLayer.Builder().activation(Activation.TANH)
                                        .lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(5).build())
                        .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // Labels are all ones, so every per-column error is |0 - 1| = 1.
        INDArray f = Nd4j.zeros(4, 10);
        INDArray l = Nd4j.ones(4, 5);

        DataSet ds = new DataSet(f, l);
        DataSetIterator iter = new ExistingDataSetIterator(Collections.singletonList(ds));

        org.nd4j.evaluation.regression.RegressionEvaluation re = net.evaluateRegression(iter);

        for (int i = 0; i < 5; i++) {
            // MSE and MAE both equal 1.0 for each of the 5 output columns.
            assertEquals(1.0, re.meanSquaredError(i), 1e-6);
            assertEquals(1.0, re.meanAbsoluteError(i), 1e-6);
        }

        // Same check through the ComputationGraph API — results must match.
        ComputationGraphConfiguration graphConf =
                        new NeuralNetConfiguration.Builder().weightInit(WeightInit.ZERO).graphBuilder()
                                        .addInputs("in").addLayer("0", new OutputLayer.Builder()
                                                        .lossFunction(LossFunctions.LossFunction.MSE)
                                                        .activation(Activation.TANH).nIn(10).nOut(5).build(), "in")
                                        .setOutputs("0").build();

        ComputationGraph cg = new ComputationGraph(graphConf);
        cg.init();

        RegressionEvaluation re2 = cg.evaluateRegression(iter);

        for (int i = 0; i < 5; i++) {
            assertEquals(1.0, re2.meanSquaredError(i), 1e-6);
            assertEquals(1.0, re2.meanAbsoluteError(i), 1e-6);
        }
    }

    @Test
    public void testRegressionEvalPerOutputMasking() {
        // Predictions are all zero, so each column's error comes only from
        // the unmasked label entries in that column.
        INDArray l = Nd4j.create(new double[][] {{1, 2, 3}, {10, 20, 30}, {-5, -10, -20}});
        INDArray predictions = Nd4j.zeros(l.shape());

        INDArray mask = Nd4j.create(new double[][] {{0, 1, 1}, {1, 1, 0}, {0, 1, 0}});

        RegressionEvaluation re = new RegressionEvaluation();

        re.eval(l, predictions, mask);

        // Expected values computed by hand over the unmasked entries per column:
        // col 0: {10}; col 1: {2, 20, -10}; col 2: {3}.
        double[] mse = new double[] {(10 * 10) / 1.0, (2 * 2 + 20 * 20 + 10 * 10) / 3, (3 * 3) / 1.0};

        double[] mae = new double[] {10.0, (2 + 20 + 10) / 3.0, 3.0};

        double[] rmse = new double[] {10.0, Math.sqrt((2 * 2 + 20 * 20 + 10 * 10) / 3.0), 3.0};

        for (int i = 0; i < 3; i++) {
            assertEquals(mse[i], re.meanSquaredError(i), 1e-6);
            assertEquals(mae[i], re.meanAbsoluteError(i), 1e-6);
            assertEquals(rmse[i], re.rootMeanSquaredError(i), 1e-6);
        }
    }

    @Test
    public void testRegressionEvalTimeSeriesSplit(){
        // Evaluating a full time series must equal evaluating its two halves
        // separately and accumulating — the evaluation is order/segmentation
        // independent.
        INDArray out1 = Nd4j.rand(new int[]{3, 5, 20});
        INDArray outSub1 = out1.get(all(), all(), interval(0,10));
        INDArray outSub2 = out1.get(all(), all(), interval(10, 20));

        INDArray label1 = Nd4j.rand(new int[]{3, 5, 20});
        INDArray labelSub1 = label1.get(all(), all(), interval(0,10));
        INDArray labelSub2 = label1.get(all(), all(), interval(10, 20));

        RegressionEvaluation e1 = new RegressionEvaluation();
        RegressionEvaluation e2 = new RegressionEvaluation();

        e1.eval(label1, out1);

        e2.eval(labelSub1, outSub1);
        e2.eval(labelSub2, outSub2);

        assertEquals(e1, e2);
    }
}
<filename>disconnect-vaadin/src/main/java/com/github/fluorumlabs/disconnect/vaadin/crud/DialogLayout.java
package com.github.fluorumlabs.disconnect.vaadin.crud;

import com.github.fluorumlabs.disconnect.core.components.HtmlComponent;
import js.lang.external.vaadin.crud.DialogLayoutElement;

/**
 * Server-side wrapper for the vaadin-crud internal dialog layout element.
 *
 * <p>The class body holds only {@code !wca!} marker comments, which look like
 * web-component-analyzer property annotations for {@link DialogLayoutElement};
 * presumably accessors are generated from them elsewhere — TODO(review):
 * confirm against the project's code generator before adding members here.</p>
 *
 * <strong>Mixins:</strong> ElementMixin, ThemableMixin, ElementMixin
 */
class DialogLayout extends HtmlComponent<DialogLayoutElement> {
    // !wca! get editorPosition: string
    // !wca! set editorPosition: string
    // !wca! observe editorPosition: string

    // !wca! get mobile: boolean
    // !wca! set mobile: boolean
    // !wca! observe mobile: boolean

    // !wca! get noCloseOnEsc: boolean
    // !wca! set noCloseOnEsc: boolean
    // !wca! observe noCloseOnEsc: boolean

    // !wca! get noCloseOnOutsideClick: boolean
    // !wca! set noCloseOnOutsideClick: boolean
    // !wca! observe noCloseOnOutsideClick: boolean

    // !wca! get opened: boolean
    // !wca! set opened: boolean
    // !wca! observe opened: boolean

    // !wca! get theme: string
    // !wca! set theme: string
    // !wca! observe theme: string
}
<reponame>PoProstuWitold/fastify-react-yt-downloader import { FastifyInstance, FastifyRequest, RouteOptions, FastifyReply } from 'fastify' import { Video } from '../models/video.model' import { downloadQueue } from '../queues/download.queue' import fs from 'fs/promises' const downloadVideoOptions: Partial<RouteOptions> = { schema: { body: { type: 'object', required: ['youtubeUrl'], } } } export const downloadRouter = (fastify: FastifyInstance, opts: RouteOptions, done: Function) => { fastify.get( '/:id', opts, async (req: FastifyRequest, rep: FastifyReply) => { //@ts-ignore const { id } = req.params const video = await Video.findById(id) if (!video) { return rep.status(404).send('Video not found') } const { file } = video rep.status(204).download(file) } ) fastify.post( '/', downloadVideoOptions, async (req: FastifyRequest, rep: FastifyReply) => { try { //@ts-ignore const { youtubeUrl } = req.body await downloadQueue.add({ youtubeUrl }) return rep.status(200).send('Downloading') } catch (error) { throw error } } ) fastify.get( '/', opts, async (req: FastifyRequest, rep: FastifyReply) => { const videos = await Video.find().sort({ createdAt: -1 }) rep.status(200).send(videos) } ) fastify.delete( '/:id', opts, async (req: FastifyRequest, rep: FastifyReply) => { //@ts-ignore const { id } = req.params; const video = await Video.findByIdAndDelete(id) if (!video) { return rep.status(404).send('Video not found') } await fs.unlink(video.file!) rep.status(200).send(video) } ) done() }
<filename>dtbeat-algorithm-sorting/src/main/java/com/dtbeat/sorting/RadixSort.java<gh_stars>0
package com.dtbeat.sorting;

import java.util.Arrays;
import java.util.OptionalInt;

/**
 * RadixSort
 *
 * <p>LSD (least-significant-digit) radix sort over base-10 digits using a
 * stable counting sort per digit pass. Supports non-negative integers only:
 * a negative element would produce a negative digit and an out-of-range
 * bucket index.</p>
 *
 * @author elvinshang
 * @version Id: RadixSort.java, v0.0.1 2020/9/5 23:36 dtbeat.com $
 */
public class RadixSort {
    /**
     * Sorts array by RadixSort algorithm
     *
     * @param arr the array of non-negative ints; {@code null} or length &lt;= 1 is a no-op
     */
    public static void sort(int[] arr) {
        if (arr == null || arr.length <= 1) {
            return;
        }

        final int N = arr.length;
        final int R = 10;
        int[] aux = new int[N];
        // BUG FIX: div was an int, so for elements >= 10^9 the final
        // `div *= 10` wrapped around, producing negative digits and an
        // ArrayIndexOutOfBoundsException. A long never overflows here
        // (at most 10^10 for 32-bit inputs).
        long div = 1;

        while (true) {
            int[] counts = new int[R + 1];
            boolean hasMoreDigits = false;

            // Count digit frequencies; note whether any element still has a
            // non-zero quotient (i.e. another digit left to process).
            for (int i = 0; i < N; i++) {
                int r = (int) (arr[i] / div);
                int b = (r % 10);
                if (r > 0) {
                    hasMoreDigits = true;
                }
                counts[b + 1]++;
            }
            if (!hasMoreDigits) {
                break;
            }

            // Convert frequencies to start indices (stable counting sort).
            for (int i = 0; i < R; i++) {
                counts[i + 1] += counts[i];
            }

            // Distribute by current digit, then copy back for the next pass.
            for (int i = 0; i < N; i++) {
                int b = (int) (arr[i] / div % 10);
                aux[counts[b]++] = arr[i];
            }
            System.arraycopy(aux, 0, arr, 0, N);

            div *= 10;
        }
    }
}
/*
 * Every spell that affects the group should run through here
 * perform_mag_groups contains the switch statement to send us to the right
 * magic.
 *
 * group spells affect everyone grouped with the caster who is in the room,
 * caster last.
 *
 * To add new group spells, you shouldn't have to change anything in
 * mag_groups -- just add a new case to perform_mag_groups.
 */
void mag_groups(int level, struct char_data *ch, int spellnum, int savetype)
{
  struct char_data *tch, *k;
  struct follow_type *f, *f_next;

  /* No caster, or caster not in a group: nothing to do. */
  if (ch == NULL)
    return;
  if (!AFF_FLAGGED(ch, AFF_GROUP))
    return;

  /* k is the group leader: the caster's master, or the caster itself. */
  if (ch->master != NULL)
    k = ch->master;
  else
    k = ch;

  for (f = k->followers; f; f = f_next) {
    /* Save the next link first so iteration survives if the spell
     * modifies the follower list. */
    f_next = f->next;
    tch = f->follower;
    /* Skip followers in other rooms, non-grouped followers, and the
     * caster (handled last, below). */
    if (IN_ROOM(tch) != IN_ROOM(ch))
      continue;
    if (!AFF_FLAGGED(tch, AFF_GROUP))
      continue;
    if (ch == tch)
      continue;
    perform_mag_groups(level, ch, tch, spellnum, savetype);
  }

  /* The leader, unless the caster is the leader (no room check here —
   * NOTE(review): leaders in another room are still affected; confirm
   * this asymmetry with the followers loop is intended). */
  if ((k != ch) && AFF_FLAGGED(k, AFF_GROUP))
    perform_mag_groups(level, ch, k, spellnum, savetype);

  /* Caster last, per the contract in the header comment. */
  perform_mag_groups(level, ch, ch, spellnum, savetype);
}
//Find the pointer to the link of a specific location Link* Network::getLink(Coord node_id, Direction direction) { Coord link_id; if(net_type == MESH_3D) { switch(direction) { case EAST: link_id.x = node_id.x; link_id.y = node_id.y; link_id.z = node_id.z; break; case WEST: link_id.x = node_id.x - 1; link_id.y = node_id.y; link_id.z = node_id.z; break; case NORTH: link_id.x = node_id.y - 1; link_id.y = node_id.z; link_id.z = node_id.x + net_width; break; case SOUTH: link_id.x = node_id.y; link_id.y = node_id.z; link_id.z = node_id.x + net_width; break; case UP: link_id.x = node_id.z; link_id.y = node_id.x; link_id.z = node_id.y + 2 * net_width; break; case DOWN: link_id.x = node_id.z - 1; link_id.y = node_id.x; link_id.z = node_id.y + 2 * net_width; break; default: link_id.x = node_id.x; link_id.y = node_id.y; link_id.z = node_id.z; break; } if((link_id.x >= 0) && (link_id.x < net_width-1) &&(link_id.y >= 0) && (link_id.y < net_width) &&(link_id.z >= 0) && (link_id.y < 3*net_width)) { return &link[link_id.x][link_id.y][link_id.z]; } else { cerr <<"# of nodes: "<<num_nodes<<endl; cerr <<"Network width: "<<net_width<<endl; cerr <<"Direction: "<<direction<<endl; cerr <<"Node coordinate: ("<<node_id.x<<", "<<node_id.y<<", "<<node_id.z<<")\n"; cerr <<"Link coordinate: ("<<link_id.x<<", "<<link_id.y<<", "<<link_id.z<<")\n"; cerr <<"Can't find correct link id!\n"; return NULL; } } else { switch(direction) { case EAST: link_id.x = node_id.x; link_id.y = node_id.y; break; case WEST: link_id.x = node_id.x - 1; link_id.y = node_id.y; break; case NORTH: link_id.x = node_id.y - 1; link_id.y = node_id.x + net_width; break; case SOUTH: link_id.x = node_id.y; link_id.y = node_id.x + net_width; break; default: link_id.x = node_id.x; link_id.y = node_id.y; break; } if((link_id.x >= 0) && (link_id.x < net_width-1) &&(link_id.y >= 0) && (link_id.y < 2*net_width)) { return link[link_id.x][link_id.y]; } else { cerr <<"# of nodes: "<<num_nodes<<endl; cerr <<"Network width: 
"<<net_width<<endl; cerr <<"Direction: "<<direction<<endl; cerr <<"Node coordinate: ("<<node_id.x<<", "<<node_id.y<<")\n"; cerr <<"Link coordinate: ("<<link_id.x<<", "<<link_id.y<<")\n"; cerr <<"Can't find correct link id!\n"; return NULL; } } }
/*----------------------------------------------------------------------
 NAME
       do_attributes
 DESCRIPTION
       If there are a lot of attributes we need to do them in a separate
       window. That's what this function is for. Opens the HDF file,
       selects the SDS at `index`, and writes an HTML list of its
       attributes (name + stringified value) to l_vars->hfp.
 RETURNS
       SUCCEED on success, FAIL on any HDF API failure.
-----------------------------------------------------------------------*/
int do_attributes(char *fname, int index, lvar_st *l_vars)
{
    char name[MAX_NC_NAME];
    int32 nattrs;
    int32 nt;
    int32 dims[MAX_VAR_DIMS];
    int32 rank;
    int32 status;
    int32 fid = FAIL;    /* FAIL sentinel so `done:` knows whether to SDend */
    int32 sds;
    int32 count;
    intn j;
    int ret_value = SUCCEED;

    ENTER(2,"do_attributes");

    /* Open the file read-only through the SDS interface. */
    if ((fid = SDstart(fname, DFACC_RDONLY)) == FAIL)
      {
          gateway_err(l_vars->hfp,"do_attributes: failed to start SDS interface handling on file\n",0,l_vars);
          ret_value = FAIL;
          goto done;
      }

    /* Select the requested dataset and fetch its attribute count.
     * NOTE(review): the sds id is never released with SDendaccess —
     * possible handle leak; confirm against the HDF SD API. */
    if ((sds = SDselect(fid, index)) == FAIL)
      {
          gateway_err(l_vars->hfp,"do_attributes: failed to select SDS \n",0,l_vars);
          ret_value = FAIL;
          goto done;
      }

    if ((status = SDgetinfo(sds, name, &rank, dims, &nt, &nattrs)) == FAIL)
      {
          gateway_err(l_vars->hfp,"do_attributes: failed to get info on SDS \n",0,l_vars);
          ret_value = FAIL;
          goto done;
      }

    if(nattrs)
      {
          fprintf(l_vars->hfp, "Dataset <B>%s</B> has the following attributes :\n", name);
          DBUG_PRINT(1,(LOGF, "do_attributes: Dataset %s has the following attributes :\n", name));
          fprintf(l_vars->hfp, "<UL>\n");
          /* Emit one <LI> per attribute with its stringified value. */
          for(j = 0; j < nattrs; j++)
            {
                char *valstr = NULL;

                if ((status = SDattrinfo(sds, j, name, &nt, &count)) == FAIL)
                  {
                      gateway_err(l_vars->hfp,"do_attributes: failed to get attribute info on SDS \n",0,l_vars);
                      ret_value = FAIL;
                      goto done;
                  }

                /* A bad attribute is reported but does not abort the loop. */
                if ((valstr = get_attribute(sds, j, nt, count,l_vars)) == NULL)
                  {
                      gateway_err(l_vars->hfp,"do_attributes: failed to convert attribute into a string\n",0,l_vars);
                      continue;
                  }

                fprintf(l_vars->hfp, "<LI> Attribute <i>%s</i> has the value : <pre>%s</pre>", name, valstr);
                DBUG_PRINT(1,(LOGF, "%s : %s \n", name, valstr));

                if (valstr != NULL)
                  {
                      HDfreespace((void *)valstr);
                      valstr = NULL;
                  }
            }
          fprintf(l_vars->hfp, "</UL>\n");
      }
    else
        fprintf(l_vars->hfp, "Dataset <B>%s</B> doesn't have attributes.<P>\n", name);

    fprintf(l_vars->hfp, "</UL>\n");

  done:
    /* Close the file only if it was successfully opened. */
    if (fid != FAIL)
        SDend(fid);
    EXIT(2,"do_attributes");
    return ret_value;
}
/**
 * A builder for {@code CommandNode}s.
 *
 * @param <T> the type of the source
 * @param <B> this
 */
public static abstract class Builder<T, B extends Builder<T, B>> extends ArgumentBuilder<T, B> {

    // Human-readable description of the command; empty by default.
    protected String description = "";

    /**
     * Sets the description.
     *
     * @param description the description
     * @return {@code this}
     */
    public B description(String description) {
        this.description = description;
        return getThis();
    }

    /**
     * Sets the command to be executed.
     *
     * @param command the command to be executed
     * @return {@code this}
     */
    public B executes(Execution<T> command) {
        // Delegates to the Command<T> overload; Execution<T> is accepted
        // via an upcast.
        return executes((Command<T>) command);
    }

    /**
     * Adds an optional child built using the given builder. Children of the
     * optional child are also added to this builder.
     *
     * @param builder the builder which is to build the optional child
     * @return {@code this}
     */
    public B optionally(ArgumentBuilder<T, ?> builder) {
        return optionally(builder.build());
    }

    /**
     * Adds an optional child. Children of the optional child are also added
     * to this builder.
     *
     * <p>Flattening the child's children onto this node is what makes the
     * argument "optional": the grandchildren can be reached either through
     * the optional node or directly from this node.</p>
     *
     * @param node the optional child
     * @return {@code this}
     */
    public B optionally(CommandNode<T> node) {
        then(node);
        for (var child : node.getChildren()) {
            then(child);
        }
        return getThis();
    }
}
// SMembers returns array of `values` from set stored at `key`. // Returned bool is set to false when no error occured and `key` does not exist. func (cache *redisCache) SMembers(key Key, values Values) (bool, error) { hash := cache.convertKeyToHash(key) exists, err := cache.client.Exists(hash).Result() if err != nil { log.WithField("key", key).WithError(err).Error("SMembers: failed to check if key exists") return false, err } if !exists { return false, nil } results, err := cache.client.SMembers(hash).Result() if err != nil { return false, err } err = cache.unmarshalValues(results, values) if err != nil { log.WithField("values", values).WithError(err).Error("SMembers: failed to unmarshal value") return false, err } return true, nil }