content
stringlengths
10
4.9M
/******************************************************************************* * * Function avct_l2c_config_cfm_cback * * Description This is the L2CAP config confirm callback function. * * * Returns void * ******************************************************************************/ void avct_l2c_config_cfm_cback(uint16_t lcid, tL2CAP_CFG_INFO* p_cfg) { tAVCT_LCB* p_lcb; p_lcb = avct_lcb_by_lcid(lcid); if (p_lcb != NULL) { AVCT_TRACE_DEBUG("avct_l2c_config_cfm_cback: 0x%x, ch_state: %d, res: %d", lcid, p_lcb->ch_state, p_cfg->result); if (p_lcb->ch_state == AVCT_CH_CFG) { if (p_cfg->result == L2CAP_CFG_OK) { p_lcb->ch_flags |= AVCT_L2C_CFG_CFM_DONE; if (p_lcb->ch_flags & AVCT_L2C_CFG_IND_DONE) { p_lcb->ch_state = AVCT_CH_OPEN; avct_lcb_event(p_lcb, AVCT_LCB_LL_OPEN_EVT, NULL); } } else { AVCT_TRACE_DEBUG( "ERROR avct_l2c_config_cfm_cback L2CA_DisconnectReq %d ", p_lcb->ch_state); p_lcb->ch_result = p_cfg->result; L2CA_DisconnectReq(lcid); } } AVCT_TRACE_DEBUG("ch_state cfc: %d ", p_lcb->ch_state); } }
async def _update_phising_urls(self): urls = await self.client.api.fetch_phising_url_list() self.client.logger.info("Updated phising url list") self.phising_urls = urls
import { IMock, IPlayable } from "../moq"; import { PresetBuilder } from "./preset-builder"; import { ExpectedExpressions } from "../expected-expressions/expected-expressions"; import { MimicsPreset } from "../presets/mimics.preset"; import { ReturnsPreset } from "../presets/returns.preset"; import { ThrowsPreset } from "../presets/throws.preset"; import { CallbacksPreset } from "../presets/callbacks.preset"; import { resolveBuilder } from "../../tests.components/resolve.builder"; import { InjectionToken } from "@angular/core"; import { IPreset } from "../presets/preset"; import { PlayTimes } from "../playables/play-times"; describe("Preset builder", () => { let resolve: ReturnType<typeof resolveBuilder>; const Mock = new InjectionToken<IMock<unknown>>("mock"); const Set = new InjectionToken<(preset: IPreset<unknown>) => void>("set"); const Target = new InjectionToken<ExpectedExpressions<any>>("target"); beforeEach(() => { const mock = <IMock<unknown>>{}; const set = jasmine.createSpy("set"); const target = <ExpectedExpressions<any>>{}; resolve = resolveBuilder([ [Mock, mock], [Set, set], [Target, target], [PresetBuilder, new PresetBuilder(mock, set, target)] ]); }); it("Defines a mimics preset", () => { const origin = {}; const builder = resolve(PresetBuilder); const actual = builder.mimics(origin); const expected = new MimicsPreset(PlayTimes.Always(), resolve(Target), origin); expect(resolve(Set)).toHaveBeenCalledWith(expected); expect(actual).toBe(resolve(Mock)); }); it("Defines a returns preset", () => { const value = "value"; const builder = resolve(PresetBuilder); const actual = builder.returns(value); const expected = new ReturnsPreset(PlayTimes.Always(), resolve(Target), value); expect(resolve(Set)).toHaveBeenCalledWith(expected); expect(actual).toBe(resolve(Mock)); }); it("Defines a throws preset", () => { const exception = new Error(); const builder = resolve(PresetBuilder); const actual = builder.throws(exception); const expected = new ThrowsPreset(PlayTimes.Always(), resolve(Target), exception); expect(resolve(Set)).toHaveBeenCalledWith(expected); expect(actual).toBe(resolve(Mock)); }); it("Defines a callbacks preset", () => { const callback = () => undefined; const builder = resolve(PresetBuilder); const actual = builder.callback(callback); const expected = new CallbacksPreset(PlayTimes.Always(), resolve(Target), callback); expect(resolve(Set)).toHaveBeenCalledWith(expected); expect(actual).toBe(resolve(Mock)); }); it("Sets playable", () => { const playable = <IPlayable>{}; const builder = new PresetBuilder(resolve(Mock), resolve(Set), resolve(Target)); const actual = builder.play(playable); builder.callback(undefined); builder.returns(undefined); builder.mimics(undefined); builder.throws(undefined); expect(resolve(Set)).toHaveBeenCalledWith(new CallbacksPreset(playable, resolve(Target), undefined)); expect(resolve(Set)).toHaveBeenCalledWith(new ReturnsPreset(playable, resolve(Target), undefined)); expect(resolve(Set)).toHaveBeenCalledWith(new MimicsPreset(playable, resolve(Target), undefined)); expect(resolve(Set)).toHaveBeenCalledWith(new ThrowsPreset(playable, resolve(Target), undefined)); expect(actual).toBe(builder); }); });
// AddRouteToMonitoring adds a new route to the config. func (a *API) AddRouteToMonitoring(w http.ResponseWriter, r *http.Request) { var ( t inputRequest decoder = json.NewDecoder(r.Body) ) if err := decoder.Decode(&t); err != nil { panic(err) } requestInstance := request.New(t.URL, t.Headers, t.Params, t.Body, t.Labels) a.config.AddRoute( config.GetNewRouteType( t.Method, t.URL, requestInstance.GetHeadersConfigFormatted(), requestInstance.GetParamsConfigFormatted(), requestInstance.GetBodyConfigFormatted(), t.Labels, ), ) a.reloadConfigURLs <- struct{}{} a.Data = "success" a.send(w, a.marshalled()) }
<filename>examples/simple.c #pragma scop for (int i = 0; i < N; i++) for (int j = 0; j < N; j++) { A[i][j] = 0; } #pragma endscop
/** * If the resource serving servlet context is available and the resource * is available in the context, create a URL to the resource in that context. * If not, create a local URL for the requested resource. */ protected String resolveResourceContextPath(HttpServletRequest request, String resource) { final String resourceContextPath = this.getResourceServerContextPath(); this.logger.debug("Attempting to locate resource serving webapp with context path: {}", resourceContextPath); final ServletContext resourceContext = this.servletContext.getContext(resourceContextPath); if (resourceContext == null || !resourceContextPath.equals(resourceContext.getContextPath())) { this.logger.warn("Could not find resource serving webapp under context path {} ensure the resource server is deployed and cross context dispatching is enable for this web application", resourceContextPath); return request.getContextPath(); } this.logger.debug("Found resource serving webapp at: {}", resourceContextPath); URL url = null; try { url = resourceContext.getResource(resource); } catch (MalformedURLException e) { } if (url == null) { this.logger.debug("Resource serving webapp {} doesn't contain resource {} Falling back to the local resource.", resourceContextPath, resource); return request.getContextPath(); } this.logger.debug("Resource serving webapp {} contains resource {} Using resource server.", resourceContextPath, resource); return resourceContextPath; }
<reponame>epicbytes/next-template<filename>src/components/fields/password/password.tsx import { FC } from "react" import { useField } from "react-final-form" import type { PasswordProps } from "./password.d" import { composeValidators } from "@/lib/form-errors" import cc from "classcat" export const Password: FC<PasswordProps> = ({ type = "password", name, placeholder, readOnly, validate = [], ...props }) => { const { meta: { touched, error }, input, } = useField(name, { validate: composeValidators(validate), subscription: { value: true, touched: true, error: true, }, }) return ( <div className="relative"> {input.value !== "" && ( <label className="absolute left-5 -top-2.5 rounded bg-blue text-white text-xs py-0.5 px-1.5"> {placeholder} </label> )} <input type={type} className={cc([ "border border-gray-input border-b-2 border-b-gray-light rounded-t w-full text-gray-text px-6 resize-none placeholder-gray-light h-input", { "border-error": touched && error }, ])} readOnly={readOnly} {...props} {...input} placeholder={placeholder} /> {touched && error && ( <small className={"text-red text-center w-full block"}> {error} </small> )} </div> ) }
/** Multi delete query pre-check. @param thd Thread handler @param tables Global/local table list @retval FALSE OK @retval TRUE error */ bool multi_delete_precheck(THD *thd, TABLE_LIST *tables) { SELECT_LEX *select_lex= thd->lex->first_select_lex(); TABLE_LIST *aux_tables= thd->lex->auxiliary_table_list.first; TABLE_LIST **save_query_tables_own_last= thd->lex->query_tables_own_last; DBUG_ENTER("multi_delete_precheck"); for (TABLE_LIST *tl= aux_tables; tl; tl= tl->next_global) { if (tl->table) continue; if (tl->correspondent_table) tl->table= tl->correspondent_table->table; } DBUG_ASSERT(aux_tables != 0); if (check_table_access(thd, SELECT_ACL, tables, FALSE, UINT_MAX, FALSE)) DBUG_RETURN(TRUE); thd->lex->query_tables_own_last= 0; if (check_table_access(thd, DELETE_ACL, aux_tables, FALSE, UINT_MAX, FALSE)) { thd->lex->query_tables_own_last= save_query_tables_own_last; DBUG_RETURN(TRUE); } thd->lex->query_tables_own_last= save_query_tables_own_last; if ((thd->variables.option_bits & OPTION_SAFE_UPDATES) && !select_lex->where) { my_message(ER_UPDATE_WITHOUT_KEY_IN_SAFE_MODE, ER_THD(thd, ER_UPDATE_WITHOUT_KEY_IN_SAFE_MODE), MYF(0)); DBUG_RETURN(TRUE); } DBUG_RETURN(FALSE); }
/** * {@link InterfaceCriteria} that tests whether a given network interface and * address satisfy <i>any</i> of a contained set of {@link InterfaceCriteria}. * * @author Brian Stansberry */ public class AnyInterfaceCriteria implements InterfaceCriteria { private static final long serialVersionUID = 3384500068401101329L; private final Set<InterfaceCriteria> criteria = new LinkedHashSet<InterfaceCriteria>(); /** * Creates a new AnyInterfaceCriteria * * @param criteria the criteria to check to see if any are satisfied. * Cannot be <code>null</code> * * @throws IllegalArgumentException if <code>criteria</code> is <code>null</code> */ public AnyInterfaceCriteria(Set<InterfaceCriteria> criteria) { Assert.checkNotNullParam("criteria", criteria); this.criteria.addAll(criteria); } @Override public Map<NetworkInterface, Set<InetAddress>> getAcceptableAddresses(Map<NetworkInterface, Set<InetAddress>> candidates) throws SocketException { final Map<NetworkInterface, Set<InetAddress>> result = new HashMap<NetworkInterface, Set<InetAddress>>(); for (InterfaceCriteria ic : criteria) { final Map<NetworkInterface, Set<InetAddress>> testee = AbstractInterfaceCriteria.cloneCandidates(candidates); final Map<NetworkInterface, Set<InetAddress>> accepted = ic.getAcceptableAddresses(testee); addAccepted(accepted, result); } return result; } @Override public int hashCode() { return criteria.hashCode(); } @Override public boolean equals(Object o) { if (!(o instanceof AnyInterfaceCriteria)) { return false; } return criteria.equals(((AnyInterfaceCriteria)o).criteria); } @Override public int compareTo(InterfaceCriteria o) { if (this.equals(o)) { return 0; } return o instanceof InetAddressMatchInterfaceCriteria ? -1 : 1; } private void addAccepted(Map<NetworkInterface, Set<InetAddress>> accepted, Map<NetworkInterface, Set<InetAddress>> result) { for (Map.Entry<NetworkInterface, Set<InetAddress>> entry : accepted.entrySet()) { Set<InetAddress> addresses = result.get(entry.getKey()); if (addresses == null) { result.put(entry.getKey(), new HashSet<InetAddress>(entry.getValue())); } else { addresses.addAll(entry.getValue()); } } } }
Michigan high students are fighting a state bill that would let employers pay workers under 20 less than the minimum wage. High School Democrats of America has collected 100 signatures against the proposal by the state's Senate Commerce Committee, according to local media reports. "I just want to make sure that people know that this is very much an issue for young people," said Logan Arkema, 17, a senior at Byron Center High School outside Grand Rapids and former chair of the Michigan High School Democrats who now serves as development director for High School Democrats of America. Michigan already allows employers to pay people under 18 either 85 percent of Michigan's minimum wage or the federal minimum wage, whichever is greater. State Sen. Margaret O'Brien, a Republican from Kalamazoo County, is sponsoring Senate Bill 250, which would change the law to include all employees under 20. She said the bill would encourage employers to give jobs to people with little experience. "We want to get their foot in the door. ... If they can get their foot in the door and get the training and experience they need, then their wage will go up to minimum wage or higher," O'Brien said. Michigan's current minimum wage is $8.50 per hour. Under the proposal, employers could pay people under 20 the lower federal minimum wage of $7.25. Michigan's minimum wage increased by 35 cents overnight on Jan. 1. Across the state, 3.8 percent of hourly workers earn at or below the minimum wage. Arkema, who helped start the online petition against the measure, said O'Brien's bill amounts to allowing employers to pay young people less for doing the same tasks as older workers. "I've got friends who are on the far right of the political spectrum and on the far left and they all, virtually, agree that this is not good for them or any high school students," Arkema said. The Senate Commerce Committee approved the bill in June. It could come up for a vote by the state Senate later this year.
<filename>app/src/main/java/org/fundacionparaguaya/adviserplatform/jobs/SyncJob.java<gh_stars>0 package org.fundacionparaguaya.adviserplatform.jobs; import android.support.annotation.NonNull; import android.util.Log; import com.evernote.android.job.Job; import com.evernote.android.job.JobManager; import com.evernote.android.job.JobRequest; import org.fundacionparaguaya.adviserplatform.data.remote.AuthenticationManager; import org.fundacionparaguaya.adviserplatform.data.repositories.SyncManager; import org.fundacionparaguaya.adviserplatform.util.MixpanelHelper; import java.util.concurrent.atomic.AtomicBoolean; import timber.log.Timber; /** * A job to sync the database. */ public class SyncJob extends Job { public static final String TAG = "SyncJob"; public static final long SYNC_INTERVAL_MS = 1800000; //30 mins private SyncManager mSyncManager; private AuthenticationManager mAuthManager; private AtomicBoolean mIsAlive = new AtomicBoolean(); public SyncJob(SyncManager syncManager, AuthenticationManager authManager) { super(); this.mSyncManager = syncManager; this.mAuthManager = authManager; mIsAlive.set(true); } // @Override protected void onCancel() { // mIsAlive.set(false); Timber.d("Cancel requested... (We'll do our best)"); } @Override @NonNull protected Result onRunJob(@NonNull Params params) { MixpanelHelper.SyncEvents.syncStarted(getContext()); final AuthenticationManager.AuthenticationStatus status = mAuthManager.getStatus(); Log.d(TAG, String.format("Authentication Status: %s", status)); if(status != AuthenticationManager.AuthenticationStatus.AUTHENTICATED) { mAuthManager.refreshToken(); } if(status != AuthenticationManager.AuthenticationStatus.AUTHENTICATED) { return Result.RESCHEDULE; } if(params.isExact()) //cancel any scheduled jobs cause we running RIGHT HERE, RIGHT NOW BOI { stopPeriodic(); } Result syncResult; if (mSyncManager.sync(mIsAlive)) { syncResult = Result.SUCCESS; } else syncResult = Result.FAILURE; if(params.isExact()) { schedulePeriodic(); //enough fun, let's get those regularly scheduled jobs back in } MixpanelHelper.SyncEvents.syncEnded(getContext(), syncResult == Result.SUCCESS); Log.d(TAG, "Sync is over"); return syncResult; } public static void sync() { new JobRequest.Builder(TAG) .startNow() .build() .schedule(); } public static void schedulePeriodic() { new JobRequest.Builder(TAG) .setPeriodic(SYNC_INTERVAL_MS, 600000) .setRequiresDeviceIdle(false) .setRequiredNetworkType(JobRequest.NetworkType.CONNECTED) .setRequirementsEnforced(true) .build() .schedule(); } /** * Looks for any existing jobs that have been created (even if they haven't been ran) * @return if there are job(s) that have been created for syncing */ public static boolean isSyncAboutToStart() { boolean inProgress=false; for(JobRequest jobRequest: JobManager.instance().getAllJobRequestsForTag(TAG)) { inProgress|=(jobRequest.isExact() && jobRequest.getStartMs()<500); } return inProgress; } private static void stopPeriodic() { for(JobRequest job: JobManager.instance().getAllJobRequestsForTag(TAG)) { if(job.isPeriodic()) JobManager.instance().cancel(job.getJobId()); } } public static void cancelAll() { JobManager.instance().cancelAllForTag(TAG); } }
<gh_stars>0 from mediapub_extensions.ApiWrappers.Wordpress import Wordpress from mediapub_extensions.ApiWrappers.Snowflake import Snowflake from mediapub_extensions.ApiWrappers.Snowflake import GoogleAnalytics from mediapub_extensions.ApiWrappers.BigQuery import BigQuery from mediapub_extensions.ApiWrappers.SQLServer import SQLServer from mediapub_extensions.ApiWrappers.GoogleAdManager import GoogleAdManager __version__ = '1.0.beta' __author__ = '<NAME> <EMAIL>' __all__ = []
Bringing the brain into the test tube: an experiment illustrating the effect of ethanol on nerve terminal viability Ethanol is primarily responsible for the behavioural effects of acute alcoholic beverage consumption, which involves central nervous system dysfunction. The mechanisms of ethanol action in the nervous system are poorly understood, particularly those related to the neurotoxicity of high acute ethanol consumption. We now describe a simple experiment showing that a concentration of ethanol, which is reached in the plasma after high acute ethanol intake, disrupts rat brain nerve terminals, as measured by the release of lactate dehydrogenase. This cytolytic action of ethanol was further enhanced upon depolarisation of the nerve terminals suggesting that the mechanism of action of ethanol might not be related to modification of lipid bilayer properties. © 2001 IUBMB. Published by Elsevier Science Ltd. All rights reserved.
Chuck E. Cheese's fined for child-labor violations BAY AREA Nine Chuck E. Cheese's pizza parlors in the Bay Area have been fined a total of more than $28,000 for violating federal child-labor laws by having teenagers operate dangerous equipment, authorities said Tuesday. Restaurants in Cupertino, Redwood City, Brentwood, Newark, San Bruno, Fairfield and two in San Jose allowed teenagers to load and operate trash compactors, while the Chuck E. Cheese's in Rohnert Park allowed minors to operate a dough mixer, according to the U.S. Department of Labor's wage and hour division. Minors are not allowed to operate the equipment. "We all want young workers to develop the skills and experience necessary to compete in the marketplace, but safety must never be sacrificed in the process," said Ruben Rosalez, acting administrator in the Western region for the Labor Department's wage and hour division. "Employers have an obligation to ensure minors are not performing tasks that could be harmful, which is why these child labor rules were established." The Labor Department said Chuck E. Cheese's has agreed to comply with the federal regulations and has paid the civil penalties. The company has also instructed underage employees not to operate the machines and applied stickers to the equipment warning that use by minors is prohibited. Brenda Holloway, a spokeswoman for the pizza parlors' parent company, CEC Entertainment Inc. of Irving, Texas, said Chuck E. Cheese's had not known that such stickers were required and had been unaware of certain other rules. "As soon as we were made aware of that, we did correct the deficiencies and paid our fines," Holloway said. "We're walking the straight and narrow now."
/******************************************************************************* FREE.C Author: <NAME> Date: 07-MAY-90 Copyright (c) 1992-5 MusculoGraphics, Inc. All rights reserved. Portions of this source code are copyrighted by MusculoGraphics, Inc. Description: Routines: free_model : frees a model structure free_plot : frees a plot structure free_muscles : frees memory malloced for muscle structure elements free_default_muscle : frees memory malloced for defaultmuscle elements free_menu : frees a menu structure free_form : frees a form structure *******************************************************************************/ #include "universal.h" #include "globals.h" #include "functions.h" #include "normtools.h" /*************** DEFINES (for this file only) *********************************/ /*************** STATIC GLOBAL VARIABLES (for this file only) *****************/ /*************** EXTERNED VARIABLES (declared in another file) ****************/ #if ! ENGINE extern ModelStruct* sMotionModel; #endif /*************** PROTOTYPES for STATIC FUNCTIONS (for this file only) *********/ static void free_segment(SegmentStruct* seg, ModelStruct* ms); static void free_saved_segment(SaveSegments* seg, ModelStruct* ms); void free_model(int mod) { if (gModel[mod] != NULL) { #if ! ENGINE if (sMotionModel == gModel[mod]) sMotionModel = NULL; #endif freeModelStruct(gModel[mod]); gModel[mod] = NULL; } } void freeModelStruct(ModelStruct* ms) { int i, j, k; FREE_IFNOTNULL(ms->name); if (ms->pathptrs != NULL) { for (i=0; i<ms->numsegments*ms->numsegments; i++) FREE_IFNOTNULL(ms->pathptrs[i]); free(ms->pathptrs); } FREE_IFNOTNULL(ms->jointfilename); FREE_IFNOTNULL(ms->musclefilename); FREE_IFNOTNULL(ms->bonepathname); FREE_IFNOTNULL(ms->mocap_dir); for (i=0; i<ms->num_motion_files; i++) FREE_IFNOTNULL(ms->motionfilename[i]); for (i=0; i<ms->numgroups; i++) { FREE_IFNOTNULL(ms->muscgroup[i].name); FREE_IFNOTNULL(ms->muscgroup[i].muscle_index); free_menu(&ms->muscgroup[i].menu); } FREE_IFNOTNULL(ms->muscgroup); for (i=0; i<ms->save.numsavedmuscgroups; i++) { FREE_IFNOTNULL(ms->save.muscgroup[i].name); FREE_IFNOTNULL(ms->save.muscgroup[i].muscle_index); } FREE_IFNOTNULL(ms->save.muscgroup); free_form(&ms->gencform); free_form(&ms->dynparamsform); for (i = 0; i < ms->gc_chpanel.numoptions; i++) FREE_IFNOTNULL(ms->gc_chpanel.checkbox[i].name); FREE_IFNOTNULL(ms->gc_chpanel.checkbox); //FREE_IFNOTNULL(ms->gc_chpanel.title); title uses static char for (i = 0; i < ms->gc_lockPanel.numoptions; i++) FREE_IFNOTNULL(ms->gc_lockPanel.checkbox[i].name); FREE_IFNOTNULL(ms->gc_lockPanel.checkbox); //FREE_IFNOTNULL(ms->gc_lockPanel.title); title uses static char for (i = 0; i < ms->numseggroups; i++) { FREE_IFNOTNULL(ms->seggroup[i].name); FREE_IFNOTNULL(ms->seggroup[i].segment); } FREE_IFNOTNULL(ms->seggroup); for (i=0; i<ms->numjoints; i++) { FREE_IFNOTNULL(ms->joint[i].name); FREE_IFNOTNULL(ms->joint[i].solverType); FREE_IFNOTNULL(ms->joint[i].in_seg_ground_path); #if INCLUDE_MOCAP_MODULE FREE_IFNOTNULL(ms->joint[i].mocap_segment); #endif } FREE_IFNOTNULL(ms->joint); for (i=0; i<ms->save.numsavedjnts; i++) FREE_IFNOTNULL(ms->save.joint[i].name); FREE_IFNOTNULL(ms->save.joint); for (i=0; i<ms->numsegments; i++) free_segment(&ms->segment[i], ms); FREE_IFNOTNULL(ms->segment); for (i=0; i<ms->save.numsavedsegments; i++) free_saved_segment(&ms->save.segment[i], ms); FREE_IFNOTNULL(ms->save.segment); for (i=0; i<ms->save.num_markers; i++) FREE_IFNOTNULL(ms->save.marker[i].name); FREE_IFNOTNULL(ms->save.marker); for (i=0; 
i<ms->num_wrap_objects; i++) { FREE_IFNOTNULL(ms->wrapobj[i]->name); FREE_IFNOTNULL(ms->wrapobj[i]); } FREE_IFNOTNULL(ms->wrapobj); for (i=0; i<ms->save.num_wrap_objects; i++) FREE_IFNOTNULL(ms->save.wrap_object[i].name); FREE_IFNOTNULL(ms->save.wrap_object); for (i=0; i<ms->numgencoords; i++) { if (ms->gencoord[i]->defined == yes) { FREE_IFNOTNULL(ms->gencoord[i]->name); FREE_IFNOTNULL(ms->gencoord[i]->jointnum); #if INCLUDE_MOCAP_MODULE FREE_IFNOTNULL(ms->gencoord[i]->mocap_segment); #endif FREE_IFNOTNULL(ms->gencoord[i]->group); } } FREE_IFNOTNULL(ms->gencoord); FREE_IFNOTNULL(ms->save.gencoord); for (i = 0; i < 2*GENBUFFER; i++) FREE_IFNOTNULL(ms->genc_help[i].text); for (i = 0; i < ms->num_deformities; i++) { if (ms->deformity[i].deform_name) { for (j = 0; j < ms->deformity[i].num_deforms; j++) FREE_IFNOTNULL(ms->deformity[i].deform_name[j]); FREE_IFNOTNULL(ms->deformity[i].deform_name); } FREE_IFNOTNULL(ms->deformity[i].deform); } FREE_IFNOTNULL(ms->deformity); for (i = 0; i < ms->numligaments; i++) { FREE_IFNOTNULL(ms->ligament[i].name); for (j = 0; j < ms->ligament[i].numlines; j++) { FREE_IFNOTNULL(ms->ligament[i].line[j].mp_orig); FREE_IFNOTNULL(ms->ligament[i].line[j].mp); } FREE_IFNOTNULL(ms->ligament[i].line); } FREE_IFNOTNULL(ms->ligament); free_muscles(ms); free_default_muscle(ms->default_muscle); for (i=0; i<ms->func_array_size; i++) free_function(ms->function[i], yes); FREE_IFNOTNULL(ms->function); if (ms->save.function) { for (i=0; i<ms->func_array_size; i++) free_function(ms->save.function[i], yes); FREE_IFNOTNULL(ms->save.function); } #if ! ENGINE for (i = 0; i < ms->num_motion_objects; i++) free_motion_object(&ms->motion_objects[i], ms); FREE_IFNOTNULL(ms->motion_objects); #endif FREE_IFNOTNULL(ms->save.muscwrap_associations); for (i = 0; i < ms->numworldobjects; i++) { FREE_IFNOTNULL(ms->worldobj[i].name); FREE_IFNOTNULL(ms->worldobj[i].filename); if (ms->worldobj[i].wobj) free_polyhedron(ms->worldobj[i].wobj, yes, ms); } FREE_IFNOTNULL(ms->worldobj); for (i = 0; i < ms->save.num_deforms; i++) { FREE_IFNOTNULL(ms->save.deform[i].name); FREE_IFNOTNULL(ms->save.deform[i].innerBox); FREE_IFNOTNULL(ms->save.deform[i].innerBoxUndeformed); FREE_IFNOTNULL(ms->save.deform[i].outerBox); FREE_IFNOTNULL(ms->save.deform[i].outerBoxUndeformed); } FREE_IFNOTNULL(ms->save.deform); for (i = 0; i < ms->num_constraint_objects; i++) { FREE_IFNOTNULL(ms->constraintobj[i].name); FREE_IFNOTNULL(ms->constraintobj[i].joints); FREE_IFNOTNULL(ms->constraintobj[i].qs); for (j = 0; j < ms->constraintobj[i].numPoints; j++) FREE_IFNOTNULL(ms->constraintobj[i].points[j].name); FREE_IFNOTNULL(ms->constraintobj[i].points); } FREE_IFNOTNULL(ms->constraintobj); for (i = 0; i < ms->save.num_constraint_objects; i++) { FREE_IFNOTNULL(ms->save.constraintobj[i].name); FREE_IFNOTNULL(ms->save.constraintobj[i].joints); FREE_IFNOTNULL(ms->save.constraintobj[i].qs); for (j = 0; j < ms->save.constraintobj[i].numPoints; j++) FREE_IFNOTNULL(ms->save.constraintobj[i].points[j].name); FREE_IFNOTNULL(ms->save.constraintobj[i].points); } FREE_IFNOTNULL(ms->save.constraintobj); for (i = 0; i < ms->save.num_conspt_associations; i++) { for (j = 0; j < ms->save.conspt_associations[i].numPoints; j++) FREE_IFNOTNULL(ms->save.conspt_associations[i].savedPoints[j].name); FREE_IFNOTNULL(ms->save.conspt_associations[i].savedPoints); } FREE_IFNOTNULL(ms->save.conspt_associations); FREE_IFNOTNULL(ms->segment_drawing_order); for (i = 0; i < MAXSAVEDVIEWS; i++) FREE_IFNOTNULL(ms->dis.view_name[i]); for (i = 0; i < 
ms->numgencgroups; i++) { FREE_IFNOTNULL(ms->gencgroup[i].name); FREE_IFNOTNULL(ms->gencgroup[i].gencoord); } FREE_IFNOTNULL(ms->gencgroup); FREE_IFNOTNULL(ms->gencslider.sl); FREE_IFNOTNULL(ms->dis.devs); FREE_IFNOTNULL(ms->dis.dev_values); FREE_IFNOTNULL(ms->dis.muscleson); FREE_IFNOTNULL(ms->forceUnits); FREE_IFNOTNULL(ms->lengthUnits); // The motions are deleted by delete_model() so that the appropriate // events can be generated, so all that remains here is the array of // motion structure pointers. FREE_IFNOTNULL(ms->motion); FREE_IFNOTNULL(ms); } static void free_segment(SegmentStruct* seg, ModelStruct* ms) { int j; if (seg->defined == no) return; FREE_IFNOTNULL(seg->name); for (j=0; j<seg->numBones; j++) free_polyhedron(&seg->bone[j], no, ms); FREE_IFNOTNULL(seg->bone); for (j=0; j<seg->numSpringPoints; j++) { FREE_IFNOTNULL(seg->springPoint[j].name); } FREE_IFNOTNULL(seg->springPoint); FREE_IFNOTNULL(seg->group); if (seg->springFloor) { FREE_IFNOTNULL(seg->springFloor->name); FREE_IFNOTNULL(seg->springFloor->filename); free_polyhedron(seg->springFloor->poly, yes, ms); FREE_IFNOTNULL(seg->springFloor->points); FREE_IFNOTNULL(seg->springFloor); } for (j=0; j<seg->numContactObjects; j++) { FREE_IFNOTNULL(seg->contactObject[j].name); FREE_IFNOTNULL(seg->contactObject[j].filename); free_polyhedron(seg->contactObject[j].poly, yes, ms); } FREE_IFNOTNULL(seg->contactObject); if (seg->forceMatte) { FREE_IFNOTNULL(seg->forceMatte->name); FREE_IFNOTNULL(seg->forceMatte->filename); free_polyhedron(seg->forceMatte->poly, yes, ms); FREE_IFNOTNULL(seg->forceMatte); } for (j=0; j<seg->numMarkers; j++) { FREE_IFNOTNULL(seg->marker[j]->name); FREE_IFNOTNULL(seg->marker[j]); } FREE_IFNOTNULL(seg->marker); for (j=0; j<seg->num_deforms; j++) { FREE_IFNOTNULL(seg->deform[j].name); FREE_IFNOTNULL(seg->deform[j].innerBox); FREE_IFNOTNULL(seg->deform[j].innerBoxUndeformed); FREE_IFNOTNULL(seg->deform[j].outerBox); FREE_IFNOTNULL(seg->deform[j].outerBoxUndeformed); } FREE_IFNOTNULL(seg->deform); #if INCLUDE_MOCAP_MODULE FREE_IFNOTNULL(seg->gait_scale_segment); FREE_IFNOTNULL(seg->mocap_segment); FREE_IFNOTNULL(seg->mocap_scale_chain_end1); FREE_IFNOTNULL(seg->mocap_scale_chain_end2); #endif } static void free_saved_segment(SaveSegments* seg, ModelStruct* ms) { int j; FREE_IFNOTNULL(seg->name); for (j=0; j<seg->numSpringPoints; j++) { FREE_IFNOTNULL(seg->springPoint[j].name); } FREE_IFNOTNULL(seg->springPoint); if (seg->springFloor) { FREE_IFNOTNULL(seg->springFloor->name); FREE_IFNOTNULL(seg->springFloor->filename); free_polyhedron(seg->springFloor->poly, yes, ms); FREE_IFNOTNULL(seg->springFloor->points); FREE_IFNOTNULL(seg->springFloor); } for (j=0; j<seg->numContactObjects; j++) { FREE_IFNOTNULL(seg->contactObject[j].name); FREE_IFNOTNULL(seg->contactObject[j].filename); free_polyhedron(seg->contactObject[j].poly, yes, ms); } FREE_IFNOTNULL(seg->contactObject); if (seg->forceMatte) { FREE_IFNOTNULL(seg->forceMatte->name); FREE_IFNOTNULL(seg->forceMatte->filename); free_polyhedron(seg->forceMatte->poly, yes, ms); FREE_IFNOTNULL(seg->forceMatte); } } #if ! ENGINE void free_plot(int plotnum) { int i, j; FREE_IFNOTNULL(gPlot[plotnum]->title); FREE_IFNOTNULL(gPlot[plotnum]->xname); /* JPL 11/2/00 TODO: for some reason, freeing the yname is causing * a crash, so remove it for now. 
*/ /* FREE_IFNOTNULL(gPlot[plotnum]->yname);*/ for (i=0; i<gPlot[plotnum]->numcurves; i++) { FREE_IFNOTNULL(gPlot[plotnum]->curve[i]->xvalues); FREE_IFNOTNULL(gPlot[plotnum]->curve[i]->yvalues); FREE_IFNOTNULL(gPlot[plotnum]->curve[i]->name); if (gPlot[plotnum]->curve[i]->num_events > 0) { for (j=0; j<gPlot[plotnum]->curve[i]->num_events; j++) FREE_IFNOTNULL(gPlot[plotnum]->curve[i]->event[j].name); FREE_IFNOTNULL(gPlot[plotnum]->curve[i]->event); } FREE_IFNOTNULL(gPlot[plotnum]->curve[i]); } if (gPlot[plotnum]->num_file_events > 0) { for (j=0; j<gPlot[plotnum]->num_file_events; j++) FREE_IFNOTNULL(gPlot[plotnum]->file_event[j].name); FREE_IFNOTNULL(gPlot[plotnum]->file_event); } FREE_IFNOTNULL(gPlot[plotnum]); gPlot[plotnum] = NULL; } #endif void free_muscle(dpMuscleStruct *muscle, dpMuscleStruct* dm) { int i; if (muscle == NULL) return; if (muscle->name != dm->name) FREE_IFNOTNULL(muscle->name); if (muscle->path) { FREE_IFNOTNULL(muscle->path->mp_orig); FREE_IFNOTNULL(muscle->path->mp); FREE_IFNOTNULL(muscle->path); } if (muscle->group != dm->group) FREE_IFNOTNULL(muscle->group); if (muscle->max_isometric_force != dm->max_isometric_force) FREE_IFNOTNULL(muscle->max_isometric_force); if (muscle->pennation_angle != dm->pennation_angle) FREE_IFNOTNULL(muscle->pennation_angle); if (muscle->min_thickness != dm->min_thickness) FREE_IFNOTNULL(muscle->min_thickness); if (muscle->max_thickness != dm->max_thickness) FREE_IFNOTNULL(muscle->max_thickness); if (muscle->min_material != dm->min_material) FREE_IFNOTNULL(muscle->min_material); if (muscle->max_material != dm->max_material) FREE_IFNOTNULL(muscle->max_material); if (muscle->max_contraction_vel != dm->max_contraction_vel) FREE_IFNOTNULL(muscle->max_contraction_vel); if (muscle->optimal_fiber_length != dm->optimal_fiber_length) FREE_IFNOTNULL(muscle->optimal_fiber_length); if (muscle->resting_tendon_length != dm->resting_tendon_length) FREE_IFNOTNULL(muscle->resting_tendon_length); if (muscle->momentarms != dm->momentarms) FREE_IFNOTNULL(muscle->momentarms); if (muscle->active_force_len_func != dm->active_force_len_func) FREE_IFNOTNULL(muscle->active_force_len_func); if (muscle->passive_force_len_func != dm->passive_force_len_func) FREE_IFNOTNULL(muscle->passive_force_len_func); if (muscle->tendon_force_len_func != dm->tendon_force_len_func) FREE_IFNOTNULL(muscle->tendon_force_len_func); if (muscle->force_vel_func != dm->force_vel_func) FREE_IFNOTNULL(muscle->force_vel_func); if (muscle->excitation_func != dm->excitation_func) FREE_IFNOTNULL(muscle->excitation_func); if (muscle->wrapStruct) { for (i = 0; i < muscle->numWrapStructs; i++) { FREE_IFNOTNULL(muscle->wrapStruct[i]->mp_wrap[0].wrap_pts); FREE_IFNOTNULL(muscle->wrapStruct[i]->mp_wrap[1].wrap_pts); FREE_IFNOTNULL(muscle->wrapStruct[i]); } FREE_IFNOTNULL(muscle->wrapStruct); } if (muscle->muscle_model_index != dm->muscle_model_index) FREE_IFNOTNULL(muscle->muscle_model_index); if (muscle->dynamic_params) { for (i = 0; i < muscle->num_dynamic_params; i++) { if (muscle->dynamic_params[i] != dm->dynamic_params[i]) FREE_IFNOTNULL(muscle->dynamic_params[i]); } FREE_IFNOTNULL(muscle->dynamic_params); } } void free_muscles(ModelStruct* model) { int i; if (model == NULL) return; for (i=0; i<model->nummuscles; i++) { free_muscle(model->muscle[i], model->default_muscle); FREE_IFNOTNULL(model->muscle[i]); } FREE_IFNOTNULL(model->muscle); } /* FREE_DEFMUSC: */ void free_default_muscle(dpMuscleStruct* dm) { int i; if (dm == NULL) return; FREE_IFNOTNULL(dm->name); FREE_IFNOTNULL(dm->group); 
FREE_IFNOTNULL(dm->max_isometric_force); FREE_IFNOTNULL(dm->pennation_angle); FREE_IFNOTNULL(dm->min_thickness); FREE_IFNOTNULL(dm->max_thickness); FREE_IFNOTNULL(dm->min_material); FREE_IFNOTNULL(dm->max_material); FREE_IFNOTNULL(dm->muscle_model_index); FREE_IFNOTNULL(dm->max_contraction_vel); FREE_IFNOTNULL(dm->optimal_fiber_length); FREE_IFNOTNULL(dm->resting_tendon_length); FREE_IFNOTNULL(dm->momentarms); FREE_IFNOTNULL(dm->tendon_force_len_func); FREE_IFNOTNULL(dm->active_force_len_func); FREE_IFNOTNULL(dm->passive_force_len_func); FREE_IFNOTNULL(dm->force_vel_func); FREE_IFNOTNULL(dm->excitation_func); for (i = 0; i < dm->num_dynamic_params; i++) FREE_IFNOTNULL(dm->dynamic_params[i]); FREE_IFNOTNULL(dm->dynamic_params); for (i = 0; i < dm->num_dynamic_params; i++) FREE_IFNOTNULL(dm->dynamic_param_names[i]); FREE_IFNOTNULL(dm->dynamic_param_names); } void free_menu(Menu* mn) { int i; for (i=0; i<mn->numoptions; i++) FREE_IFNOTNULL(mn->option[i].name); FREE_IFNOTNULL(mn->title); FREE_IFNOTNULL(mn->option); } void free_form(Form* frm) { int i; for (i=0; i<frm->numoptions; i++) FREE_IFNOTNULL(frm->option[i].name); FREE_IFNOTNULL(frm->title); FREE_IFNOTNULL(frm->option); } void free_checkbox_panel(CheckBoxPanel* panel) { int i; for (i=0; i<panel->numoptions; i++) FREE_IFNOTNULL(panel->checkbox[i].name); FREE_IFNOTNULL(panel->title); FREE_IFNOTNULL(panel->checkbox); } /* ------------------------------------------------------------------------- free_motion_object - ---------------------------------------------------------------------------- */ public void free_motion_object(MotionObject* mo, ModelStruct* ms) { if (mo) { FREE_IFNOTNULL(mo->name); FREE_IFNOTNULL(mo->filename); FREE_IFNOTNULL(mo->materialname); free_polyhedron(&mo->shape, no, ms); } } /* ------------------------------------------------------------------------- free_motion_object_instance - ---------------------------------------------------------------------------- */ public void free_motion_object_instance(MotionObjectInstance* mi, ModelStruct* model) { if (mi) { FREE_IFNOTNULL(mi->name); mi->num_channels = 0; #if ! ENGINE if (mi->currentMaterial.normal_list) glDeleteLists(mi->currentMaterial.normal_list, 1); if (mi->currentMaterial.highlighted_list) glDeleteLists(mi->currentMaterial.highlighted_list, 1); delete_display_list(mi->aux_display_obj, model); #endif FREE_IFNOTNULL(mi->channels); } } #if ! ENGINE void delete_display_list(GLuint display_list, ModelStruct* model) { if (display_list) { if (model) { // TODO_SCENE: the model for this display list may be in more than one scene // (window). To delete the display list, you have to glutSetWindow to the one // that was current when the display list was created. For now, assume that // this is the first scene that contains the model. int savedWindow = glutGetWindow(); Scene* scene = get_first_scene_containing_model(model); if (scene) { glutSetWindow(scene->window_glut_id); glDeleteLists(display_list, 1); } glutSetWindow(savedWindow); } else { glDeleteLists(display_list, 1); } } } void delete_polyhedron_display_list(PolyhedronStruct* ph, ModelStruct* model) { if (ph && ph->gl_display) { if (model) { // TODO_SCENE: the polyhedron has only one display list, but the model // may be in more than one scene (window). To delete the display list, // you have to glutSetWindow to the one that was current when the display // list was created. For now, assume that this is the first scene that // contains the model. 
int savedWindow = glutGetWindow(); Scene* scene = get_first_scene_containing_model(model); if (scene) { glutSetWindow(scene->window_glut_id); glDeleteLists(ph->gl_display, 1); ph->gl_display = 0; } glutSetWindow(savedWindow); } else { glDeleteLists(ph->gl_display, 1); ph->gl_display = 0; } } } void delete_segment_display_lists(SegmentStruct* seg, ModelStruct* model) { if (seg && model) { // TODO_SCENE: the segment's polyhedra have only one display list each, // but the model may be in more than one scene (window). To delete the display // lists, you have to glutSetWindow to the one that was current when the display // lists were created. For now, assume that this is the first scene that // contains the model. int i, savedWindow = glutGetWindow(); Scene* scene = get_first_scene_containing_model(model); if (scene) { glutSetWindow(scene->window_glut_id); for (i=0; i<seg->numBones; i++) { glDeleteLists(seg->bone[i].gl_display, 1); seg->bone[i].gl_display = 0; } } glutSetWindow(savedWindow); } } #endif
<reponame>mikedig/cdb-productivity-api #include "cdb_tile/Tile.h" #include <algorithm> #include <iostream> #include <iomanip> #include <sstream> #include <math.h> namespace cognitics { namespace cdb { std::string get_uref_subdir(uint32_t uref) { std::stringstream ss; ss << "U" << uref; return ss.str(); } std::string getLongitudeString(int lon) { std::stringstream ss; if (lon < 0) ss << "W" << std::setw(3) << std::setfill('0') << abs(lon); else ss << "E" << std::setw(3) << std::setfill('0') << abs(lon); return ss.str(); } std::string getLatitudeString(int lat) { std::stringstream ss; if (lat < 0) ss << "S" << std::setw(2) << std::setfill('0') << abs(lat); else ss << "N" << std::setw(2) << std::setfill('0') << abs(lat); return ss.str(); } //////////////////////////////////////////////////////////////////////////////// std::string Tile::getFilename(void) const { int32_t isouth = static_cast<int32_t>(floor(coordinates.low().latitude().value())); int32_t iwest = static_cast<int32_t>(floor(coordinates.low().longitude().value())); // LatLon_Dnnn_Snnn_Tnnn_LOD_Un_Rn.xxx (p117) std::stringstream ss; ss << "Tiles"; ss << "/" << getLatitudeString(isouth); ss << "/" << getLongitudeString(iwest); ss << "/"; ss << std::setw(3) << std::setfill('0') << int(dataset.code()); ss << "_" << dataset.name(); if (lod < 0) ss << "/LC"; else ss << "/L" << std::setw(2) << std::setfill('0') << int(lod); ss << "/U" << uref; //Filename part ss << "/" << getLatitudeString(isouth); ss << getLongitudeString(iwest); ss << "_D" << std::setw(3) << std::setfill('0') << int(dataset.code()); ss << "_S" << std::setw(3) << std::setfill('0') << cs1; ss << "_T" << std::setw(3) << std::setfill('0') << cs2; if (lod < 0) ss << "_LC"; else ss << "_L" << std::setw(2) << std::setfill('0') << int(lod); ss << "_U" << uref; ss << "_R" << rref; ss << ".jp2"; return ss.str(); } Tile::Tile(CoordinatesRange _coordinates, Dataset _ds, int _lod, uint32_t _uref, uint32_t _rref, uint32_t _cs1, uint32_t _cs2) : coordinates(_coordinates), dataset(_ds), lod(_lod), uref(_uref), rref(_rref), cs1(_cs1), cs2(_cs2) { } std::string Tile::Path() const { int32_t isouth = static_cast<int32_t>(floor(coordinates.low().latitude().value())); int32_t iwest = static_cast<int32_t>(floor(coordinates.low().longitude().value())); // LatLon_Dnnn_Snnn_Tnnn_LOD_Un_Rn.xxx (p117) std::stringstream ss; ss << "Tiles"; ss << "/" << getLatitudeString(isouth); ss << "/" << getLongitudeString(iwest); ss << "/"; ss << std::setw(3) << std::setfill('0') << int(dataset.code()); ss << "_" << dataset.name(); if (lod < 0) ss << "/LC"; else ss << "/L" << std::setw(2) << std::setfill('0') << int(lod); ss << "/U" << uref; return ss.str(); } std::string Tile::Filename() const { int32_t isouth = static_cast<int32_t>(floor(coordinates.low().latitude().value())); int32_t iwest = static_cast<int32_t>(floor(coordinates.low().longitude().value())); std::stringstream ss; ss << getLatitudeString(isouth); ss << getLongitudeString(iwest); ss << "_D" << std::setw(3) << std::setfill('0') << int(dataset.code()); ss << "_S" << std::setw(3) << std::setfill('0') << cs1; ss << "_T" << std::setw(3) << std::setfill('0') << cs2; if (lod < 0) ss << "_LC"; else ss << "_L" << std::setw(2) << std::setfill('0') << int(lod); ss << "_U" << uref; ss << "_R" << rref; return ss.str(); } } }
import { getLoggerFor } from '../../logging/LogUtil'; import { InternalServerError } from '../errors/InternalServerError'; import type { AsyncHandler } from './AsyncHandler'; import { findHandler } from './HandlerUtil'; /** * A composite handler that tries multiple handlers one by one * until it finds a handler that supports the input. * The handlers will be checked in the order they appear in the input array, * allowing for more fine-grained handlers to check before catch-all handlers. */ export class WaterfallHandler<TIn, TOut> implements AsyncHandler<TIn, TOut> { protected readonly logger = getLoggerFor(this); private readonly handlers: AsyncHandler<TIn, TOut>[]; /** * Creates a new WaterfallHandler that stores the given handlers. * @param handlers - Handlers over which it will run. */ public constructor(handlers: AsyncHandler<TIn, TOut>[]) { this.handlers = handlers; } /** * Checks if any of the stored handlers can handle the given input. * @param input - The data that would need to be handled. * * @returns A promise resolving if at least 1 handler supports to input, or rejecting if none do. */ public async canHandle(input: TIn): Promise<void> { await findHandler(this.handlers, input); } /** * Finds a handler that supports the given input and then lets it handle the given data. * @param input - The data that needs to be handled. * * @returns A promise corresponding to the handle call of a handler that supports the input. * It rejects if no handlers support the given data. */ public async handle(input: TIn): Promise<TOut> { let handler: AsyncHandler<TIn, TOut>; try { handler = await findHandler(this.handlers, input); } catch (error: unknown) { this.logger.warn('All handlers failed. This might be the consequence of calling handle before canHandle.'); throw new InternalServerError('All handlers failed', { cause: error }); } return handler.handle(input); } /** * Identical to {@link AsyncHandler.handleSafe} but optimized for composite * by only needing 1 canHandle call on members. * @param input - The input data. * * @returns A promise corresponding to the handle call of a handler that supports the input. * It rejects if no handlers support the given data. */ public async handleSafe(input: TIn): Promise<TOut> { const handler = await findHandler(this.handlers, input); return handler.handle(input); } }
/** * Tries to transform the given object to match the given target type. * <p> * The class is only transformed if it implements the {@link Transformable} interface. * <p> * Transformations are done recursively until a matching transformation happens or the class tried * to transform does not implement the {@link Transformable} interface. * * @param source the object to transform * @param target the target type * @param <T> the generic type of the target * @return a transformed object matching the given type or <tt>null</tt> to indicate that no conversion was possible */ public <T> T make(Object source, Class<T> target) { Class<?> classToTransform = source.getClass(); while (Transformable.class.isAssignableFrom(classToTransform)) { T result = makeWithClass(source, classToTransform, target); if (result != null) { return result; } classToTransform = classToTransform.getSuperclass(); } return null; }
#include "Window.h" #include "glm\glm.hpp" #include "glm\gtc\matrix_transform.hpp" #include "glm\gtc\matrix_access.hpp" #include "ResourcesLoader.h" #include "Random.h" #include "Quad.h" #include "Shader.h" #include "Camera.h" #include "Cube.h" #include "Sphere.h" #include "Utils.h" #include "TextRenderer.h" #include "Plane.h" #include "Model.h" #include "TimeOfDayManager.h" #include "imgui\imgui.h" #include "imgui\imgui_impl_glfw.h" #include "imgui\imgui_impl_opengl3.h" #include <fstream> #include <iostream> float waterHeight = 0.0f; Plane waterTopPlane, waterBottomPlane; void IntersectFrustumEdgeWaterPlane(const glm::vec3 &start, const glm::vec3 &end, std::vector<glm::vec3>& points) { glm::vec3 delta = end - start; glm::vec3 dir = glm::normalize(delta); float length = glm::length(delta); float distance = 0.0f; if (waterTopPlane.IntersectRay(start, dir, distance)) { if (distance <= length) { glm::vec3 hitPos = start + dir * distance; points.push_back(glm::vec3(hitPos.x, waterHeight, hitPos.z)); } } if (waterBottomPlane.IntersectRay(start, dir, distance)) { if (distance <= length) { glm::vec3 hitPos = start + dir * distance; points.push_back(glm::vec3(hitPos.x, waterHeight, hitPos.z)); } } } void IntersectFrustumWaterPlane(const Frustum &frustum, std::vector<glm::vec3>& points) { glm::vec3 corners[8]; corners[0] = frustum.nbl; corners[1] = frustum.ntl; corners[2] = frustum.ntr; corners[3] = frustum.nbr; corners[4] = frustum.fbl; corners[5] = frustum.ftl; corners[6] = frustum.ftr; corners[7] = frustum.fbr; IntersectFrustumEdgeWaterPlane(corners[0], corners[1], points); IntersectFrustumEdgeWaterPlane(corners[1], corners[2], points); IntersectFrustumEdgeWaterPlane(corners[2], corners[3], points); IntersectFrustumEdgeWaterPlane(corners[3], corners[0], points); IntersectFrustumEdgeWaterPlane(corners[4], corners[5], points); IntersectFrustumEdgeWaterPlane(corners[5], corners[6], points); IntersectFrustumEdgeWaterPlane(corners[6], corners[7], points); IntersectFrustumEdgeWaterPlane(corners[7], corners[4], points); IntersectFrustumEdgeWaterPlane(corners[0], corners[4], points); IntersectFrustumEdgeWaterPlane(corners[1], corners[5], points); IntersectFrustumEdgeWaterPlane(corners[2], corners[6], points); IntersectFrustumEdgeWaterPlane(corners[3], corners[7], points); } bool SegmentPlaneIntersection(const glm::vec3 &a, const glm::vec3 &b, const glm::vec3 &n, float d, glm::vec3 &q) { glm::vec3 ab = b - a; float t = (d - glm::dot(n, a)) / glm::dot(n, ab); if (t > -0.0f && t <= 1.0f) { q = a + t * ab; return true; } return false; } int main() { unsigned int width = 1280; unsigned int height = 720; Window window; window.Init(width, height); TextRenderer textRenderer; textRenderer.Init("Data/Fonts/arial_sdf.fnt", "Data/Fonts/arial_sdf.png"); textRenderer.Resize(width, height); Camera camera; camera.SetProjectionMatrix(75.0f, (float)width / height, 0.2f, 1000.0f); camera.SetPos(glm::vec3(0.0f)); TimeOfDayManager tod; tod.Init(); Shader shader; shader.Load("Data/Shaders/ProjGridWater/cube.vert", "Data/Shaders/ProjGridWater/cube.frag"); Shader waterShader; waterShader.Load("Data/Shaders/ProjGridWater/water.vert", "Data/Shaders/ProjGridWater/water.frag"); Shader skydomeShader; skydomeShader.Load("Data/Shaders/ProjGridWater/skydome.vert", "Data/Shaders/ProjGridWater/skydome.frag"); Sphere skydome; skydome.Load(); Model dragon; dragon.Load("Data/Models/dragon.obj"); Model platform; platform.Load("Data/Models/platform.obj"); GLuint inscatterTexture = utils::Load3DTexture("Data/Textures/inscatter.raw", 256, 128, 
32, 4, GL_FLOAT); GLuint transmittanceTexture = utils::LoadRAW2DTexture("Data/Textures/transmittance.raw", 256, 64, 3, GL_FLOAT); GLuint normalMap = utils::LoadTexture("Data/Textures/oceanwaves_ddn2.png", false); const unsigned int resolution = 128; std::vector<glm::vec2> vertices(resolution * resolution); std::vector<unsigned short> indices((resolution - 1) * (resolution - 1) * 6); for (size_t x = 0; x < resolution; x++) { for (size_t z = 0; z < resolution; z++) { vertices[x * resolution + z] = glm::vec2((float)x / (resolution - 1), (float)z / (resolution - 1)); } } int index = 0; for (size_t x = 0; x < resolution - 1; x++) { for (size_t z = 0; z < resolution - 1; z++) { indices[index++] = (x + 0) * resolution + (z + 0); indices[index++] = (x + 0) * resolution + (z + 1); indices[index++] = (x + 1) * resolution + (z + 1); indices[index++] = (x + 0) * resolution + (z + 0); indices[index++] = (x + 1) * resolution + (z + 1); indices[index++] = (x + 1) * resolution + (z + 0); } } GLuint vao, vbo, ibo; glGenVertexArrays(1, &vao); glGenBuffers(1, &vbo); glGenBuffers(1, &ibo); glBindVertexArray(vao); glBindBuffer(GL_ARRAY_BUFFER, vbo); glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec2), vertices.data(), GL_STATIC_DRAW); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo); glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned short), indices.data(), GL_STATIC_DRAW); glEnableVertexAttribArray(0); glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), nullptr); glBindVertexArray(0); GLuint reflectionFBO; glGenFramebuffers(1, &reflectionFBO); glBindFramebuffer(GL_FRAMEBUFFER, reflectionFBO); GLuint reflectionTexture; glCreateTextures(GL_TEXTURE_2D, 1, &reflectionTexture); glTextureParameteri(reflectionTexture, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTextureParameteri(reflectionTexture, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTextureParameteri(reflectionTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTextureParameteri(reflectionTexture, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTextureStorage2D(reflectionTexture, 1, GL_RGBA8, width / 2, height / 2); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, reflectionTexture, 0); GLuint rbo; glGenRenderbuffers(1, &rbo); glNamedRenderbufferStorage(rbo, GL_DEPTH_COMPONENT24, width / 2, height / 2); glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo); GLenum fboStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER); if (fboStatus != GL_FRAMEBUFFER_COMPLETE) { std::cout << "Framebuffer not complete: " << fboStatus << "\n"; return -1; } glBindFramebuffer(GL_FRAMEBUFFER, 0); GLuint refractionFBO; glCreateFramebuffers(1, &refractionFBO); GLuint refractionTexture; glCreateTextures(GL_TEXTURE_2D, 1, &refractionTexture); glTextureParameteri(refractionTexture, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTextureParameteri(refractionTexture, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTextureParameteri(refractionTexture, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTextureParameteri(refractionTexture, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTextureStorage2D(refractionTexture, 1, GL_RGBA8, width / 2, height / 2); GLuint refractionDepth; glCreateTextures(GL_TEXTURE_2D, 1, &refractionDepth); glTextureParameteri(refractionDepth, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTextureParameteri(refractionDepth, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glTextureParameteri(refractionDepth, GL_TEXTURE_MAG_FILTER, GL_NEAREST); glTextureParameteri(refractionDepth, GL_TEXTURE_MIN_FILTER, GL_NEAREST); 
glTextureStorage2D(refractionDepth, 1, GL_DEPTH_COMPONENT24, width / 2, height / 2); glNamedFramebufferTexture(refractionFBO, GL_COLOR_ATTACHMENT0, refractionTexture, 0); glNamedFramebufferTexture(refractionFBO, GL_DEPTH_ATTACHMENT, refractionDepth, 0); if (glCheckNamedFramebufferStatus(refractionFBO, GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { std::cout << "Framebuffer not complete: " << fboStatus << "\n"; return -1; } std::vector<glm::vec3> points; glm::vec4 viewCorners[4]; glm::mat4 viewFrame; float sum = 1.5f; float scale = 18.0f; float timeOfDay = 12.0f; glm::vec2 normalMapOffset0 = glm::vec2(); glm::vec2 normalMapOffset1 = glm::vec2(); Plane waterPlane; waterPlane.SetNormalAndPoint(glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, waterHeight, 0.0f)); waterBottomPlane.SetNormalAndPoint(glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, waterHeight - sum, 0.0f)); waterTopPlane.SetNormalAndPoint(glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, waterHeight + sum, 0.0f)); glm::vec3 frustumCornersWorld[8]; int frustumIndices[12][2] = { { 0,1 }, { 1,2 }, { 2,3 }, { 3,0 }, { 4,5 }, { 5,6 }, { 6,7 }, { 7,4 }, { 0,4 }, { 1,5 }, { 2,6 }, { 3,7 } }; Profiler &profiler = window.GetProfiler(); while (!window.ShouldClose()) { window.Update(); if (window.WasResized()) { width = window.GetWidth(); height = window.GetHeight(); glViewport(0, 0, width, height); } if (Input::MouseMoved() && Input::IsMouseButtonDown(1)) camera.Look(); else if (!Input::IsMouseButtonDown(1)) camera.firstMouse = true; if (Input::IsKeyPressed(GLFW_KEY_W)) camera.Move(FORWARD, window.GetDeltaTime()); if (Input::IsKeyPressed(GLFW_KEY_S)) camera.Move(BACKWARD, window.GetDeltaTime()); if (Input::IsKeyPressed(GLFW_KEY_A)) camera.Move(LEFT, window.GetDeltaTime()); if (Input::IsKeyPressed(GLFW_KEY_D)) camera.Move(RIGHT, window.GetDeltaTime()); tod.Update(window.GetDeltaTime()); const TimeInfo &timeInfo = tod.GetCurrentTimeInfo(); points.clear(); //IntersectFrustumWaterPlane(camera.GetFrustum(), points); /*if (points.size() > 0) {*/ glm::vec3 camPos = camera.GetPos(); float range = std::max(0.0f, 10.0f) + 5.0f; if (camPos.y < waterHeight) { camPos.y = std::min(camPos.y, waterHeight - range); } else { camPos.y = std::max(camPos.y, waterHeight + range); } //float minCamHeight = waterHeight + sum + 10.0f; //camPos.y = std::max(minCamHeight, camPos.y); glm::vec3 focus = camera.GetPos() + camera.GetForward() * scale; focus.y = waterHeight; viewFrame = glm::lookAt(camPos, focus, glm::vec3(0.0f, 1.0f, 0.0f)); // Construct view and projection matrices glm::mat4 projectorViewProj = camera.GetProjectionMatrix() * viewFrame; const Frustum &frustum = camera.GetFrustum(); frustumCornersWorld[0] = frustum.nbl; frustumCornersWorld[1] = frustum.ntl; frustumCornersWorld[2] = frustum.ntr; frustumCornersWorld[3] = frustum.nbr; frustumCornersWorld[4] = frustum.fbl; frustumCornersWorld[5] = frustum.ftl; frustumCornersWorld[6] = frustum.ftr; frustumCornersWorld[7] = frustum.fbr; range = std::max(1.0f, 10.0f); // For each corner if its world space position is // between the wave range then add it to the list. for (size_t i = 0; i < 8; i++) { if (frustumCornersWorld[i].y <= waterHeight + range && frustumCornersWorld[i].y >= waterHeight - range) { points.push_back(frustumCornersWorld[i]); } } // Now take each segment in the frustum box and check // to see if it intersects the ocean plane on both the // upper and lower ranges. 
for (size_t i = 0; i < 12; i++) { glm::vec3 p0 = frustumCornersWorld[frustumIndices[i][0]]; glm::vec3 p1 = frustumCornersWorld[frustumIndices[i][1]]; glm::vec3 max, min; if (SegmentPlaneIntersection(p0, p1, glm::vec3(0.0f, 1.0f, 0.0f), waterHeight + range, max)) { points.push_back(max); } if (SegmentPlaneIntersection(p0, p1, glm::vec3(0.0f, 1.0f, 0.0f), waterHeight - range, min)) { points.push_back(min); } } float xmin = std::numeric_limits<float>::max(); float ymin = std::numeric_limits<float>::max(); float xmax = std::numeric_limits<float>::min(); float ymax = std::numeric_limits<float>::min(); glm::vec4 q = glm::vec4(0.0f); glm::vec4 p = glm::vec4(0.0f); // Now convert each world space position into // projector screen space. The min/max x/y values // are then used for the range conversion matrix. // Calculate the x and y span of vVisible in projector space for (size_t i = 0; i < points.size(); i++) { // Project the points of intersection between the frustum and the waterTop or waterBottom plane to the waterPlane q.x = points[i].x; q.y = waterHeight; q.z = points[i].z; q.w = 1.0f; // Now transform the points to projector space p = projectorViewProj * q; p.x /= p.w; p.y /= p.w; if (p.x < xmin) xmin = p.x; if (p.y < ymin) ymin = p.y; if (p.x > xmax) xmax = p.x; if (p.y > ymax) ymax = p.y; } // Create a matrix that transform the [0,1] range to [xmin,xmax] and [ymin,ymax] and leave the z and w intact glm::mat4 rangeMap; rangeMap = glm::row(rangeMap, 0, glm::vec4(xmax - xmin, 0.0f, 0.0f, xmin)); rangeMap = glm::row(rangeMap, 1, glm::vec4(0.0f, ymax - ymin, 0.0f, ymin)); rangeMap = glm::row(rangeMap, 2, glm::vec4(0.0f, 0.0f, 1.0f, 0.0f)); rangeMap = glm::row(rangeMap, 3, glm::vec4(0.0f, 0.0f, 0.0f, 1.0f)); // Now update the projector matrix with the range conversion matrix glm::mat4 projectorToWorld = glm::inverse(projectorViewProj) * rangeMap; glm::vec2 ndcCorners[4]; ndcCorners[0] = glm::vec2(0.0f, 0.0f); ndcCorners[1] = glm::vec2(1.0f, 0.0f); ndcCorners[2] = glm::vec2(1.0f, 1.0f); ndcCorners[3] = glm::vec2(0.0f, 1.0f); // Now transform the corners of the for (int i = 0; i < 4; i++) { glm::vec4 a, b; // Transform the ndc corners to world space a = projectorToWorld * glm::vec4(ndcCorners[i].x, ndcCorners[i].y, -1.0f, 1.0f); b = projectorToWorld * glm::vec4(ndcCorners[i].x, ndcCorners[i].y, 1.0f, 1.0f); // And calculate the intersection between the line made by this two points and the water plane // in homogeneous space // The rest of the grid vertices will then be interpolated in the vertex shader float h = waterHeight; glm::vec4 ab = b - a; float t = (a.w * h - a.y) / (ab.y - ab.w * h); viewCorners[i] = a + ab * t; } //} profiler.BeginQuery("Reflection"); glBindFramebuffer(GL_FRAMEBUFFER, reflectionFBO); glViewport(0, 0, width / 2, height / 2); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glCullFace(GL_FRONT); glDepthFunc(GL_LEQUAL); camPos = camera.GetPos(); float dist = 2.0f * (camPos.y - waterHeight); camPos.y -= dist; camera.SetPos(camPos); float pitch = camera.GetPitch(); camera.SetPitch(-pitch); skydomeShader.Use(); skydomeShader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); skydomeShader.SetVec3("camPos", camPos); skydomeShader.SetVec3("lightDir", timeInfo.dirLightDirection); glBindTextureUnit(0, transmittanceTexture); glBindTextureUnit(1, inscatterTexture); skydome.Render(); glDepthFunc(GL_LESS); glCullFace(GL_BACK); shader.Use(); shader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); shader.SetVec3("sunDir", 
timeInfo.dirLightDirection); shader.SetVec3("sunLightColor", timeInfo.dirLightColor); glm::mat4 m = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 4.0f, -10.0f)); shader.SetMat4("modelMatrix", m); dragon.Render(); m = glm::translate(glm::mat4(1.0f), glm::vec3(25.0f, 0.0f, 0.0f)); m = glm::scale(m, glm::vec3(8.0f)); shader.SetMat4("modelMatrix", m); platform.Render(); camPos.y += dist; camera.SetPos(camPos); camera.SetPitch(pitch); profiler.EndQuery(); profiler.BeginQuery("Refraction"); glBindFramebuffer(GL_FRAMEBUFFER, refractionFBO); glClearColor(0.08f, 0.16f, 0.3f, 1.0f); glViewport(0, 0, width / 2, height / 2); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); shader.Use(); shader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); shader.SetVec3("sunDir", timeInfo.dirLightDirection); shader.SetVec3("sunLightColor", timeInfo.dirLightColor); m = glm::translate(glm::mat4(1.0f), glm::vec3(25.0f, 0.0f, 0.0f)); m = glm::scale(m, glm::vec3(8.0f)); shader.SetMat4("modelMatrix", m); platform.Render(); profiler.EndQuery(); profiler.BeginQuery("Main"); glBindFramebuffer(GL_FRAMEBUFFER, 0); glViewport(0, 0, width, height); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glCullFace(GL_FRONT); glDepthFunc(GL_LEQUAL); skydomeShader.Use(); skydomeShader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); skydomeShader.SetVec3("camPos", camera.GetPos()); skydomeShader.SetVec3("lightDir", timeInfo.dirLightDirection); glBindTextureUnit(0, transmittanceTexture); glBindTextureUnit(1, inscatterTexture); skydome.Render(); glDepthFunc(GL_LESS); glCullFace(GL_BACK); shader.Use(); shader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); shader.SetVec3("sunDir", timeInfo.dirLightDirection); shader.SetVec3("sunLightColor", timeInfo.dirLightColor); m = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 4.0f, -10.0f)); shader.SetMat4("modelMatrix", m); dragon.Render(); m = glm::translate(glm::mat4(1.0f), glm::vec3(25.0f, 0.0f, 0.0f)); m = glm::scale(m, glm::vec3(8.0f)); shader.SetMat4("modelMatrix", m); platform.Render(); if (points.size() > 0) { float nAngle0 = 42.0f * (3.14159f / 180.0f); float nAngle1 = 76.0f * (3.14159f / 180.0f); float deltaTime = window.GetDeltaTime(); normalMapOffset0 += glm::vec2(glm::cos(nAngle0), glm::sin(nAngle0)) * 0.055f * deltaTime; normalMapOffset1 += glm::vec2(glm::cos(nAngle1), glm::sin(nAngle1)) * 0.025f * deltaTime; waterShader.Use(); waterShader.SetMat4("projView", camera.GetProjectionMatrix() * camera.GetViewMatrix()); waterShader.SetMat4("projectionMatrix", camera.GetProjectionMatrix()); waterShader.SetMat4("viewFrame", viewFrame); waterShader.SetVec4("viewCorner0", viewCorners[0]); waterShader.SetVec4("viewCorner1", viewCorners[1]); waterShader.SetVec4("viewCorner2", viewCorners[2]); waterShader.SetVec4("viewCorner3", viewCorners[3]); waterShader.SetVec3("camPos", camera.GetPos()); waterShader.SetFloat("time", window.GetElapsedTime()); waterShader.SetVec4("normalMapOffset", glm::vec4(normalMapOffset0.x, normalMapOffset0.y, normalMapOffset1.x, normalMapOffset1.y)); waterShader.SetVec3("sunDir", timeInfo.dirLightDirection); waterShader.SetVec3("sunLightColor", timeInfo.dirLightColor); glBindTextureUnit(0, reflectionTexture); glBindTextureUnit(1, refractionTexture); glBindTextureUnit(2, refractionDepth); glBindTextureUnit(3, normalMap); glBindVertexArray(vao); glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_SHORT, nullptr); } profiler.EndQuery(); profiler.BeginQuery("ImGui"); 
ImGui_ImplOpenGL3_NewFrame(); ImGui_ImplGlfw_NewFrame(); ImGui::NewFrame(); ImGui::Begin("Settings", (bool*)0, 0); ImGui::Text("Press WASD to move"); ImGui::Text("Right click and move the mouse to look around"); if (ImGui::SliderFloat("Time of day", &timeOfDay, 0.0f, 24.0f, "%.1f")) { tod.SetCurrentTime(timeOfDay); } ImGui::SliderFloat("Scale", &scale, 0.0f, 100.0f); if (ImGui::SliderFloat("Sum", &sum, 0.0f, 15.0f)) { waterBottomPlane.SetNormalAndPoint(glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, waterHeight - sum, 0.0f)); waterTopPlane.SetNormalAndPoint(glm::vec3(0.0f, 1.0f, 0.0f), glm::vec3(0.0f, waterHeight + sum, 0.0f)); } ImGui::End(); ImGui::Render(); ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); profiler.EndQuery(); profiler.BeginQuery("Text"); glEnable(GL_BLEND); glDepthMask(GL_FALSE); textRenderer.AddText("CPU time: " + std::to_string(window.GetDeltaTime() * 1000.0f) + " ms", glm::vec2(30.0f, 460.0f), glm::vec2(0.3f, 0.3f)); textRenderer.AddText(profiler.GetResults(), glm::vec2(30.0f, 420.0f), glm::vec2(0.3f, 0.3f)); textRenderer.Render(); glDepthMask(GL_TRUE); profiler.EndQuery(); profiler.EndFrame(); window.SwapBuffers(); } shader.Dispose(); waterShader.Dispose(); dragon.Dispose(); window.Dispose(); return 0; }
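A note on the view-corner step in the render loop above: the line through the two unprojected corner points is intersected with the water plane directly in homogeneous coordinates. A short derivation of the `t` used there (our notation, with a and b the unprojected near and far points and ab = b - a):

\[ P(t) = a + t\,ab \]

The Euclidean water plane \( y = h \) corresponds to \( P_y / P_w = h \), i.e. \( P_y = h\,P_w \). Substituting the parameterization:

\[ a_y + t\,ab_y = h\,(a_w + t\,ab_w) \quad\Longrightarrow\quad t = \frac{h\,a_w - a_y}{ab_y - h\,ab_w}, \]

which is exactly `t = (a.w * h - a.y) / (ab.y - ab.w * h)` in the code.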
a = list(map(int, input().split()))
b = list(map(int, input().split()))
c = int(input())

r1 = a[0] + a[1] + a[2]
r2 = b[0] + b[1] + b[2]

# Ceiling division: a started block of 5 (or 10) still costs one full unit.
res = r1 // 5 + r2 // 10
if r1 % 5 != 0:
    res += 1
if r2 % 10 != 0:
    res += 1

print("YES" if res <= c else "NO")
/* * Output a message in the lower left corner of the screen * and wait for carriage return. */ void error(const char *fmt, PARG *parg) { int col = 0; static char return_to_continue[] = " (press RETURN)"; errmsgs++; if (any_display && is_tty) { if (!oldbot) squish_check(); at_exit(); clear_bot(); at_enter(AT_STANDOUT); col += so_s_width; } col += less_printf(fmt, parg); if (!(any_display && is_tty)) { (void) putchr('\n'); return; } putstr(return_to_continue); at_exit(); col += sizeof (return_to_continue) + so_e_width; get_return(); lower_left(); clear_eol(); if (col >= sc_width) screen_trashed = 1; flush(); }
/** * 1.2 Retrieve service provider config w/ authz */ @Test public void testRetrieveSPConfigWithAuth() { Response<JsonObject> resp = client.getSrvProvConf(); assertResponse(resp); }
Proposal by Australian Muslim group to allow 'safe spaces' for young Muslims to discuss 'inflammatory' issues draws criticism. A Muslim group in Australia is demanding that taxpayer funds be diverted from anti-terrorism programs to create a “safe space” for angry young Muslims to express “inflammatory” comments that they otherwise wouldn’t be allowed to say in public. The Islamic Council of Victoria (ICV) said such forums enabled young people's opinions to be "respectfully and intelligently debated and challenged", and wants money for federal counter-terrorism and anti-extremism programs to be diverted “to create refuges where 'emotionally overloaded' youths can voice inflammatory comments,” reports Yahoo7 News. The safe space is needed because, if such remarks were made publicly, they would be seen as “inflammatory,” according to the proposal. But State Premier Daniel Andrews said he was "very troubled" by the idea of a space where people "could be radical". The ICV, which says it represents some 200,000 Muslims in Victoria, says the community is suffering mental health and other problems because of the widespread suspicion it faces. According to the BBC, it recommends funding a number of community-led initiatives, complaining that financial resources at the moment are concentrated mainly on national efforts to counter violent extremism. ICV spokesman Adel Salman said they did not consider the "safe space" proposal controversial, because it is a practice that is already used to help young people in countries around the world. "This is about good practice because the youth require an avenue to express their views in a safe environment... where they feel their views are valued, where they can be respectfully challenged and counter views presented," he said. He said such spaces would be "conducted with experts who are familiar with the methodology, and understand the way the conversation can be guided". Salman criticised headlines which called the proposal "rage space" and "hate space", saying the media had misconstrued what they were asking for. But Premier Andrews ruled out any possibility of funding such an initiative. "I am very troubled by the suggestion that we might have a space where people could be radical as part of a de-radicalisation programme. That makes no sense to me whatsoever". Radio host Tom Elliot said the program was “quite possibly the dumbest idea” he had "ever heard". “What they are effectively saying is they want a taxpayer-funded place where they can say things that otherwise they wouldn’t be allowed to say,” Elliot told Sunrise. Others said the “safe spaces” could be turned into jihadist recruitment centers. “They would be a hotbed for passing ideas and information along,” said Mal Lees. One Nation leader Pauline Hanson decried the idea as “a load of rubbish”. “How much more money have we got to put into this to make them feel good about themselves?” she asked. Hanson is calling for a ban on Muslim immigration from certain countries linked to terrorism, pointing out that Australia is still in a position to save itself given that its Muslim population is around 2.4%, significantly lower than in many European countries. The Muslim group said their proposal was justified because of the “Islamophobia” they are subjected to. Presumably, they believe that anyone who pushes back against angry Muslims calling for jihad is being ‘Islamophobic’.
Infowars calls this "yet another example of how Muslim leaders are trying to entrench themselves in a grievance narrative, when in reality they should be working solely on rooting out the extremists in their midst, not encouraging them." Earlier this week, Victoria was rocked by a terror attack when gunman Yacqub Khayre took a woman hostage at an apartment building before killing another man and wounding three police officers. ISIS subsequently claimed responsibility for the siege. Khayre was later shot dead by police. The BBC reports that Australia has prevented a number of attacks in recent years and has been on alert amid fears over the return of Australians fighting for Islamist militant groups in Syria.
package com.etheller.warsmash.units;

/**
 * A hashable wrapper object for a String that can be used as the key in a
 * hashtable, but which disregards case as a key -- except that it will remember
 * case if directly asked for its value. The game needs this to be able to show
 * the original case of a string to the user in the editor, while still doing
 * map lookups in a case insensitive way.
 *
 * @author Eric
 *
 */
public final class StringKey {
	private final String string;

	public StringKey(final String string) {
		this.string = string;
	}

	public String getString() {
		return this.string;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		// Hash the lower-cased value so that keys differing only in case collide;
		// the null check must be on the string itself, before calling toLowerCase().
		result = (prime * result) + ((this.string == null) ? 0 : this.string.toLowerCase().hashCode());
		return result;
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final StringKey other = (StringKey) obj;
		if (this.string == null) {
			if (other.string != null) {
				return false;
			}
		}
		else if (!this.string.equalsIgnoreCase(other.string)) {
			return false;
		}
		return true;
	}
}
import { Injectable } from '@angular/core';
import {
    HttpClient,
    HttpErrorResponse,
} from '@angular/common/http';
import { Subject } from 'rxjs';
import { environment } from 'src/environments/environment';

export interface APIQueryResult {
    row_count: number;
    column_names?: string[];
    rows?: any[][];
}

export interface APIResponse {
    success: boolean;
    message?: string;
}

@Injectable({
    providedIn: 'root'
})
export class APIService {
    public static readonly baseURL: string = environment.apiBaseURL;

    public siteName: string;
    public errorResponseSubject: Subject<HttpErrorResponse> = new Subject();
    public client: HttpClient;

    constructor(
        httpClient: HttpClient,
    ) {
        this.siteName = environment.siteName;
        this.client = httpClient;
    }
}
/**
 * @license
 * Copyright 2021 piyoppi
 * SPDX-License-Identifier: MIT
 */

export { GridImageGenerator } from './GridImageGenerator'
export { CallbackItem } from './CallbackItem'
export { MapCanvas } from './MapCanvas'
export { Projects, Project } from './Projects'
export { ColiderCanvas } from './ColiderCanvas'
export { EditorCanvas } from './EditorCanvas'
export { AutoTileSelector } from './AutoTileSelector'
export { MapChipSelector } from './MapChipSelector'
export { convertFromCursorPositionToChipPosition, convertChipPositionDivisionByCursorSize } from './CursorPositionConverter'
import time

# `api` is assumed to be the chat-client wrapper used elsewhere in this module.
def register(seconds, all_users):
    participants = []
    started_message = "New%20session%20started.%20Type%20'register'%20to%20enter."
    api.post_message(started_message)
    api.post_message("After%2060%20seconds%20a%20loser%20will%20be%20chosen%20to%20bring%20coffee%20for%20the%20winners!")
    time.sleep(5 * seconds / 6)
    api.post_message("{0}%20seconds%20remaining!".format(seconds / 6))
    time.sleep(seconds / 6)
    for message in api.get_latest_messages(30):
        if message["text"] == "register":
            api.post_message(all_users[message["user"]].name + "%20registered")
            participants.append(all_users[message["user"]])
        elif message["text"] == started_message.replace("%20", " "):
            # Stop once we reach our own start announcement; anything older
            # belongs to a previous session.
            break
    return participants
/******************************************************************************** * Copyright (c) 2020 Contributors to the Eclipse Foundation * * See the NOTICE file(s) distributed with this work for additional * information regarding copyright ownership. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ********************************************************************************/ import { Server } from 'http'; import SocketIO from 'socket.io'; import { ServiceFactory } from './factories/serviceFactory'; import { zmqSubscribe } from './routes/zmqSubscribe'; import { zmqUnsubscribe } from './routes/zmqUnsubscribe'; import { ZmqService } from './services/zmqService'; import { AppHelper } from './utils/appHelper'; AppHelper.build( (app, config, port) => { ServiceFactory.register('zmq', () => new ZmqService(config)); const server = new Server(app); const socketServer = SocketIO(server); server.listen(port); socketServer.on('connection', (socket) => { socket.on('subscribe', (data) => socket.emit('subscribe', zmqSubscribe(config, socket, data))); socket.on('unsubscribe', (data) => socket.emit('unsubscribe', zmqUnsubscribe(config, socket, data))); }); }, true);
package com.notronix.lw.impl.method.auth; import com.google.gson.Gson; import com.notronix.lw.impl.method.AbstractLinnworksAPIMethod; import java.time.Instant; public class GetServerUTCTimeMethod extends AbstractLinnworksAPIMethod<Instant> { @Override public String getURI() { return "Auth/GetServerUTCTime"; } @Override public Instant getResponse(Gson gson, String jsonPayload) { return gson.fromJson(jsonPayload, Instant.class); } }
Ronald A. Fisher and the improvement of humankind
In this paper we argue that the motif underlying Ronald Aylmer Fisher's (1890-1962) scientific endeavors was eugenics and the notion of differential fertility. Fisher's contributions to the Neo-Darwinian synthesis and to the development of several basic concepts of modern statistics, among others, derived from his interest in providing sound grounds for the hypothesis that the reproduction of the 'well-born' ought to be encouraged, while individuals "unfit for civilized society" were to be financially and socially discouraged from bearing children. Fisher believed that, all striving notwithstanding, all human societies were doomed to decadence and collapse for purely biological reasons, with eugenics being the only approach likely to prevent such a sorry fate. In Fisher's work, statistics, evolutionary theory, genetics and eugenics form a single logical structure, since all of them directly concern a more general problem, i.e., the biological improvement of humankind. Eugenics did not disappear after the end of World War II, but was reframed, at least partially, as present-day genetics, including clinical genetic counseling.
# -*- coding: utf-8 -*-

def f(text):
    # Dynamic programming over character-parity bitmasks: bit i of `pat`
    # records whether chr(97 + i) has appeared an odd number of times so far.
    s = {}
    s[0] = 0
    pat = 0
    bits = [2**i for i in range(26)]
    len_of_str = len(text)
    for c in text:
        pat ^= bits[ord(c) - 97]
        s[pat] = min(
            min(s.get(pat ^ bit, len_of_str) + 1 for bit in bits),
            s.get(pat, len_of_str))
        if pat == 0:
            return 1
    return s[pat]

text = input()
print(f(text))
#ifndef FILES_H_
#define FILES_H_

typedef struct
{
    int width;
    int height;
    unsigned int *pixels; // 3-channel RGB
} Image;

char *LoadTextFile(const char *filename);
void FreeTextFile(char *file_data);

Image *LoadImage(Image *img, const char *filename);
void FreeImage(Image *img);

#endif /* end of include guard: FILES_H_ */
Effectiveness of the addition of therapeutic alliance with minimal intervention in the treatment of patients with chronic, nonspecific low back pain and low risk of involvement of psychosocial factors: a study protocol for a randomized controlled trial (TalkBack trial)

Background The stratified model of care has been an effective approach for the treatment of low back pain. However, the treatment of patients with low risk of psychosocial-factor involvement is unclear. The addition of the therapeutic alliance to a minimal intervention may be an option for the treatment of low back pain. This paper reports on the rationale, design and protocol for a randomized controlled trial with a blinded assessor to assess the effectiveness of the addition of therapeutic alliance with minimal intervention on pain and disability in patients with chronic, nonspecific low back pain.

Methods Two hundred and twenty-two patients with chronic, nonspecific low back pain and low risk of involvement of psychosocial factors will be assessed and randomly allocated into three groups (n = 74 patients per group). The Positive Therapeutic Alliance group will receive counseling and guidance with an emphasis on therapeutic alliance and empathy. The Usual Treatment group will receive the same information and counseling with limited interaction with the therapist. The Control group will not receive any intervention. The treatment will be composed of two intervention sessions with a 1-week interval. A blinded assessor will collect the following outcomes at baseline, 1 month, 6 months and 12 months after randomization: pain intensity (Pain Numerical Rating Scale), specific disability (Patient-specific Functional Scale), general disability (Oswestry Disability Index), global perceived effect (Global Perceived Effect Scale), empathy (Consultation and Relational Empathy Measure), credibility and expectations related to treatment. The analysis will be performed using linear mixed models.

Discussion This will be the first study to understand the effect of combining an enhanced therapeutic alliance with a treatment based on counseling, information and advice (minimal intervention). The addition of the therapeutic alliance to minimal intervention may improve the treatment of chronic, nonspecific low back pain.

Trial registration ClinicalTrials.gov, NCT 02497625. Registered on 10 July 2015.

Electronic supplementary material The online version of this article (doi:10.1186/s13063-017-1784-z) contains supplementary material, which is available to authorized users.
Keywords: Low back pain, Empathy, Subgroups, Minimal intervention

Background
Low back pain remains a global public health problem. It is considered to be the sixth leading cause of disability-adjusted life years among more than 200 health conditions. Despite the increase in the quantity and quality of research in recent decades, the available treatments for low back pain tend to produce minor or moderate effects. These effects are positive in the short term and only some of them maintain long-term improvements. The high prevalence of low back pain makes it impossible to offer resource-intensive treatment strategies to all patients. Studies have shown that a small group of patients who develop chronic pain are responsible for most of the treatment-related costs. Thus, it is important to identify patients who would benefit from a specific treatment and distinguish them from those who would show little benefit or even worsening of symptoms. Low back pain is an ideal condition for the stratification of treatment subgroups since it is a condition that affects a heterogeneous population, presenting prognostic variation and relying on different diagnostic options and available treatments. Stratification based on the prognosis of the clinical condition is a form of classifying low-back-pain patients into subgroups, and it is based on the risk of developing persistent pain and disability related to psychosocial factors. Psychosocial factors (e.g., depression, stress, aspects related to work and fear of movement) are predictive of the occurrence of new episodes and the long-term persistence of pain and disability. Treatment in subgroups may also facilitate the clinical decision-making process and guide appropriate treatment for these patients. Moreover, it may assist therapeutic decision-making, maximize treatment benefits, reduce risks and increase health system efficiency. Recent studies have shown the cost-effectiveness of treatment based on the chronic low back pain prognosis in patients with high and medium risk of developing chronic problems related to psychosocial factors. Positive results were associated with decreased pain and disability, in addition to reductions in cost and work absenteeism. However, the results of a recent randomized controlled trial showed no significant differences for patients with low risk of involvement of psychosocial factors when usual treatment was compared with targeted treatment based on subgroups. These negative results may be explained by the fact that patients already presented low levels of pain and disability and by the use of questionnaires with low responsiveness for low-risk patients.
Disability questionnaires frequently used to evaluate patients with low back pain are not sensitive enough to detect clinical change in patients with low levels of disability. For these patients, it is recommended to use patient-specific disability questionnaires. In the treatment of these low-risk patients, it is recommended that information and guidance be provided on correct diagnosis, prognosis, symptoms, physical activity levels, return to work and disease severity, preferably at the first contact with the health professional. Considering the good prognosis of low-risk patients, some studies suggest a minimal intervention approach (counseling sessions and positive information). This approach might be a quick and low-cost treatment option for the health system. Furthermore, identifying appropriate treatment for each patient may prevent the unnecessary use of expensive or extended resources. There is also the possibility of reducing the use of diagnostic procedures (imaging tests, for example), decreasing the number of consultations during the recovery process and, consequently, the use of financial resources. Thus, some studies emphasize the need to develop more effective treatment strategies for this patient subgroup. One possible explanation for the nonsignificant results of minimal intervention in these patients is that patients with low risk of developing chronic problems have low to medium levels of pain and disability. Therefore, it is necessary to evaluate patients using measurement instruments that are more responsive to this clinical condition. The extent of treatment effectiveness may be influenced by factors other than the intervention chosen. Contextual factors contribute to the therapeutic effect of any intervention. These factors consist of a complex interaction between technical and communicative knowledge, in addition to the therapist's ability to meet the expectations of the patient during treatment. Studies have shown that a good relationship between the therapist and patient is related to decreased pain levels and disability as well as improved satisfaction with treatment. The therapeutic alliance may be defined as harmony or social connection between therapist and patient. However, both the magnitude and extent of this association in musculoskeletal patients are unknown. This gap in the medical literature reinforces the need for studies on this subject, especially in the treatment of patients with musculoskeletal complaints. Emerging evidence indicates that the degree and quality of the interaction between clinician and patient is important because it influences the magnitude of the active treatment, as well as the degree of the placebo effect. Although the therapeutic alliance has been shown to produce therapeutic benefits in some areas, such as medicine and psychotherapy, there is little empirical support yet for this subject in physiotherapy. A recent systematic review showed a consistent pattern of positive therapeutic alliance correlated with improved pain, disability and treatment satisfaction in physical rehabilitation. A recent study investigated the effects of increased therapeutic alliance and empathy in the treatment of patients with low back pain receiving active interferential current or a placebo. Patients were randomly allocated into four groups: (1) limited interaction plus active current, (2) limited interaction plus placebo, (3) increased interaction plus active current and (4) increased interaction plus placebo.
The results showed clinical improvement associated with increased interaction between therapist and patient, which remained even with the application of placebo. This study evaluated the effects only in the short term and did not classify patients into specific treatment subgroups. Therefore, the rationale of this study is based on emerging evidence of the importance of the therapeutic alliance in the clinical context. Appropriate treatment for low-risk patients can provide important clinical benefits. It could be a fast and low-cost treatment option for the health system, since it would help in identifying patients who do not require unnecessary or extensive assessment and treatment. To date, there are no studies that have investigated the therapeutic alliance combined with minimal intervention in the treatment of patients with chronic, nonspecific low back pain and low risk of psychosocial-factor involvement. Therefore, the objective of this study is to evaluate the effectiveness of the addition of the therapeutic alliance with minimal intervention in the treatment of patients with chronic, nonspecific low back pain and a low risk of psychosocial-factor involvement on their pain, specific and general disability, global perceived effect, empathy, credibility and expectation. The hypothesis of this study is that there will be a clinical benefit in pain, and especially in specific disability, 1 month after randomization in the group receiving treatment with the addition of the therapeutic alliance.

Methods
Study design
This is a three-arm randomized controlled trial with a blinded assessor. The patients will be divided into three groups: a Positive Therapeutic Alliance (PTA) group, a Usual Treatment (UT) group and a Control group (CG). Table 1 details the study timeline and follows the style of the Standard Protocol Items: Recommendations for Interventional Trials (SPIRIT) guideline. The SPIRIT Checklist and figure for the study protocol are provided as Additional file 1 and Additional file 2. The World Health Organization Trial Registration Data Set is provided as Additional file 3.

Sample size calculation
The sample size calculation was carried out to detect a difference of 1 point on the Pain Numerical Rating Scale (estimated standard deviation = 1.84) and 1 point on the Patient-specific Functional Scale (estimated standard deviation = 1.8). The calculations were performed considering an α = 0.05, a statistical power of 80% and a follow-up loss of 15%. Thus, this study will comprise a sample of 222 patients who will be divided into three treatment groups (n = 74 patients per group).

Eligibility criteria
This study will be carried out with patients on the treatment waiting lists of two physical therapy teaching clinics in the city of Taubaté, São Paulo (approved by the Ethics Committee of Universidade Bandeirante Anhanguera n. 44720315.5.0000.5372). The patients will be invited personally by phone. This study will include patients who have had chronic, nonspecific low back pain for at least 3 months, who are aged between 18 and 80 years, who are able to read and write in Portuguese, and who are classified as low risk of having psychosocial factors involved according to the STarT Back Screening Tool (SBST-Brazil) questionnaire. Patients with a history of spinal surgery, serious spinal diseases, nerve root compromise, diseases associated with cognitive impairment confirmed by medical diagnosis, or pregnancy will be excluded.
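To make the sample size calculation above concrete, the sketch below reproduces a standard two-group power computation in Python. This is a simplified reading of the protocol: the authors report using the protocol's own procedure (and possibly a multiplicity adjustment across the three groups) that may differ from this plain two-sample comparison.

from statsmodels.stats.power import TTestIndPower

sd = 1.84                  # estimated SD of the Pain Numerical Rating Scale
effect_size = 1.0 / sd     # 1-point between-group difference, standardized

analysis = TTestIndPower()
n_per_group = analysis.solve_power(effect_size=effect_size, alpha=0.05, power=0.80)

# Inflate for an anticipated 15% follow-up loss.
n_adjusted = n_per_group / 0.85
print(round(n_per_group), round(n_adjusted))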
Procedures
Patients will be evaluated by a previously trained blinded assessor who will provide information on the procedures and criteria for study participation. The assessor will contact patients by phone to confirm their eligibility criteria and to fill out the SBST-Brazil questionnaire. In the case of acceptance of participation and compliance with the eligibility criteria, the patient will be forwarded to initial assessment. The blinded assessor will collect data at baseline utilizing a neutral communication style so as not to influence the therapeutic alliance involved in patient care. A coding system in follow-up assessments will be used to ensure blinding. After this initial procedure, eligible patients will be referred to the therapist responsible for the interventions. An independent researcher not involved in the recruitment and evaluation of patients will perform randomization using Microsoft Excel software random number generation. The secret allocation will be made using sequential, numbered, sealed opaque envelopes. Due to the nature of the study, it will not be possible to blind the therapist or patients. All researchers involved in the stages of this study will receive adequate training to carry out the activities. Patient recruitment will begin in September 2015 and the date of completion will be December 2016.

Assessments
The eligibility screening, the collection of demographic and anthropometric data from the patients, as well as the outcome assessments, will be conducted by a blinded assessor. The randomization to the treatment groups will be done before the initial evaluation. The assessor will be aware of the allocation only after data analysis. All the questionnaires and scales used to assess primary and secondary outcomes have been translated and adapted into Brazilian-Portuguese and have shown adequate measurement properties. The primary outcomes of this study will be pain and specific disability 1 month after randomization. The study's secondary outcomes will be pain and specific disability 6 and 12 months after randomization, and general disability and global perceived effect 1, 6 and 12 months after randomization. Empathy, credibility and expectations related to treatment will be used as additional outcomes.

Demographic characteristics
The initial assessment information will include age, gender, height, weight, educational level and job information related to low back pain (pain location, duration, use of medication and previous treatments).

STarT Back Screening Tool
The SBST-Brazil questionnaire is a prognostic evaluation tool which aims to classify patients into low, medium or high risk of developing long-term pain or disability related to psychosocial factors. The questionnaire contains questions related to pain, disability, comorbidities, bothersomeness, pain catastrophizing, fear, anxiety and depression. For scoring and classification, the patient must choose between "agree" (1 point) or "disagree" (0 points) on the first eight questions. Question 9 contains five possible answers: not at all (0 points), slightly (0 points), moderately (0 points), very much (1 point) and extremely (1 point). The total score of the SBST-Brazil represents the sum of all questions and ranges from 0 to 9. Scores ≤3 points classify the patient as low risk. Scores ≥4 points indicate the need to examine the psychological subscale comprised of questions 5 to 9. Patients with scores ≤3 points on the psychological subscale are classified as medium risk and patients with scores ≥4 are classified as high risk. The SBST-Brazil will be used only for screening and not as an outcome measure instrument.
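The SBST-Brazil classification rules just described translate directly into code. A minimal sketch (function and variable names are ours, not from the instrument):

def classify_sbst(answers):
    """Classify SBST-Brazil answers into 'low', 'medium' or 'high' risk.

    `answers` is a list of nine item scores, each already coded 0 or 1
    as described above (questions 1-8 agree/disagree, question 9 recoded).
    """
    total = sum(answers)
    if total <= 3:
        return "low"
    # Psychological subscale: questions 5-9 (indices 4-8).
    psych = sum(answers[4:9])
    return "medium" if psych <= 3 else "high"

# Example: a patient agreeing with three items overall is low risk.
print(classify_sbst([1, 0, 1, 0, 0, 1, 0, 0, 0]))  # -> low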
Pain Numerical Rating Scale
The Pain Numerical Rating Scale is an 11-point scale (0-10) that aims to measure the intensity of pain reported by the patient, where 0 = "no pain" and 10 = "pain as bad as could be". In this study, patients will be asked about their pain intensity in the last 7 days.

Patient-Specific Functional Scale
The Patient-specific Functional Scale provides reliable measurements of limitations in specific tasks. The patient is asked to choose three activities that they cannot perform, or can perform only with difficulty, because of pain. Each activity is scored on a Likert scale that ranges from 0 (unable to perform the activity) to 10 (able to perform the activity at preinjury level). The final result is the average of the three scores, from 0 to 10 points. High values indicate a lower degree of limitation in performing specific tasks. This questionnaire presents good responsiveness in patients with low functional limitation.

Oswestry Disability Index
The Oswestry Disability Index aims to assess disability associated with low back pain. This questionnaire consists of 10 items describing everyday situations that patients may find difficult to perform due to low back pain. Each of the 10 items is scored from 0 to 5, adding up to a maximum score of 50 points. The result is multiplied by 2 to obtain a percentage. From this percentage it is possible to classify the patients as having: minimal disability (0% to 20%), moderate disability (20% to 40%), severe disability (40% to 60%), crippled (60% to 80%) or bedridden (80% to 100%).

Global Perceived Effect Scale
The Global Perceived Effect Scale seeks to evaluate clinical changes perceived by the patient by comparing the onset of symptoms to the current situation. It is an 11-point scale (−5 to 5) wherein −5 = vastly worse, 0 = no change, and 5 = completely recovered. High scores indicate greater perceived recovery of the condition. This scale is more responsive in patients who have low functional limitation.

Consultation and Relational Empathy (CARE)
The Consultation and Relational Empathy (CARE) Measure is a questionnaire to assess patients' perception of empathy and communication during consultation. It consists of 10 questions answered according to a Likert scale of 1 to 5 (where 1 = poor and 5 = excellent). The final score is obtained by adding the questions' scores, with a maximum score of 50 and a minimum score of 10 points. High values indicate greater empathy. Each question contains a "not applicable" answer; in this case, the item must be excluded and the result will be obtained from the average score of the questions answered. The questionnaire will be applied by a blinded assessor after the first treatment session.

Expectation and credibility
Improvement expectation will be assessed by an 11-point numerical scale, with 0 = "no expectation of improvement" and 10 = "the highest expectation of improvement". Credibility regarding the treatment will be evaluated by four questions. These questions address the patient's confidence that the treatment can help to ease and cope with the pain, confidence in recommending the treatment to other patients, and how much sense the treatment makes to the patient.
These questions will be scored according to a Likert scale ranging from 0 (not confident) to 6 (totally confident). Expectation and credibility will be evaluated one week after randomization.

Treatment protocol
Patients will be randomized into three groups: a Positive Therapeutic Alliance (PTA) group, a Usual Treatment (UT) group and a Control group (CG). Treatment sessions will be conducted face-to-face and individually with each patient. The treatment sessions will be delivered by two physical therapists with postgraduate qualifications and a completed Master's degree in the research field. The treatment protocol offered to the PTA and UT groups will be based on research that demonstrates the effectiveness of empathy and communication training with patients. Patients in the PTA and UT groups will receive two 60-min treatment sessions with a 1-week interval. The sessions will be held in an ordinary physical therapy office following standards of comfort, organization and hygiene. The treatment protocol is initially structured around education, differentiating the nonverbal behavior of the therapist and the key issues related to the treatment. In order to ensure treatment fidelity, and to standardize the information that will be provided to patients in both groups, the therapists will follow a description of the relevant issues to be addressed during treatment (Tables 2 and 3). The treatment protocol recommendations were developed based on previous studies and designed to ensure the enhancement of the therapeutic alliance in one group and its reduction in the other. Considering the type of intervention, the therapists will not need any physical or informational materials to deliver the intervention. It is important to emphasize that the PTA and UT groups will receive the same informational content related to low back pain. The difference will be the form used to transmit this information. The patients in the PTA group will undergo an intervention based on treatment guidelines involving guidance and information related to a return to daily activities, advice on dealing with the pain and a clear explanation of their signs and symptoms. The sessions will be structured with the intention of increasing therapeutic alliance and empathy, based on topics relating to the condition and behavior taken from The Back Book. UT-group patients will also receive the same informational content on low back pain based on The Back Book. Therapy sessions will be performed with limited interaction between patient and therapist and the information will be transmitted in a clear and straightforward manner. Patients in the CG will not receive any treatment and will be instructed not to seek treatment for the first month after randomization. The treatment offered to the PTA or UT groups will be available for patients who are interested after a period of 1 year. Patients will not be limited in looking for other types of treatment after the first month and will be evaluated at baseline and during every reassessment. Since patients will be recruited from a waiting list and will receive the care offered in this study after 1 year, we believe that this conduct does not violate any ethical principles. Patients who require additional interventions other than those offered by this study will be referred for treatment at the teaching clinics of the participating universities. Patients' use of medication will not be limited and will be monitored at each reassessment.
Patients will be advised not to seek other treatment options during the intervention period. Patients will not receive any financial compensation for participation.

Tables 2 and 3 (reconstructed) – Topics and example messages used in the treatment sessions:
• Prognosis: "Permanent or severe back damages are rare"; "It is really very difficult to damage your back"; "Back pain is rarely caused by a serious illness"; "Back pain prognosis is favorable"
• Imaging tests (imaging findings in asymptomatic people; normal degenerative signs): "About 60% of the asymptomatic population has some sort of degenerative sign"; "There is nothing seriously wrong in your back"
• Physical activity and rest (the importance of physical activity; low back pain behavior): "The sooner you resume your activities the sooner you will get better"; "Rest is not good for the back"; "You may have some limitations, but the most important thing is to keep moving"
• Pain control (medication; available treatments; self-care): "There are several steps you can take to help your back pain"; "Physicians and physical therapists can help your back pain, but only you are able to take care of yourself"
• Family, work, anxiety and stress-related factors (the importance of family support; controlling anxiety and stress; the influence of these factors on back pain): "Gradually return to your social activities and work! The sooner you return, the less future problems you will have"; "You really can help yourself!"; "People who cope better with the pain return to their activities faster and better"; "You will have good and bad days, this is normal!"; "Although the natural history is positive, there are some factors that contribute to pain that becomes chronic, we will work on them"
• Concepts of chronic pain (the current concept of pain; pain threshold; central hypersensitivity): "Pain does not mean back injury"; "Pain is a response to sensory stimuli, and it is not necessarily related to any tissue damage in the spine"; "Pain can just be a misunderstanding of the body"
• Encouragement (good prognosis for the subgroup at low risk of involvement of psychosocial factors; return to activities): "This information is taken from the best and most current research in the world"; "Do not let your back pain take over your life, you are able to get through it"
• Interest in the patient's symptoms: "I am also interested in your personal symptoms"; "Does this pain change during the day?"; "Describe the pain to me"; "Which activity of your daily life makes the pain worse?"
• Primary limited daily activity (question the patient, demonstrating interest, to encourage a return to activities): "How did this pain affect your lifestyle?"
• Session closure (reinforce positive messages): "You did very well today! And certainly you will be fine"; "I understand that these chronic situations sometimes get out of our control, but I am sure you will do well"

Statistical analysis
A researcher not involved in data collection and without any conflict of interest will be responsible for monitoring data. The randomization process and blinding of patients and assessors will be audited monthly by an experienced researcher. All data will be entered twice before statistical procedures. A researcher who will receive the data in an encrypted format to ensure confidentiality will conduct the statistical analysis. Linear mixed models will be used for the primary analysis to verify the differences between the average effects of the interventions in the reassessments carried out 1 month, 6 months and 12 months after randomization. The differences between groups will be calculated by interaction terms of groups versus time. The time will be coded as a categorical variable (baseline, 1 month, 6 months and 12 months after randomization). No interim analysis will be performed. The process of analysis of treatment efficacy will follow the intention-to-treat principle. If a patient abandons treatment or refuses to answer any reassessment, the lost data will not be replaced. All statistical procedures will be performed with a significance level of 5% using the SPSS 19 software for Windows.
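As an illustration of the planned primary analysis, the hedged sketch below fits a linear mixed model with group-by-time interaction terms using Python's statsmodels. The protocol specifies SPSS, so this is only an equivalent sketch, and the column names are hypothetical.

import pandas as pd
import statsmodels.formula.api as smf

# Long-format dataset with one row per patient per assessment.
# Hypothetical columns: patient_id, group (PTA/UT/CG),
# time (baseline/1m/6m/12m) and pain (0-10 numerical rating).
data = pd.read_csv("talkback_long.csv")

# Random intercept per patient; the group:time interaction terms
# estimate the between-group differences at each reassessment.
model = smf.mixedlm("pain ~ C(group) * C(time)", data, groups=data["patient_id"])
result = model.fit()
print(result.summary())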
Discussion
The results of this study will help us to understand the effect of combining increased therapeutic alliance with a type of treatment based on counseling, information and advice for patients with chronic low back pain and low risk of psychosocial-factor involvement. We believe that the UT group will present significantly better treatment results when compared to the CG. Regarding the PTA group, the study hypothesis is that, 1 month after randomization, there will be a significant clinical decrease in pain, and especially in specific disability, when compared to the other two study groups. This assumption is based on recent studies, which have shown that the therapeutic alliance does in fact have a positive influence on the clinical outcomes of patients with irritable bowel syndrome, rehabilitation needs and chronic musculoskeletal disorders. Treatment adherence appears to be directly influenced by the patient's trust in, and empathy with, the health care professional. Thus, decreased specific disability may occur because the patient follows the guidelines provided by the health care professional with more confidence. Our research group expects to publish the results of this study in an internationally recognized journal and to release the spreadsheet with the coded data during the first semester of 2018. It is known that a minority of patients who develop chronic pain are responsible for most of the treatment-related costs. It is, therefore, important to identify whether patients at low risk of psychosocial-factor involvement respond positively to a minimal, effective and inexpensive treatment intervention. To date, this study is the largest randomized controlled trial involving the combination of therapeutic alliance with minimal intervention in patients with chronic, nonspecific low back pain. The results of this study may help the clinical practice of professionals dealing with patients with chronic, nonspecific low back pain. The health system can benefit from this type of intervention, avoiding excessive or unnecessary diagnostic investigations and treatments for these patients. This may lead to cost reductions regarding medication, diagnostic tests, consultations, treatments or absence from work. Furthermore, the health care system could provide access and opportunity for treatment for patients who require more attention to symptom evaluation and treatment (medium and high risk of psychosocial-factor involvement). In conclusion, there might be the possibility of reducing waiting time for treatment, facilitating the flow and quality of primary care for low-risk low-back-pain patients.

Ethics approval and consent to participate
The coordinating department of the clinics where the study will be conducted approved all ethical procedures. The Research Ethics Committee of Faculdade Anhanguera de Taubaté (CAAE 44720315.5.0000.5372) approved this study. The research project is registered at ClinicalTrials.gov (http://www.clinicaltrials.gov/) under the number NCT 02497625.
All patients will be briefed on the study procedures through a Consent Form that will inform them that the study aims to investigate the effects of treatment with counseling and guidance delivered through different forms of communication for patients with chronic, nonspecific low back pain and low risk of psychosocial-factor involvement. The collected data will be stored in locked cabinets and only the blinded assessor will have access to this information. The data will be entered and saved on password-protected computers to ensure confidentiality. If the eligibility criteria, outcomes or statistical analysis have to be modified after the beginning of the randomization process, the relevant parties (Ethics Committee, trial registries and funder) will be informed of the modifications.

Dissemination plans
Study findings will be presented in conference abstracts, poster presentations and scientific publications in medical journals. The principal investigator will work with the research fellows to prepare manuscripts for publication.
package mods.betterfoliage.mixin;

import mods.betterfoliage.Hooks;
import net.minecraft.block.AbstractBlock;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.BlockView;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;

/**
 * Mixin to override the result of {@link BlockState}.getAmbientOcclusionLightValue().
 *
 * Needed to avoid excessive darkening of Round Logs at the corners, now that they are not full blocks.
 */
@Mixin(AbstractBlock.AbstractBlockState.class)
@SuppressWarnings({"deprecation"})
public class MixinBlockState {
    private static final String callFrom = "Lnet/minecraft/block/AbstractBlock$AbstractBlockState;getAmbientOcclusionLightLevel(Lnet/minecraft/world/BlockView;Lnet/minecraft/util/math/BlockPos;)F";
    // why is the INVOKEVIRTUAL target class Block in the bytecode, not AbstractBlock?
    private static final String callTo = "Lnet/minecraft/block/Block;getAmbientOcclusionLightLevel(Lnet/minecraft/block/BlockState;Lnet/minecraft/world/BlockView;Lnet/minecraft/util/math/BlockPos;)F";

    @Redirect(method = callFrom, at = @At(value = "INVOKE", target = callTo))
    float getAmbientOcclusionValue(Block block, BlockState state, BlockView reader, BlockPos pos) {
        return Hooks.getAmbientOcclusionLightValueOverride(block.getAmbientOcclusionLightLevel(state, reader, pos), state);
    }
}
Linda Horan sued state for card to let her buy marijuana in Maine

An Alstead woman who successfully fought to get a medical marijuana ID card in New Hampshire before state dispensaries opened has died.

Linda Horan, 64, sought medical marijuana to help relieve the pain and nausea that she experienced from stage 4 lung cancer.

Horan sued New Hampshire to get a medical marijuana ID after state health officials determined that they couldn't issue ID cards until after state dispensaries opened. Horan said that if she had an official ID from New Hampshire, she could go to Maine to buy and use medical marijuana.

A judge ruled in November that the state could begin issuing ID cards once it was clear where the dispensaries would be built and who was building them. Since that requirement was met, the Department of Health and Human Services could issue the ID.

Horan bought marijuana in Maine in December.

"What I have in front of me is not opioids, and that's the most important thing to me," she said. "I did not want to go out in an opioid haze. I wanted to be awake and aware as long as I can."

Rep. Renny Cushing, D-Hampton, confirmed Monday that Horan had died. Cushing helped Horan in her fight to get her medical marijuana ID card.
/**
 * Executes the specified method on the provided cached object.
 * @param cache The cached object on which the method is executed.
 * @param methodName The method to execute.
 * @param paramTypes An array containing the types of the parameters of the method (used to distinguish similar
 * methods)
 * @param paramValues The parameter values that are passed to the method.
 * @return The result of the method. Methods that return void will return {@link Boolean} {@code true} here.
 * If no matching method is found or the invocation fails, an exception is thrown instead.
 */
public Object executeMethod(CachedObject cache, String methodName, Class[] paramTypes, Object[] paramValues) {
    return WebAPI.runOnMain(() -> {
        Optional<?> obj = cache.getLive();
        if (!obj.isPresent())
            throw new InternalServerErrorException("Could not get live version of object");

        Object o = obj.get();
        Method[] ms = Arrays.stream(Util.getAllMethods(o.getClass())).filter(m -> {
            if (!m.getName().equalsIgnoreCase(methodName))
                return false;

            Class<?>[] reqTypes = m.getParameterTypes();
            if (reqTypes.length != paramTypes.length)
                return false;

            for (int i = 0; i < reqTypes.length; i++) {
                if (!reqTypes[i].isAssignableFrom(paramTypes[i])) {
                    return false;
                }
            }

            return true;
        }).toArray(Method[]::new);

        if (ms.length == 0) {
            throw new NotFoundException("Could not find requested method");
        }

        try {
            Method m = ms[0];
            m.setAccessible(true);
            Object res = m.invoke(o, paramValues);
            if (m.getReturnType() == Void.class || m.getReturnType() == void.class)
                return true;
            return res;
        } catch (Exception e) {
            throw new InternalServerErrorException(e.getMessage());
        }
    });
}
/**
 * Server part of the oracle program
 *
 * @author Alexander Savchenko
 * @version 1.0
 * @since 2019-07-23
 */
public class Server {
    private static final Logger LOG = LoggerFactory.getLogger(Server.class);
    private static final Map<String, Command> COMMANDS = StreamEx.of(Command.values()).toMap(Command::value, identity());

    private final Socket socket;

    public Server(Socket socket) {
        this.socket = socket;
    }

    /**
     * Entry point of the server's work: performs the handshake, then runs the main loop.
     *
     * @throws IOException if an I/O error occurs
     */
    public void run() throws IOException {
        try (final Connection connection = new Connection(socket)) {
            handshake(connection);
            mainLoop(connection);
        }
    }

    /**
     * Sends welcome message, shows supported commands
     *
     * @param connection socket connection
     */
    private void handshake(Connection connection) {
        connection.send("Hello, dear friend, I'm an oracle.");
        Command.ALL.doResponse(connection);
        connection.flush();
    }

    /**
     * Represents the main loop of the server program that repeatedly waits for commands from the client,
     * tries to serve them, then sends a response to the client
     *
     * @param connection socket connection
     * @throws IOException if an I/O error occurs
     */
    private void mainLoop(Connection connection) throws IOException {
        Command command;
        do {
            String call = connection.receive();
            command = COMMANDS.get(call.toLowerCase());
            if (command != null) {
                command.doResponse(connection);
                LOG.info("processed command {}", command);
            } else {
                connection.send("I don't understand.");
                connection.send("Try again.");
            }
            connection.flush();
        } while (command != Command.EXIT);
    }

    public static void main(String[] args) {
        // Close the ServerSocket as well as the accepted client socket.
        try (final ServerSocket serverSocket = new ServerSocket(2222);
             final Socket socket = serverSocket.accept()) {
            new Server(socket).run();
        } catch (Exception e) {
            LOG.error("I/O error in the oracle server", e);
        }
    }
}
"""Execution workers.""" import asyncio import sys, os, json import re from tempfile import gettempdir from pathlib import Path from shutil import copyfile import concurrent.futures as cf from .core import TaskBase from .helpers import get_available_cpus, read_and_display_async, save, load_and_run import logging logger = logging.getLogger("pydra.worker") class Worker: """A base class for execution of tasks.""" def __init__(self, loop=None): """Initialize the worker.""" logger.debug(f"Initializing {self.__class__.__name__}") self.loop = loop def run_el(self, interface, **kwargs): """Return coroutine for task execution.""" raise NotImplementedError def close(self): """Close this worker.""" async def fetch_finished(self, futures): """ Awaits asyncio's :class:`asyncio.Task` until one is finished. Parameters ---------- futures : set of asyncio awaitables Task execution coroutines or asyncio :class:`asyncio.Task` Returns ------- pending : set Pending asyncio :class:`asyncio.Task`. """ done = set() try: done, pending = await asyncio.wait( futures, return_when=asyncio.FIRST_COMPLETED ) except ValueError: # nothing pending! pending = set() logger.debug(f"Tasks finished: {len(done)}") return pending class DistributedWorker(Worker): """Base Worker for distributed execution.""" def __init__(self, loop=None, max_jobs=None): """Initialize the worker.""" super().__init__(loop=loop) self.max_jobs = max_jobs """Maximum number of concurrently running jobs.""" self._jobs = 0 async def fetch_finished(self, futures): """ Awaits asyncio's :class:`asyncio.Task` until one is finished. Limits number of submissions based on py:attr:`DistributedWorker.max_jobs`. Parameters ---------- futures : set of asyncio awaitables Task execution coroutines or asyncio :class:`asyncio.Task` Returns ------- pending : set Pending asyncio :class:`asyncio.Task`. """ done, unqueued = set(), set() job_slots = self.max_jobs - self._jobs if self.max_jobs else float("inf") if len(futures) > job_slots: # convert to list to simplify indexing logger.warning(f"Reducing queued jobs due to max jobs ({self.max_jobs})") futures = list(futures) futures, unqueued = set(futures[:job_slots]), set(futures[job_slots:]) try: self._jobs += len(futures) done, pending = await asyncio.wait( futures, return_when=asyncio.FIRST_COMPLETED ) except ValueError: # nothing pending! 
            pending = set()
        self._jobs -= len(done)
        logger.debug(f"Tasks finished: {len(done)}")
        # ensure pending + unqueued tasks persist
        return pending.union(unqueued)


class SerialPool:
    """A simple class to imitate a pool executor of concurrent futures."""

    def submit(self, interface, **kwargs):
        """Send new task."""
        self.res = interface(**kwargs)

    def result(self):
        """Get the result of a task."""
        return self.res

    def done(self):
        """Return whether the task is finished."""
        return True


class SerialWorker(Worker):
    """A worker to execute linearly."""

    def __init__(self):
        """Initialize worker."""
        logger.debug("Initialize SerialWorker")
        self.pool = SerialPool()

    def run_el(self, interface, rerun=False, **kwargs):
        """Run a task."""
        self.pool.submit(interface=interface, rerun=rerun, **kwargs)
        return self.pool

    def close(self):
        """Close the worker (no-op for serial execution)."""


class ConcurrentFuturesWorker(Worker):
    """A worker to execute in parallel using Python's concurrent futures."""

    def __init__(self, n_procs=None):
        """Initialize Worker."""
        super().__init__()
        self.n_procs = get_available_cpus() if n_procs is None else n_procs
        # added cpu_count to verify, remove once confident and let PPE handle
        self.pool = cf.ProcessPoolExecutor(self.n_procs)
        # self.loop = asyncio.get_event_loop()
        logger.debug("Initialize ConcurrentFuture")

    def run_el(self, runnable, rerun=False, **kwargs):
        """Run a task."""
        assert self.loop, "No event loop available to submit tasks"
        return self.exec_as_coro(runnable, rerun=rerun)

    async def exec_as_coro(self, runnable, rerun=False):
        """Run a task (coroutine wrapper)."""
        if isinstance(runnable, TaskBase):
            res = await self.loop.run_in_executor(self.pool, runnable._run, rerun)
        else:
            # it could be a tuple that includes pickle files with tasks and inputs
            ind, task_main_pkl, task_orig = runnable
            res = await self.loop.run_in_executor(
                self.pool, load_and_run, task_main_pkl, ind, rerun
            )
        return res

    def close(self):
        """Finalize the internal pool of tasks."""
        self.pool.shutdown()


class SlurmWorker(DistributedWorker):
    """A worker to execute tasks on SLURM systems."""

    _cmd = "sbatch"
    _sacct_re = re.compile(
        "(?P<jobid>\\d*) +(?P<status>\\w*)\\+? +" "(?P<exit_code>\\d+):\\d+"
    )

    def __init__(self, loop=None, max_jobs=None, poll_delay=1, sbatch_args=None):
        """
        Initialize SLURM Worker.
        Parameters
        ----------
        poll_delay : seconds
            Delay between polls to slurmd
        sbatch_args : str
            Additional sbatch arguments
        max_jobs : int
            Maximum number of submitted jobs
        """
        super().__init__(loop=loop, max_jobs=max_jobs)
        if not poll_delay or poll_delay < 0:
            poll_delay = 0
        self.poll_delay = poll_delay
        self.sbatch_args = sbatch_args or ""
        self.error = {}

    def run_el(self, runnable, rerun=False):
        """Worker submission API."""
        script_dir, batch_script = self._prepare_runscripts(runnable, rerun=rerun)
        # compare as strings; comparing a Path to the str from gettempdir() is always False
        if str(script_dir).startswith(gettempdir()):
            logger.warning("Temporary directories may not be shared across computers")
        if isinstance(runnable, TaskBase):
            cache_dir = runnable.cache_dir
            name = runnable.name
            uid = runnable.uid
        else:  # runnable is a tuple (ind, pkl file, task)
            cache_dir = runnable[-1].cache_dir
            name = runnable[-1].name
            uid = f"{runnable[-1].uid}_{runnable[0]}"

        return self._submit_job(batch_script, name=name, uid=uid, cache_dir=cache_dir)

    def _prepare_runscripts(self, task, interpreter="/bin/sh", rerun=False):
        if isinstance(task, TaskBase):
            cache_dir = task.cache_dir
            ind = None
            uid = task.uid
        else:
            ind = task[0]
            cache_dir = task[-1].cache_dir
            uid = f"{task[-1].uid}_{ind}"

        script_dir = cache_dir / f"{self.__class__.__name__}_scripts" / uid
        script_dir.mkdir(parents=True, exist_ok=True)
        if ind is None:
            if not (script_dir / "_task.pklz").exists():
                save(script_dir, task=task)
        else:
            copyfile(task[1], script_dir / "_task.pklz")

        task_pkl = script_dir / "_task.pklz"
        if not task_pkl.exists() or not task_pkl.stat().st_size:
            raise Exception("Missing or empty task!")

        batchscript = script_dir / f"batchscript_{uid}.sh"
        python_string = f"""'from pydra.engine.helpers import load_and_run; load_and_run(task_pkl="{str(task_pkl)}", ind={ind}, rerun={rerun}) '
        """
        bcmd = "\n".join(
            (
                f"#!{interpreter}",
                f"#SBATCH --output={str(script_dir / 'slurm-%j.out')}",
                f"{sys.executable} -c " + python_string,
            )
        )
        with batchscript.open("wt") as fp:
            fp.writelines(bcmd)
        return script_dir, batchscript

    async def _submit_job(self, batchscript, name, uid, cache_dir):
        """Coroutine that submits task runscript and polls job until completion or error."""
        script_dir = cache_dir / f"{self.__class__.__name__}_scripts" / uid
        sargs = self.sbatch_args.split()
        jobname = re.search(r"(?<=-J )\S+|(?<=--job-name=)\S+", self.sbatch_args)
        if not jobname:
            jobname = ".".join((name, uid))
            sargs.append(f"--job-name={jobname}")
        output = re.search(r"(?<=-o )\S+|(?<=--output=)\S+", self.sbatch_args)
        if not output:
            output_file = str(script_dir / "slurm-%j.out")
            sargs.append(f"--output={output_file}")
        error = re.search(r"(?<=-e )\S+|(?<=--error=)\S+", self.sbatch_args)
        if not error:
            error_file = str(script_dir / "slurm-%j.err")
            sargs.append(f"--error={error_file}")
        else:
            error_file = None
        sargs.append(str(batchscript))
        # TO CONSIDER: add random sleep to avoid overloading calls
        rc, stdout, stderr = await read_and_display_async(
            "sbatch", *sargs, hide_display=True
        )
        jobid = re.search(r"\d+", stdout)
        if rc:
            raise RuntimeError(f"Error returned from sbatch: {stderr}")
        elif not jobid:
            raise RuntimeError("Could not extract job ID")
        jobid = jobid.group()
        if error_file:
            error_file = error_file.replace("%j", jobid)
        # record the error file even when the user supplied --error themselves
        self.error[jobid] = error_file
        # intermittent polling
        while True:
            # 3 possibilities
            # False: job is still pending/working
            # True: job is complete
            # Exception: Polling / job failure
            done = await self._poll_job(jobid)
            if done:
                if (
                    done in ["CANCELLED", "TIMEOUT", "PREEMPTED"]
                    and "--no-requeue" not in self.sbatch_args
                ):
                    # loading info about task with a specific uid
                    info_file = cache_dir / f"{uid}_info.json"
                    if info_file.exists():
                        checksum = json.loads(info_file.read_text())["checksum"]
                        if (cache_dir / f"{checksum}.lock").exists():
                            # for Python >= 3.8 we could use missing_ok=True
                            (cache_dir / f"{checksum}.lock").unlink()
                    cmd_re = ("scontrol", "requeue", jobid)
                    await read_and_display_async(*cmd_re, hide_display=True)
                else:
                    return True
            await asyncio.sleep(self.poll_delay)

    async def _poll_job(self, jobid):
        cmd = ("squeue", "-h", "-j", jobid)
        logger.debug(f"Polling job {jobid}")
        rc, stdout, stderr = await read_and_display_async(*cmd, hide_display=True)
        if not stdout or "slurm_load_jobs error" in stderr:
            # job is no longer running - check exit code
            status = await self._verify_exit_code(jobid)
            return status
        return False

    async def _verify_exit_code(self, jobid):
        cmd = ("sacct", "-n", "-X", "-j", jobid, "-o", "JobID,State,ExitCode")
        _, stdout, _ = await read_and_display_async(*cmd, hide_display=True)
        if not stdout:
            raise RuntimeError("Job information not found")
        m = self._sacct_re.search(stdout)
        error_file = self.error[jobid]
        if int(m.group("exit_code")) != 0 or m.group("status") != "COMPLETED":
            if m.group("status") in ["CANCELLED", "TIMEOUT", "PREEMPTED"]:
                return m.group("status")
            elif m.group("status") in ["RUNNING", "PENDING"]:
                return False
            # TODO: potential for requeuing
            # parsing the error message
            error_line = Path(error_file).read_text().split("\n")[-2]
            if "Exception" in error_line:
                error_message = error_line.replace("Exception: ", "")
            elif "Error" in error_line:
                error_message = error_line.replace("Error: ", "")
            else:
                error_message = "Job failed (unknown reason - TODO)"
            raise Exception(error_message)
        return True


class DaskWorker(Worker):
    """
    A worker to execute in parallel using Dask.distributed.

    This is an experimental implementation with limited testing.
    """

    def __init__(self, **kwargs):
        """Initialize Worker."""
        super().__init__()
        try:
            from dask.distributed import Client
        except ImportError:
            logger.critical("Please install dask.distributed.")
            raise
        self.client = None
        self.client_args = kwargs
        logger.debug("Initialize Dask")

    def run_el(self, runnable, rerun=False, **kwargs):
        """Run a task."""
        return self.exec_dask(runnable, rerun=rerun)

    async def exec_dask(self, runnable, rerun=False):
        """Run a task (coroutine wrapper)."""
        if self.client is None:
            from dask.distributed import Client

            self.client = await Client(**self.client_args, asynchronous=True)
        future = self.client.submit(runnable._run, rerun)
        result = await future
        return result

    def close(self):
        """Finalize the internal pool of tasks."""
        pass
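The ConcurrentFuturesWorker above relies on one core trick: wrapping a ProcessPoolExecutor submission in a coroutine via loop.run_in_executor. A self-contained sketch of that pattern, with a trivial stand-in for the task body:

# Standalone sketch of the run_in_executor pattern used by ConcurrentFuturesWorker;
# `work` stands in for TaskBase._run and must be picklable (module level).
import asyncio
import concurrent.futures as cf

def work(x):
    return x * x

async def run_all(values):
    loop = asyncio.get_running_loop()
    with cf.ProcessPoolExecutor() as pool:
        futures = [loop.run_in_executor(pool, work, v) for v in values]
        return await asyncio.gather(*futures)

if __name__ == "__main__":
    print(asyncio.run(run_all([1, 2, 3])))  # -> [1, 4, 9]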
/**
 * Provides the Quartz {@link SchedulerFactoryBean} to the Spring context.
 *
 * @return a SchedulerFactoryBean instance
 */
@Bean
public SchedulerFactoryBean schedulerFactoryBean() {
    final SchedulerFactoryBean factory = new SchedulerFactoryBean();
    factory.setJobFactory(springBeanJobFactory());
    factory.setTriggers(indexJobTrigger());
    factory.setJobDetails(indexingJobDetail());
    return factory;
}
import crcmod

from selfdrive.car.hyundai.values import CAR, CHECKSUM

hyundai_checksum = crcmod.mkCrcFun(0x11D, initCrc=0xFD, rev=False, xorOut=0xdf)


def make_can_msg(addr, dat, alt):
  return [addr, 0, dat, alt]


def create_lkas11(packer, car_fingerprint, bus, apply_steer, steer_req, cnt, enabled,
                  lkas11, hud_alert, lane_visible, left_lane_depart, right_lane_depart,
                  keep_stock=False):
  values = {
    "CF_Lkas_Bca_R": lkas11["CF_Lkas_Bca_R"] if keep_stock else 3,
    "CF_Lkas_LdwsSysState": lane_visible,
    "CF_Lkas_SysWarning": hud_alert,
    "CF_Lkas_LdwsLHWarning": left_lane_depart,
    "CF_Lkas_LdwsRHWarning": right_lane_depart,
    "CF_Lkas_HbaLamp": lkas11["CF_Lkas_HbaLamp"] if keep_stock else 0,
    "CF_Lkas_FcwBasReq": lkas11["CF_Lkas_FcwBasReq"] if keep_stock else 0,
    "CR_Lkas_StrToqReq": apply_steer,
    "CF_Lkas_ActToi": steer_req,
    "CF_Lkas_ToiFlt": 0,
    "CF_Lkas_HbaSysState": lkas11["CF_Lkas_HbaSysState"] if keep_stock else 1,
    "CF_Lkas_FcwOpt": lkas11["CF_Lkas_FcwOpt"] if keep_stock else 0,
    "CF_Lkas_HbaOpt": lkas11["CF_Lkas_HbaOpt"] if keep_stock else 3,
    "CF_Lkas_MsgCount": cnt,
    "CF_Lkas_FcwSysState": lkas11["CF_Lkas_FcwSysState"] if keep_stock else 0,
    "CF_Lkas_FcwCollisionWarning": lkas11["CF_Lkas_FcwCollisionWarning"] if keep_stock else 0,
    "CF_Lkas_FusionState": lkas11["CF_Lkas_FusionState"] if keep_stock else 0,
    "CF_Lkas_Chksum": 0,
    "CF_Lkas_FcwOpt_USM": lkas11["CF_Lkas_FcwOpt_USM"] if keep_stock else 2,
    "CF_Lkas_LdwsOpt_USM": lkas11["CF_Lkas_LdwsOpt_USM"] if keep_stock else 3,
  }

  if car_fingerprint == CAR.GENESIS:
    values["CF_Lkas_Bca_R"] = 2
    values["CF_Lkas_HbaSysState"] = lkas11["CF_Lkas_HbaSysState"] if keep_stock else 0
    values["CF_Lkas_HbaOpt"] = lkas11["CF_Lkas_HbaOpt"] if keep_stock else 1
    values["CF_Lkas_FcwOpt_USM"] = lkas11["CF_Lkas_FcwOpt_USM"] if keep_stock else 2
    values["CF_Lkas_LdwsOpt_USM"] = lkas11["CF_Lkas_LdwsOpt_USM"] if keep_stock else 0
  elif car_fingerprint == CAR.KIA_OPTIMA:
    values["CF_Lkas_Bca_R"] = 0
    values["CF_Lkas_HbaOpt"] = lkas11["CF_Lkas_HbaOpt"] if keep_stock else 1
    values["CF_Lkas_FcwOpt_USM"] = lkas11["CF_Lkas_FcwOpt_USM"] if keep_stock else 0
  elif car_fingerprint == CAR.SONATA_LF_TURBO:
    values["CF_Lkas_FcwOpt_USM"] = 2 if enabled else 1
    values["CF_Lkas_LdwsOpt_USM"] = 2
    values["CF_Lkas_SysWarning"] = 4 if hud_alert else 0

  dat = packer.make_can_msg("LKAS11", 0, values)[2]

  if car_fingerprint in CHECKSUM["crc8"]:
    # CRC Checksum as seen on 2019 Hyundai Santa Fe
    dat = dat[:6] + dat[7:8]
    checksum = hyundai_checksum(dat)
  elif car_fingerprint in CHECKSUM["6B"]:
    # Checksum of first 6 Bytes, as seen on 2018 Kia Sorento
    checksum = sum(dat[:6]) % 256
  else:
    # Checksum of first 6 Bytes and last Byte as seen on 2018 Kia Stinger
    checksum = (sum(dat[:6]) + dat[7]) % 256

  values["CF_Lkas_Chksum"] = checksum

  return packer.make_can_msg("LKAS11", bus, values)


def create_clu11(packer, bus, clu11, button, speed, cnt):
  values = {
    "CF_Clu_CruiseSwState": button,
    "CF_Clu_CruiseSwMain": clu11["CF_Clu_CruiseSwMain"],
    "CF_Clu_SldMainSW": clu11["CF_Clu_SldMainSW"],
    "CF_Clu_ParityBit1": clu11["CF_Clu_ParityBit1"],
    "CF_Clu_VanzDecimal": clu11["CF_Clu_VanzDecimal"],
    "CF_Clu_Vanz": speed,
    "CF_Clu_SPEED_UNIT": clu11["CF_Clu_SPEED_UNIT"],
    "CF_Clu_DetentOut": clu11["CF_Clu_DetentOut"],
    "CF_Clu_RheostatLevel": clu11["CF_Clu_RheostatLevel"],
    "CF_Clu_CluInfo": clu11["CF_Clu_CluInfo"],
    "CF_Clu_AmpInfo": clu11["CF_Clu_AmpInfo"],
    "CF_Clu_AliveCnt1": cnt,
  }
  #if CS.clu11["CF_Clu_Vanz"] < 15 and CS.clu11["CF_Clu_CruiseSwState"] == 2 and not self.acc_cruise_state:
return packer.make_can_msg("CLU11", bus, values) def create_scc12(packer, apply_accel, enabled, cnt, scc12): values = { "CF_VSM_Prefill": scc12["CF_VSM_Prefill"], "CF_VSM_DecCmdAct": scc12["CF_VSM_DecCmdAct"], "CF_VSM_HBACmd": scc12["CF_VSM_HBACmd"], "CF_VSM_Warn": scc12["CF_VSM_Warn"], "CF_VSM_Stat": scc12["CF_VSM_Stat"], "CF_VSM_BeltCmd": scc12["CF_VSM_BeltCmd"], "ACCFailInfo": scc12["ACCFailInfo"], "ACCMode": scc12["ACCMode"], "StopReq": scc12["StopReq"], "CR_VSM_DecCmd": scc12["CR_VSM_DecCmd"], "aReqMax": apply_accel if enabled and scc12["ACCMode"] == 1 else scc12["aReqMax"], "TakeOverReq": scc12["TakeOverReq"], "PreFill": scc12["PreFill"], "aReqMin": apply_accel if enabled and scc12["ACCMode"] == 1 else scc12["aReqMin"], "CF_VSM_ConfMode": scc12["CF_VSM_ConfMode"], "AEB_Failinfo": scc12["AEB_Failinfo"], "AEB_Status": scc12["AEB_Status"], "AEB_CmdAct": scc12["AEB_CmdAct"], "AEB_StopReq": scc12["AEB_StopReq"], "CR_VSM_Alive": cnt, "CR_VSM_ChkSum": 0 } dat = packer.make_can_msg("SCC12", 0, values)[2] values["CR_VSM_ChkSum"] = 16 - sum([sum(divmod(i, 16)) for i in dat]) % 16 return packer.make_can_msg("SCC12", 0, values) def create_mdps12(packer, car_fingerprint, cnt, mdps12): values = { "CR_Mdps_StrColTq": mdps12["CR_Mdps_StrColTq"], "CF_Mdps_Def": mdps12["CF_Mdps_Def"], "CF_Mdps_ToiActive": 0, "CF_Mdps_ToiUnavail": 1, "CF_Mdps_MsgCount2": cnt, "CF_Mdps_Chksum2": 0, "CF_Mdps_ToiFlt": mdps12["CF_Mdps_ToiFlt"], "CF_Mdps_SErr": mdps12["CF_Mdps_SErr"], "CR_Mdps_StrTq": mdps12["CR_Mdps_StrTq"], "CF_Mdps_FailStat": mdps12["CF_Mdps_FailStat"], "CR_Mdps_OutTq": mdps12["CR_Mdps_OutTq"] } dat = packer.make_can_msg("MDPS12", 2, values)[2] checksum = sum(dat) % 256 values["CF_Mdps_Chksum2"] = checksum return packer.make_can_msg("MDPS12", 2, values) def create_vsm11(packer, vsm11, enabled, mode, steer_req,bus, cnt): values = { "CR_Esc_StrTqReq": steer_req if enabled else vsm11["CR_Esc_StrTqReq"], "CF_Esc_Act": 1 if enabled and steer_req else vsm11["CF_Esc_Act"], "CF_Esc_CtrMode": mode if enabled else vsm11["CF_Esc_CtrMode"], "CF_Esc_Def": vsm11["CF_Esc_Def"], "CF_Esc_AliveCnt": cnt, "CF_Esc_Chksum": 0, } dat = packer.make_can_msg("VSM11", bus, values)[2] values["CF_Esc_Chksum"] = sum(dat) % 256 return packer.make_can_msg("VSM11", bus, values) def create_vsm2(packer, vsm2, enabled, apply_steer,bus, cnt): values = { "CR_Mdps_StrTq": apply_steer if enabled else vsm2["CR_Esc_StrTqReq"], "CR_Mdps_OutTq": vsm2["CR_Mdps_OutTq"], "CF_Mdps_Def": vsm2["CF_Mdps_Def"], "CF_Mdps_SErr": vsm2["CF_Mdps_SErr"], "CF_Mdps_AliveCnt": vsm2["CF_Mdps_AliveCnt"], "CF_Mdps_Chksum": 0 } dat = packer.make_can_msg("VSM2", bus, values)[2] values["CF_Mdps_Chksum"] = sum(dat) % 256 return packer.make_can_msg("VSM2", bus, values) def create_spas11(packer, frame, en_spas, apply_steer, checksum): values = { "CF_Spas_Stat": en_spas, "CF_Spas_TestMode": 0, "CR_Spas_StrAngCmd": apply_steer, "CF_Spas_BeepAlarm": 0, "CF_Spas_Mode_Seq": 2, "CF_Spas_AliveCnt": frame % 0x200, "CF_Spas_Chksum": 0, "CF_Spas_PasVol": 0 } dat = packer.make_can_msg("SPAS11", 0, values)[2] """ if checksum in CHECKSUM["crc8"]: dat = dat[:6] values["CF_Spas_Chksum"] = hyundai_checksum(dat) else: values["CF_Spas_Chksum"] = sum(dat[:6]) % 256 """ #values["CF_Spas_Chksum"] = sum(dat[:6]) % 256 dat = dat[:6] values["CF_Spas_Chksum"] = hyundai_checksum(dat) #CHECKSOM TEST #dat = [ord(i) for i in dat] #values["CF_Spas_Chksum"] = sum(dat[:6]) % 256 # CRC Checksum #crc = hyundai_checksum(dat[:6] + dat[7]) #dat = [ord(i) for i in dat] # Checksum of first 6 Bytes #cs6b = 
(sum(dat[:6]) % 256) # Checksum of first 6 Bytes and last Byte #cs7b = ((sum(dat[:6]) + dat[7]) % 256) """ if en_spas is 3: print('3!') elif en_spas is 4: print('4!') elif en_spas is 5: print('5!') """ return packer.make_can_msg("SPAS11", 1, values) #def create_spas12(): #return make_can_msg(1268, "\x00\x00\x00\x00\x00\x00\x00\x00", 1) def create_spas12(packer): values = { "CF_Spas_HMI_Stat": 0, "CF_Spas_Disp": 0, "CF_Spas_FIL_Ind": 0, "CF_Spas_FIR_Ind": 0, "CF_Spas_FOL_Ind": 0, "CF_Spas_FOR_Ind": 0, "CF_Spas_VolDown": 0, "CF_Spas_RIL_Ind": 0, "CF_Spas_RIR_Ind": 0, "CF_Spas_FLS_Alarm": 0, "CF_Spas_ROL_Ind": 0, "CF_Spas_ROR_Ind": 0, "CF_Spas_FCS_Alarm": 0, "CF_Spas_FI_Ind": 0, "CF_Spas_RI_Ind": 0, "CF_Spas_FRS_Alarm": 0, "CF_Spas_FR_Alarm": 0, "CF_Spas_RR_Alarm": 0, "CF_Spas_BEEP_Alarm": 0, "CF_Spas_StatAlarm": 0, "CF_Spas_RLS_Alarm": 0, "CF_Spas_RCS_Alarm": 0, "CF_Spas_RRS_Alarm": 0 } return packer.make_can_msg("SPAS12", 0, values) def create_790(): return make_can_msg(790, "\x00\x00\xff\xff\x00\xff\xff\xff", 0) def create_ems11(packer, ems11, enabled): if enabled: ems11["VS"] = 0 return packer.make_can_msg("EMS11", 1, ems11)
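The three LKAS11 checksum variants branched on in create_lkas11 can be exercised in isolation. A sketch using the same crcmod polynomial defined at the top of the file; the eight payload bytes are arbitrary sample data, not a real CAN frame:

# Exercising the three checksum variants from create_lkas11 above.
# The payload bytes are made-up sample data.
import crcmod

hyundai_checksum = crcmod.mkCrcFun(0x11D, initCrc=0xFD, rev=False, xorOut=0xdf)

dat = bytes([0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x00, 0x70])

crc8 = hyundai_checksum(dat[:6] + dat[7:8])    # "crc8" cars: bytes 0-5 plus byte 7
cs_6b = sum(dat[:6]) % 256                     # "6B" cars: first six bytes
cs_7b = (sum(dat[:6]) + dat[7]) % 256          # "7B" cars: first six plus last byte
print(crc8, cs_6b, cs_7b)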
import * as moment from "moment";
import { Argv, CommandModule } from "yargs";
import { Database } from "../db/db";
import { CandleCollection, ICandle, ITrade } from "../util/common";

// tslint:disable-next-line:no-var-requires
const status = require("node-status");
const console = status.console();

export class SimCommandModule implements CommandModule {

    public aliases = ["b"];
    public command = "sim [algo]";
    public describe = "sim desc";

    constructor(private database: Database) { }

    public builder = (yargs: Argv) => {
        return yargs;
    }

    // tslint:disable-next-line:no-empty
    public handler = (args: any) => { }
}
Rolling Stone journalist made enemies in FBI, CIA Paul Joseph Watson Infowars.com June 20, 2013 The revelation that Rolling Stone journalist Michael Hastings was working on a story about the CIA before his death and had contacted a Wikileaks lawyer about being under investigation by the FBI hours before his car exploded into flames has bolstered increasingly valid claims that the 33-year-old was assassinated. Hastings died early Tuesday morning in Hollywood when his car allegedly hit a tree at high speed. The Los Angeles Coroner’s office has not yet been able to officially identify the body as Hastings because it is so badly burned. Skeptics of the official narrative have highlighted eyewitness accounts which state that Hastings’ Mercedes “exploded”. Images of the vehicle appear to show more damage to the rear, around the area of the fuel tank, than the front, leading to speculation that a car bomb which ignited the fuel could have been responsible for the incident. “No matter how you slice this particular pie, a Mercedes is not just going to explode into flames without a little assistance,” writes freelance journalist Jim Stone. “Car fires in new cars happen for three main reasons — running the engine out of oil, or running the engine out of coolant, or after an absolutely huge car mangling accident, having the hot side of the battery short out against the frame before it reaches the fuse panel. And for all 3 of those normal reasons, which account for virtually all car fires in modern cars, the fire would have started in the engine compartment, progressed slowly, and scorched the hell out of the paint before ever reaching the gas tank. That clean paint is the be all tell all, Michael Hastings was murdered, and the rest is detail.” Stone also questions why a white sheet has been draped over the vehicle in the image below. The questions surrounding the precise nature of the “accident” that killed Hastings are given more weight by the fact that the journalist had made enemies within both the FBI and the CIA. “Michael Hastings contacted WikiLeaks lawyer Jennifer Robinson just a few hours before he died, saying that the FBI was investigating him,” the official Wikileaks Twitter account announced yesterday. Hastings “had the Central Intelligence Agency in his sights” and was set to release an article exposing the agency, according to L.A. Weekly. The Obama administration and the Justice Department have openly claimed the authority to assassinate American citizens anywhere in the world if they are deemed a national security threat. A number of American citizens have already been killed as a result of this policy. Is it really that crazy to suggest that Michael Hastings was merely the latest victim of this doctrine? The New York Daily News highlights the fact that Hastings had received multiple death threats before his demise. Following his role in bringing down Gen. Stanley A. McChrystal, Hastings was told by a McChrystal staffer, “We’ll hunt you down and kill you if we don’t like what you write.” “Whenever I’d been reporting around groups of dudes whose job it was to kill people, one of them would usually mention that they were going to kill me,” said Hastings. Hastings was renowned for being “only interested in writing stories someone didn’t want him to write — often his subjects,” according to Buzzflash editor Ben Smith, adding, “He knew that there are certain truths that nobody has an interest in speaking, ones that will make you both your subjects and their enemies uncomfortable. 
They’re stories that don’t get told because nobody in power has much of an interest in telling them.”

The fact that Hastings had made a plethora of enemies as a result of his hard-hitting investigative journalism has prompted a deluge of online comment speculating that the writer’s “car crash” was no accident.

“Hastings’ wreck might make sense on the freeway, but I doubt he’d be dumb enough to go 100 mph on Highland. He’s not some dumb college kid,” said one commenter on a local news site.

“A warning to other journalists to not dig too deep,” another Reddit user wrote. “Stick with the party line if you want a long, happy life.”

If this was an isolated incident then there wouldn’t be so many questions swirling about Hastings’ death. However, he’s certainly not the first individual to go up against the military-industrial complex and wind up in a coffin.

Other journalists who have proven to be a thorn in the side of the establishment have met the same fate, from Andrew Breitbart, who was about to release damaging pre-election information about Barack Obama before he collapsed and died in strange circumstances, to Gary Webb, the Pulitzer Prize-winning author who exposed the CIA’s involvement in the drug trade and subsequently committed “suicide” after apparently shooting himself in the head – twice.

More recently, Ibragim Todashev, friend of accused Boston bomber Tamerlan Tsarnaev, was shot in the head six times by the FBI, who initially claimed Todashev was armed but later had to admit this was a lie. Speculation has raged that Todashev was assassinated because he had knowledge about the Boston bombings which the feds didn’t want to see the light of public scrutiny.

Despite his actions, the murder of Christopher Dorner, who was burned to death by LAPD officers while hiding inside a cabin, shows that authorities will not hesitate to resort to such methods.

It’s virtually inevitable that the true cause of Michael Hastings’ death will never be known and that the mainstream media will demonize anyone who questions the official narrative as a conspiracy theorist. Meanwhile, journalists and others who pose a threat to the military-industrial complex will continue to die in bizarre “accidents” that stink of foul play.

*********************

Paul Joseph Watson is the editor and writer for Infowars.com and Prison Planet.com. He is the author of Order Out Of Chaos. Watson is also a host for Infowars Nightly News.
/**
 * Add connection.
 *
 * @param remoteAddress remote address of the connection
 * @param trafficCounter traffic shaping handler that counts the bytes for this connection
 */
public void mapChannel(InetSocketAddress remoteAddress, ChannelTrafficShapingHandler trafficCounter) {
    LOGGER.debug("mapping " + remoteAddress);
    connectionMap.put(remoteAddress, new Connection(remoteAddress.toString(), trafficCounter));
}
/** * Returned when some type of error occurred checking a parameter. This provides * the key name and display name (if set) of the parameter where the error occurred, * as well as the error type ({@link ApiErrorType}), optionally an error message * (may be default message for the given error type), an exception if the error was * caused by a thrown exception, and if the error occurred in a nested {@link ApiMapParam} * an ApiParamError will be returned with the nested ApiParamErrors which can be traversed * to obtain the innermost ApiParamError where the error/failure occurred. * * @author Blake Howell */ public class ApiParamError { /** * In most cases the key name will be set, but this is not the case when * the failure is of type {@link ApiErrorType#CONDITIONAL_ERROR} and this * may also not be the case in an {@link ApiCustomParam} check. */ public final String keyName; /** * Parameter's display name if it was set. */ public final String displayName; /** * In the case of {@link ApiCollectionParam} this is used to track the index * where the error occurred. This can be combined with {@link #childParamError} * for multidimensional collections. If an {@link ApiParamError} has {@link #index} * set and a {@link #childParamError} then it is either a multidimensional array * or an array of maps. */ public final Integer index; /** * The error type will be the same for all nested errors. I.e., if the error * occurs in a nested map the top level map will have the same errorType as * the failing, nested, parameter. */ public final ApiErrorType errorType; /** * Error message will only exist in the root ApiParamError (i.e., the innermost * childApiParamError). Error messages should be descriptive and usually won't * return a name in them. The user can use the keyName or displayName and concat * it with the error message (e.g., The error message "Must be at least 5 characters * in length." is returned and can be sent back to the user as "Parameter error * in 'displayName'|'keyName'. Must be at least 5 characters in length."). */ public final String errorMessage; /** * Provided so that the library user can access this if need be - can be helpful * for debugging purposes. Will be null if no exception was thrown when checking. */ public final Exception exception; /** * Allows for tracking exactly where the error occurred in nested parameters. 
*/ public final ApiParamError childParamError; public ApiParamError(String keyName, String displayName, Exception exception) { this(keyName, displayName, ApiErrorType.EXCEPTIONAL, null, exception, null, null); } public ApiParamError(String keyName, String displayName, ApiErrorType type, String errorMessage) { this(keyName, displayName, type, errorMessage, null, null, null); } public ApiParamError(String keyName, String displayName, ApiErrorType type, String errorMessage, Exception exception) { this(keyName, displayName, type, errorMessage, exception, null, null); } public ApiParamError(String keyName, String displayName, ApiErrorType type, String errorMessage, Exception exception, ApiParamError childParamError) { this(keyName, displayName, type, errorMessage, exception, null, childParamError); } public ApiParamError(String keyName, String displayName, ApiErrorType type, String errorMessage, Exception exception, Integer index, ApiParamError childParamError) { this.keyName = keyName; this.displayName = displayName; this.errorType = type; this.errorMessage = errorMessage; this.exception = exception; this.index = index; this.childParamError = childParamError; } /** * In certain cases there will not be a key name. * E.g., an {@link ApiMapParamConditionalCheck} may not return * a key name. * @return true if the key name is not null. false otherwise. */ public boolean hasKeyName() { return this.keyName != null; } /** * @return true if the display name is not null. false otherwise. */ public boolean hasDisplayName() { return this.displayName != null; } public boolean hasIndex() { return this.index != null; } /** * @return true if error message is not null. false otherwise. */ public boolean hasErrorMessage() { return this.errorMessage != null; } /** * @return true if child param error is not null. false otherwise. */ public boolean hasChildError() { return this.childParamError != null; } /** * Creates a list for this error and all of its child errors. These are * in order where index 0 is THIS (calling class) error and the ascending * indices are the previous index's child error. * * Note: if this error is the child of another error the parent is not * included (as the child does not have a reference to the parent so it * cannot retrieve it). * @return list of this ApiParamError and any child errors that caused this ApiParamError. */ public List<ApiParamError> getErrorsAsList() { if (this.hasChildError()) { ApiParamError error = this; List<ApiParamError> errorList = new ArrayList<>(2); while (error.hasChildError()) { errorList.add(error); error = error.childParamError; } errorList.add(error); return errorList; } return Collections.singletonList(this); } /** * Creates a map from the fields. This map will be as complex as the * API. If there are many nested maps/arrays they will have to be * traversed in the map. 
	 * @return a map representation of this error, including any child errors
	 */
	public Map<String, Object> toMap() {
		Map<String, Object> map = new HashMap<>();
		if (this.keyName != null) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.KEY_NAME, this.keyName);
		}
		if (this.displayName != null) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.DISPLAY_NAME, this.displayName);
		}
		if (this.index != null) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.INDEX, this.index);
		}
		if (this.errorMessage != null) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.ERROR_MESSAGE, this.errorMessage);
		}
		if (this.errorType != null) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.ERROR_TYPE, this.errorType.name());
		}
		if (this.hasChildError()) {
			map.put(ApiLibSettings.ErrorMessageToMapParamNames.CHILD_ERROR, this.childParamError.toMap());
		}
		return map;
	}

	@Override
	public String toString() {
		return "Key name: " + this.keyName + ". Display name: " + this.displayName +
			". Error message: " + this.errorMessage + ". Error type: " + this.errorType +
			(this.exception != null ? (". Exception message: " + this.exception.getMessage()) : ".");
	}

	/* Static Creation Methods */

	public static ApiParamError invalid(String keyName) {
		return invalid(keyName, null);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#INVALID_PARAMETER}
	 * and {@link ApiLibSettings#DEFAULT_INVALID_PARAMETER_MESSAGE}.
	 */
	public static ApiParamError invalid(ApiParamBase apiParam) {
		return invalid(apiParam.keyName, apiParam.displayName);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#INVALID_PARAMETER}
	 * and the supplied error message.
	 */
	public static ApiParamError invalid(ApiParamBase apiParam, String errorMessage) {
		return invalid(apiParam.keyName, apiParam.displayName, errorMessage);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#INVALID_PARAMETER}
	 * and {@link ApiLibSettings#DEFAULT_INVALID_PARAMETER_MESSAGE}.
	 */
	public static ApiParamError invalid(String keyName, String displayName) {
		return invalid(keyName, displayName, ApiLibSettings.DEFAULT_INVALID_PARAMETER_MESSAGE);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#INVALID_PARAMETER}
	 * and supplied error message.
	 */
	public static ApiParamError invalid(String keyName, String displayName, String failureMessage) {
		return new ApiParamError(keyName, displayName, ApiErrorType.INVALID_PARAMETER, failureMessage);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#MISSING_PARAMETER}
	 * and {@link ApiLibSettings#DEFAULT_MISSING_PARAMETER_MESSAGE}.
	 */
	public static ApiParamError missing(ApiParamBase apiParam) {
		return missing(apiParam.keyName, apiParam.displayName);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#MISSING_PARAMETER}
	 * and {@link ApiLibSettings#DEFAULT_MISSING_PARAMETER_MESSAGE}.
	 */
	public static ApiParamError missing(String keyName, String displayName) {
		return missing(keyName, displayName, ApiLibSettings.DEFAULT_MISSING_PARAMETER_MESSAGE);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#MISSING_PARAMETER} and supplied error message.
	 */
	public static ApiParamError missing(String keyName, String displayName, String errorMessage) {
		return new ApiParamError(keyName, displayName, ApiErrorType.MISSING_PARAMETER, errorMessage);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#FORMAT_ERROR}
	 * and {@link ApiLibSettings#DEFAULT_FORMATTING_ERROR_MESSAGE}.
	 */
	public static ApiParamError format(ApiParamBase apiParam) {
		return format(apiParam.keyName, apiParam.displayName, ApiLibSettings.DEFAULT_FORMATTING_ERROR_MESSAGE);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#FORMAT_ERROR}
	 * and the supplied error message.
	 */
	public static ApiParamError format(ApiParamBase apiParam, String errorMessage) {
		return format(apiParam.keyName, apiParam.displayName, errorMessage);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#FORMAT_ERROR} and supplied error message.
	 */
	public static ApiParamError format(String keyName, String displayName, String failureMessage) {
		return new ApiParamError(keyName, displayName, ApiErrorType.FORMAT_ERROR, failureMessage);
	}

	/**
	 * Creates an ApiParamError with {@link ApiErrorType#CONDITIONAL_ERROR}
	 * and returns the error message supplied.
	 */
	public static ApiParamError conditional(String failureMessage) {
		return conditional(null, null, failureMessage);
	}

	public static ApiParamError conditional(String keyName, String displayName, String failureMessage) {
		return new ApiParamError(keyName, displayName, ApiErrorType.CONDITIONAL_ERROR, failureMessage);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#CASTING_ERROR}
	 * and {@link ApiLibSettings#DEFAULT_CASTING_ERROR_MESSAGE}
	 * as the error message.
	 */
	public static ApiParamError cast(ApiParamBase apiParam, Exception exception) {
		return cast(apiParam.keyName, apiParam.displayName, exception);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#CASTING_ERROR} and
	 * {@link ApiLibSettings#DEFAULT_CASTING_ERROR_MESSAGE}, recording the supplied exception.
	 */
	public static ApiParamError cast(String keyName, String displayName, Exception exception) {
		return new ApiParamError(keyName,
		                         displayName,
		                         ApiErrorType.CASTING_ERROR,
		                         ApiLibSettings.DEFAULT_CASTING_ERROR_MESSAGE,
		                         exception);
	}

	/**
	 * Creates ApiParamError with {@link ApiErrorType#EXCEPTIONAL} and the thrown exception.
	 *
	 * @param apiParam the parameter whose check threw the exception
	 * @param exception the thrown exception
	 * @return an ApiParamError wrapping the exception
	 */
	public static ApiParamError exceptional(ApiParamBase apiParam, Exception exception) {
		return exceptional(apiParam.keyName, apiParam.displayName, exception);
	}

	public static ApiParamError exceptional(String keyName, String displayName, Exception exception) {
		return new ApiParamError(keyName, displayName, ApiErrorType.EXCEPTIONAL, null, exception, null, null);
	}

}
def main():
    parser = argparse.ArgumentParser(description='This module reports basic statistics of the metadata files from all '
                                                 'the experiments. It outputs the % for each label and the distribution '
                                                 'of the cartesian and angular noise that was applied during the picks '
                                                 'performed at the apple proxy.')
    parser.add_argument('--dataset', default='3_proxy_winter22_x1', type=str,
                        help='Select the dataset from: "1_proxy_rob537_x1", "3_proxy_winter22_x1" or "5_real_fall21_x1"')
    args = parser.parse_args()

    a = MetadataStats()
    a.dataset = args.dataset

    success, failures, count = a.label_counter()
    print('Success Rate: %.2f' % (success / (success + failures)))

    b = a.get_info(16)
    a.noise_stats(b)
def make_index(base_dir: str, _start: bool = True) -> None: if _start: logger.info("Generating package index") index_path = os.path.join(base_dir, "Packages") index_gzip_path = os.path.join(base_dir, "Packages.gz") with open(index_path, "w") as index_file, gzip.open( index_gzip_path, "wt" ) as index_gzip_file: for entry in os.scandir(base_dir): if entry.name in ("Packages", "Packages.gz"): pass elif entry.is_dir(): make_index(entry.path, _start=False) elif entry.is_file() and entry.name.endswith(".ipk"): with open(entry.path, "rb") as package: metadata = read_ipk_metadata(package) metadata += textwrap.dedent( f"""\ Filename: {entry.name} SHA256sum: {file_sha256(entry.path)} Size: {os.path.getsize(entry.path)} """ ) index_file.write(metadata) index_gzip_file.write(metadata)
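make_index depends on helpers defined elsewhere (read_ipk_metadata and file_sha256). A plausible sketch of file_sha256 is below, assuming it returns the hex digest of the file contents; the real helper may differ.

# Hypothetical sketch of the file_sha256 helper used by make_index above;
# its actual implementation is not shown in this file.
import hashlib

def file_sha256(path: str, chunk_size: int = 65536) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()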
#!/usr/bin/env python

import argparse
from pathlib import Path
from Bio import SeqIO, SearchIO
import pandas as pd
import numpy as np
from itertools import combinations


def parse_args():
    parser = argparse.ArgumentParser(description='Calculate scores for a set of pVOG interactions, '
                                                 'provided as a tsv file')
    optionalArgs = parser._action_groups.pop()

    requiredArgs = parser.add_argument_group("required arguments")
    requiredArgs.add_argument('-p', '--profiles-file',
                              dest='profiles_file',
                              type=lambda p: Path(p).resolve(strict=True),
                              required=True,
                              help="The all.hmm file from pVOGs database")
    requiredArgs.add_argument('-g', '--genomes',
                              dest='genomes_fasta',
                              type=lambda p: Path(p).resolve(strict=True),
                              required=True,
                              help="A fasta file with the genomes")
    requiredArgs.add_argument('-hmm', '--input-hmm',
                              dest='hmmer_in',
                              type=lambda p: Path(p).resolve(strict=True),
                              required=True,
                              help="The regular output file from hmmsearch of all pvogs against "
                                   "the translated genomes")
    requiredArgs.add_argument('-ani_f', '--ani-matrix',
                              dest='ani_matrix',
                              type=lambda p: Path(p).resolve(strict=True),
                              required=True,
                              help="The square matrix resulting from fastANI with all genomes")
    requiredArgs.add_argument('-aai_f', '--aai-matrix',
                              dest='aai_matrix',
                              type=lambda p: Path(p).resolve(strict=True),
                              required=True,
                              help="The aai square matrix from compareM on all genomes")
    requiredArgs.add_argument('-o', '--output-file',
                              dest='outfile',
                              required=True,
                              type=lambda p: Path(p).resolve(),
                              help="File path to write the results in")
    parser._action_groups.append(optionalArgs)
    return parser.parse_args()


def get_seq_sizes(seq_fasta):
    """
    Create a dict that holds length sizes for all records in a fasta file.
    """
    seq_sizes = {}
    with open(seq_fasta, 'r') as fin:
        for record in SeqIO.parse(fin, "fasta"):
            seq_sizes[record.id] = len(record.seq)
    return seq_sizes


def get_maximum_index(values_list):
    """
    Get the index of the maximum value in a list

    param: list: values_list A list of values
    return: int: The index of the maximum value
    """
    if len(values_list) == 1:
        max_index = 0
    else:
        max_index = values_list.index(max(values_list))
    return max_index


def translate_to_genomic_coords(start, end, frame, genome_size):
    """
    Translate the coordinates of the protein from transeq to genomic coordinates.

    Strand is used here as orientation and not really as [non-]coding.
    If the frame is 1, 2 or 3 (-->) I call this the (+) strand.
    Else, it is the (-) strand

    param: int: start The starting coordinate on the protein
    param: int: end The ending coordinate on the protein
    param: int: frame The frame on which it is found (1-6)
    param: int: genome_size The size of the genomes
    return: tuple: (genomic start, genomic end, strand)
    """
    nucleic_start = start * 3
    nucleic_end = end * 3
    if frame == 1:
        genomic_start = nucleic_start - 2
        genomic_end = nucleic_end - 2
    if frame == 2:
        genomic_start = nucleic_start - 1
        genomic_end = nucleic_end - 1
    if frame == 3:
        genomic_start = nucleic_start
        genomic_end = nucleic_end
    if frame == 4:
        genomic_start = genome_size - (nucleic_start - 2)
        genomic_end = genome_size - (nucleic_end - 2)
    if frame == 5:
        genomic_start = genome_size - (nucleic_start - 1)
        genomic_end = genome_size - (nucleic_end - 1)
    if frame == 6:
        genomic_start = genome_size - nucleic_start
        genomic_end = genome_size - nucleic_end

    if frame in [1, 2, 3]:
        strand = '+'
    elif frame in [4, 5, 6]:
        strand = '-'
    else:
        raise ValueError("frame should be one of 1,2,3,4,5,6")

    return genomic_start, genomic_end, strand


def collect_hmmsearch_info(hmmer_in, genome_sizes):
    """
    Parse the information in a dictionary of the form
    { pvog_id: { genome_id : { frame : [(genomic_start, genomic_end, accuracy, pvog_coverage), ...], ... , } } }
    """
    hmm_results = {}
    with open(hmmer_in, 'r') as fin:
        for record in SearchIO.parse(fin, "hmmer3-text"):
            # Initialize an empty dictionary for the pvog
            hmm_results[record.id] = {}
            for hit in record.hits:
                if hit.is_included:
                    # From the transeq output, accessions are suffixed
                    # with _1, _2, _3, ..., depending on the strand
                    genome_id = '_'.join(hit.id.split('_')[0:2])
                    frame = int(hit.id.split('_')[2])
                    hsps_included = [hsp.is_included for hsp in hit.hsps]
                    # For multiple hsps that pass the threshold
                    if any(hsps_included):
                        # Get their score
                        scores = [hsp.bitscore for hsp in hit.hsps]
                        # and select the best one
                        max_i = get_maximum_index(scores)
                        best_hsp = hit.hsps[max_i]
                        # Translate back to genomic coordinates
                        genomic_coords = translate_to_genomic_coords(best_hsp.env_start,
                                                                     best_hsp.env_end,
                                                                     frame,
                                                                     genome_sizes.get(genome_id))
                        span = (best_hsp.env_end - best_hsp.env_start) / record.seq_len
                        if genome_id not in hmm_results[record.id]:
                            hmm_results[record.id][genome_id] = (frame,
                                                                 genomic_coords[0],
                                                                 genomic_coords[1],
                                                                 best_hsp.acc_avg,
                                                                 span,
                                                                 genomic_coords[2])
    return hmm_results


def get_mean_from_df(subset_list, df):
    df_subset = df.loc[subset_list, subset_list]
    return df_subset.values.mean()


def get_pvogs_ids(profiles_file):
    all_pvogs = []
    with open(profiles_file, 'r') as fin:
        for line in fin:
            if line.startswith('NAME'):
                pvog = line.split()[1]
                all_pvogs.append(pvog)
    return all_pvogs


def get_shortest_distance(startA, endA, startB, endB):
    start_to_start = abs(startA - startB)
    start_to_end = abs(startA - endB)
    end_to_start = abs(endA - startB)
    end_to_end = abs(endA - endB)
    all_distances = [start_to_start, start_to_end, end_to_start, end_to_end]
    return min(all_distances)


def calculate_scores(interaction_tuple, hmm_results, ani_df, aai_df):
    same_score = 0
    inwards_score = 0
    outwards_score = 0
    avg_distance = 1000000  # 1000000 for pairs that are never on the same genome
    js = 0
    # include the number of genomes for each participating pvog
    mean_ani = 0
    mean_aai = 0

    pvogA = interaction_tuple[0]
    pvogB = interaction_tuple[1]
    genomesA = set(hmm_results[pvogA].keys())
    genomesB = set(hmm_results[pvogB].keys())
    common_genomes = genomesA.intersection(genomesB)
    if len(common_genomes) > 0:
        all_genomes = genomesA.union(genomesB)
        # Jaccard score
        js = len(common_genomes) / len(all_genomes)
        # Mean ANI
        mean_ani = get_mean_from_df(common_genomes, ani_df)
        # Mean AAI
        mean_aai = get_mean_from_df(common_genomes, aai_df)

        # Distances
        sum_of_distances = 0
        for genome in common_genomes:
            hitA = hmm_results[pvogA][genome]
            hitB = hmm_results[pvogB][genome]
            # Get the proper starts for distance calculation
            if hitA[-1] == '+':
                startA = hitA[1]
                endA = hitA[2]
            else:
                startA = hitA[2]
                endA = hitA[1]
            if hitB[-1] == '+':
                startB = hitB[1]
                endB = hitB[2]
            else:
                startB = hitB[2]
                endB = hitB[1]

            ## 1. Start to start
            #sum_of_distances += abs(startA - startB)
            ## 2. Shortest distance
            sum_of_distances += get_shortest_distance(startA, endA, startB, endB)

            # If they have the same orientation
            # Regardless of '+' or '-'
            if hitA[-1] == hitB[-1]:
                same_score += 1
            if hitA[-1] != hitB[-1]:
                dstarts = abs(startA - startB)
                dends = abs(endA - endB)
                if dstarts >= dends:
                    inwards_score += 1
                else:
                    outwards_score += 1

        same_score = same_score / len(common_genomes)
        inwards_score = inwards_score / len(common_genomes)
        outwards_score = outwards_score / len(common_genomes)
        avg_distance = sum_of_distances / len(common_genomes)

    return len(genomesA), len(genomesB), len(common_genomes), js, same_score, inwards_score, outwards_score, avg_distance, mean_ani, mean_aai


#def get_ani_for_genomes(genomes, ani_df):
#    """
#    Calculate mean ani for the genomes list from the
#    ani df
#    """
#    genomes_df = ani_df.loc[genomes, genomes]
#    return genomes_df.values.mean()

#def get_aai_for_genomes(genomes, aai_df):
#    genomes_df = aai_df.loc[genomes, genomes]
#    return genomes_df.values.mean()


def main():
    # Read in the arguments
    args = parse_args()

    print("Loading data...")
    # Store the genome sizes
    genome_sizes = get_seq_sizes(args.genomes_fasta)
    print("Loaded sequence size info for {} input sequences".format(len(genome_sizes)))

    print("Reading hmmsearch information...")
    # Get the hmmsearch results info for all pvogs
    hmm_results = collect_hmmsearch_info(args.hmmer_in, genome_sizes)
    print("Done!")

    print("Reading ANI matrix...")
    # Read in the matrices
    ani_df = pd.read_csv(args.ani_matrix, index_col=0, header=0, sep="\t")
    print("Done!")

    print("Reading AAI matrix...")
    aai_df = pd.read_csv(args.aai_matrix, index_col=0, header=0, sep="\t")
    print("Done!")

    # Create a list that holds all pvog ids
    all_pvogs = get_pvogs_ids(args.profiles_file)
    # Create all pairs of pvogs
    all_combos = list(combinations(all_pvogs, 2))
    print("Created {} possible combinations".format(len(all_combos)))

    # Calculate scores
    print("Calculating and writing to file...")
    counter = 0
    with open(args.outfile, 'w') as fout:
        fout.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
            'pvog1', 'pvog2', 'genomes1', 'genomes2', 'overlap_genomes', 'jaccard_score',
            'same_score', 'inwards_score', 'outwards_score', 'avg_distance', 'mean_ani', 'mean_aai'))
        for combo in all_combos:
            int_string = '{}\t{}\t'.format(combo[0], combo[1])
            scores = calculate_scores(combo, hmm_results, ani_df, aai_df)
            scores_string = '\t'.join(map(str, scores))
            fout.write(int_string + scores_string + '\n')
            counter += 1
            if counter % 1000000 == 0:
                print("{}/{} processed".format(counter, len(all_combos)))

    print("Scores are written to {}".format(str(args.outfile)))


if __name__ == '__main__':
    main()
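As a quick sanity check of the coordinate helpers defined above, a few arbitrary inputs and their expected outputs (this assumes the functions are importable from the script; the module name is hypothetical):

# Sanity checks for the helpers above; the inputs are arbitrary examples.
# from calculate_scores import translate_to_genomic_coords, get_shortest_distance
print(translate_to_genomic_coords(10, 40, 1, 3000))  # -> (28, 118, '+')
print(translate_to_genomic_coords(10, 40, 4, 3000))  # -> (2972, 2882, '-')
print(get_shortest_distance(100, 400, 450, 700))     # -> 50 (end-to-start)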
Block-modular constructor as a tool of the learning community of teachers for designing educational events

The purpose of the article is to substantiate the block-modular constructor as a tool for overcoming the shortcomings of the traditional approach to lesson design. It is noted that the constructor is applicable not only in a teacher's individual work but also in the activities of learning communities of teachers. The authors explain that an educational event, as opposed to a traditional formal event, can become personally significant and saturate educational activity with new discoveries, meanings and values; it can actualize the subjectivity of its participants. This is made possible by the selection of original and relevant content, which engages the two engines of appropriating new knowledge: interest and surprise. The design and organization of the educational event, together with a revision of the ways of exchanging information and communicating with participants, are also important. The article describes an approximate algorithm for working with the block-modular constructor and offers a navigator for designing an educational event, which allows teachers to work out its organizational, content-related and motivational components. A plan is presented that teachers can rely on in the joint development of an educational event, and techniques of reflection and feedback are given to help the mentor of a learning community of teachers. Overall, the aim of the article is to interest learning communities of teachers and their mentors in the possibility of turning traditional formal events into personally significant educational events, designing them collaboratively with the block-modular constructor.
# -*- coding: utf-8 -*-

import os
import re
import time
import os.path as path
from sys import argv

cid_list = [52267372, 52267430, 52267483, 52267549, 52267604, 52267668, 52267742,
            52267813, 52267917, 52267967, 52268008, 52268067, 52268136, 52268220,
            52268272, 52268352, 52268401, 52268468, 52268517, 52268570, 52268627,
            52268687, 52268823]

patStr = '<d p="(.+?)">.*?</d>'
danmakuDir = "danmaku_cl_2014/"
xmlList = []
danmakuMap = {}
outputFile = "danmaku_cl_2014_his/"
# Minimal preamble; the exact header is an assumption (bilibili-style danmaku
# document), with <maxlimit> filled in via .format() below.
xmlPre = '<?xml version="1.0" encoding="UTF-8"?>\n<i>\n<maxlimit>{}</maxlimit>\n'
xmlSuf = '</i>'


def getDanmakuID(param):
    pList = param.split(',')
    return pList[7]


for cid in range(22, 26):
    xmlList = []
    for root, _, files in os.walk(danmakuDir + str(cid)):
        for file in files:
            xmlList.append(path.join(root, file))

    outStr = ''
    danmakuMap = {}
    for xmlFile in xmlList:
        cnt = 0
        xml = ''
        with open(xmlFile, 'r', encoding='utf8') as reader:
            print(xmlFile)
            xml = reader.read()
        for mat in re.finditer(patStr, xml):
            danmakuID = getDanmakuID(mat.group(1))
            if danmakuID in danmakuMap:
                continue
            danmakuMap[danmakuID] = mat.group(0)
            cnt += 1
        print('Add {: >4} danmaku(s) from file "{}"'.format(cnt, xmlFile))

    print('There are {} danmaku(s) in total.'.format(len(danmakuMap)))
    print('Save result.')
    time.sleep(5)

    outStr = xmlPre.format(min(1000, len(danmakuMap)))
    for v in danmakuMap.values():
        outStr += ' ' + v + '\n'
    outStr += '\n' + xmlSuf

    with open(outputFile + str(cid) + '.xml', 'w', encoding='utf8') as writer:
        writer.write(outStr)
    print('Done.')

# print('Done.')
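Each <d> element's p attribute is a comma-separated tuple, and the deduplication above keys on field index 7 (the danmaku row ID). A small sketch with a fabricated record:

# Extracting the dedup key the script above uses (pList[7]); the sample
# <d> element is fabricated but follows the same comma-separated layout.
import re

sample = '<d p="12.5,1,25,16777215,1404123456,0,a1b2c3d4,123456789">hello</d>'
mat = re.search('<d p="(.+?)">.*?</d>', sample)
fields = mat.group(1).split(',')
print(fields[7])  # -> 123456789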
By Joe Conason - February 21, 2013 Savvy Republicans know that something is deeply wrong with the GOP -- frequently mocked these days by Republicans themselves as "the stupid party" -- which has lost the popular vote in five of the last six presidential elections. Some have noticed as well that their congressional majority is so widely despised -- its main achievement being historically low public approval ratings -- as to be sustainable only by gerrymandering. During the last election cycle, those fearsome Republican super PACs, funded by the overlords of Wall Street and Las Vegas, spent hundreds of millions of dollars -- with no discernible impact on an alienated electorate. The result is a burgeoning self-improvement movement on the right, generating introspective articles and interviews in which Republicans ask: "What is wrong with us? How can we change? What must we do to avoid partisan extinction?" But like many troubled people grappling with serious life issues, they aren't truly ready for change. They want to maintain the status quo while giving lip service to reform -- and changing as little as possible beyond the superficial. They would do anything to project a fresher image, more attractive and effective, without confronting their deeper problems. The deceptions involved in this process are perfectly exposed in Robert Draper's fascinating excursion among the urbane young Republicans whose frustration he skillfully reported in last Sunday's New York Times Magazine. His account is well worth reading, if only to observe these self-consciously "hip" conservatives confronting the reality of last November -- and failing utterly to comprehend its meaning. Early in Draper's article, a GOP technology consultant notes that the youth vote for President Obama grew by 1.25 million in 2012 over 2008 (precisely the opposite of what most pundits and pollsters predicted). But he doesn't seem to realize that the youth gap cannot be remedied by stronger social media or updated voter files. The young Republicans bitterly mock the Romney campaign's technological ineptitude, and complain more broadly about the party's repellent reputation among young voters, minorities, gays, immigrants, women and everyone sympathetic to them. They largely seem to believe that if the Republican National Committee would hire people like them -- and if Rush Limbaugh and Todd Akin would simply shut the eff up -- then the party could expand beyond its narrow, aging, white, and religiously conservative base. As they hasten to assure Draper, these dissidents would adopt a friendlier attitude toward those who are different and are even eager to engineer a few minor platform alterations to accommodate immigrants or gays. But why would they make such concessions to decency? Not out of any sense of justice or shame. They are not interested in social justice and they only feel ashamed of losing. Rather than honestly confronting the harm done by pandering to bigotry and division, they'd prefer to paper it over with a smiley face and move on. By proclaiming that their defeats are due mainly to technological inferiority or bad messaging, the young Republicans ignore the underlying source of popular disdain for their party. It is true that their technology was feeble, their candidate and consultants were incompetent, and their messaging was often repellent. 
But the self-styled hipsters of the right are in fact not much different from the Tea Party octogenarians in their hostility to government investment, social insurance, health care, education, and industry - and both are in conflict with the evolving attitudes of young Americans across all demographic lines. The disgruntled figures who spoke with Draper represent almost nobody in the GOP, compared with the legions commanded by Limbaugh and the religious right. But if their fantasy could be made real, what shape would it take? A tech-savvy, gay-friendly, 21st-century Calvin Coolidge? A composite of Marco Rubio, Chris Christie and Rand Paul? Good luck with that.
/*
Sniperkit-Bot
- Status: analyzed
*/

// Copyright 2015-present, Cyrill @ Schumacher.fm and the CoreStore contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dml

import (
	"context"
	"testing"

	"github.com/corestoreio/errors"
	"github.com/corestoreio/log"
	"github.com/sniperkit/snk.fork.corestoreio-pkg/storage/null"
	"github.com/sniperkit/snk.fork.corestoreio-pkg/util/assert"
)

func TestUpdate_Basics(t *testing.T) {
	t.Parallel()

	t.Run("all rows", func(t *testing.T) {
		qb := NewUpdate("a").Set(
			Column("b").Int64(1),
			Column("c").Int(2))
		compareToSQL(t, qb, errors.NoKind,
			"UPDATE `a` SET `b`=1, `c`=2",
			"")
	})
	t.Run("single row", func(t *testing.T) {
		compareToSQL(t, NewUpdate("a").
			Set(
				Column("b").Int(1), Column("c").Int(2),
			).Where(Column("id").Int(1)),
			errors.NoKind,
			"UPDATE `a` SET `b`=1, `c`=2 WHERE (`id` = 1)",
			"UPDATE `a` SET `b`=1, `c`=2 WHERE (`id` = 1)")
	})
	t.Run("order by", func(t *testing.T) {
		qb := NewUpdate("a").Set(Column("b").Int(1), Column("c").Int(2)).
			OrderBy("col1", "col2").OrderByDesc("col2", "col3").Unsafe().OrderBy("concat(1,2,3)")
		compareToSQL(t, qb, errors.NoKind,
			"UPDATE `a` SET `b`=1, `c`=2 ORDER BY `col1`, `col2`, `col2` DESC, `col3` DESC, concat(1,2,3)",
			"UPDATE `a` SET `b`=1, `c`=2 ORDER BY `col1`, `col2`, `col2` DESC, `col3` DESC, concat(1,2,3)")
	})

	t.Run("limit offset", func(t *testing.T) {
		compareToSQL(t, NewUpdate("a").Set(Column("b").Int(1)).Limit(10),
			errors.NoKind,
			"UPDATE `a` SET `b`=1 LIMIT 10",
			"UPDATE `a` SET `b`=1 LIMIT 10")
	})
	t.Run("same column name in SET and WHERE", func(t *testing.T) {
		compareToSQL(t, NewUpdate("dml_people").Set(Column("key").Str("6-revoked")).Where(Column("key").Str("6")),
			errors.NoKind,
			"UPDATE `dml_people` SET `key`='6-revoked' WHERE (`key` = '6')",
			"UPDATE `dml_people` SET `key`='6-revoked' WHERE (`key` = '6')")
	})

	t.Run("placeholder in columns", func(t *testing.T) {
		u := NewUpdate("dml_people").Set(
			Column("key").PlaceHolder(),
		).Where(Column("key").Str("6")).WithArgs().String("Ke' --yX")
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `dml_people` SET `key`=? WHERE (`key` = '6')",
			"UPDATE `dml_people` SET `key`='Ke\\' --yX' WHERE (`key` = '6')",
			"Ke' --yX")
	})
}

func TestUpdate_SetExprToSQL(t *testing.T) {
	t.Parallel()

	t.Run("no placeholder", func(t *testing.T) {
		compareToSQL(t, NewUpdate("a").
			Set(
				Column("foo").Int(1),
				Column("bar").Expr("COALESCE(bar, 0) + 1"),
			).Where(Column("id").Int(9)),
			errors.NoKind,
			"UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 1 WHERE (`id` = 9)",
			"UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 1 WHERE (`id` = 9)",
		)
	})

	t.Run("with slice in WHERE clause", func(t *testing.T) {
		compareToSQL(t, NewUpdate("a").
			Set(
				Column("foo").Int(1),
				Column("bar").Expr("COALESCE(bar, 0) + 1"),
			).Where(Column("id").In().Int64s(10, 11)),
			errors.NoKind,
			"UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 1 WHERE (`id` IN (10,11))",
			"UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 1 WHERE (`id` IN (10,11))",
		)
	})
	t.Run("with placeholder", func(t *testing.T) {
		u := NewUpdate("a").
			Set(
				Column("fooNULL").PlaceHolder(),
				Column("bar99").Expr("COALESCE(bar, 0) + ?"),
			).
			Where(Column("id").Int(9)).
			WithArgs().NullString(null.String{}).Uint(99)
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `a` SET `fooNULL`=?, `bar99`=COALESCE(bar, 0) + ? WHERE (`id` = 9)",
			"", //"UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 2 WHERE (`id` = 9)",
			nil, int64(99))
		assert.Exactly(t, []string{"fooNULL", "bar99"}, u.base.qualifiedColumns)
	})
}

func TestUpdateKeywordColumnName(t *testing.T) {
	s := createRealSessionWithFixtures(t, nil)
	defer testCloser(t, s)

	// Insert a user with a key
	_, err := s.InsertInto("dml_people").AddColumns("name", "email", "key").
		WithArgs().Raw("Benjamin", "<EMAIL>", "6").ExecContext(context.TODO())
	assert.NoError(t, err)

	// Update the key
	res, err := s.Update("dml_people").Set(Column("key").Str("6-revoked")).Where(Column("key").Str("6")).WithArgs().ExecContext(context.TODO())
	assert.NoError(t, err)

	// Assert our record was updated (and only our record)
	rowsAff, err := res.RowsAffected()
	assert.NoError(t, err)
	assert.Equal(t, int64(1), rowsAff)

	var person dmlPerson
	_, err = s.SelectFrom("dml_people").AddColumns("id", "name", "key").
		Where(Column("email").Str("<EMAIL>")).WithArgs().Load(context.TODO(), &person)
	assert.NoError(t, err)

	assert.Equal(t, "Benjamin", person.Name)
	assert.Equal(t, "6-revoked", person.Key.String)
}

func TestUpdateReal(t *testing.T) {
	s := createRealSessionWithFixtures(t, nil)
	defer testCloser(t, s)

	// Insert a George
	res, err := s.InsertInto("dml_people").AddColumns("name", "email").
		WithArgs().ExecContext(context.TODO(), "George", "<EMAIL>")
	assert.NoError(t, err)

	// Get George's ID
	id, err := res.LastInsertId()
	assert.NoError(t, err)

	// Rename our George to Barack
	_, err = s.Update("dml_people").
		Set(Column("name").Str("Barack"), Column("email").Str("<EMAIL>")).
		Where(Column("id").In().Int64s(id, 8888)).WithArgs().ExecContext(context.TODO())
	// Meaning of 8888: Just to see if the SQL with place holders gets created correctly
	assert.NoError(t, err)

	var person dmlPerson
	_, err = s.SelectFrom("dml_people").Star().Where(Column("id").Int64(id)).WithArgs().Load(context.TODO(), &person)
	assert.NoError(t, err)

	assert.Equal(t, id, int64(person.ID))
	assert.Equal(t, "Barack", person.Name)
	assert.Equal(t, true, person.Email.Valid)
	assert.Equal(t, "<EMAIL>", person.Email.String)
}

func TestUpdate_Prepare(t *testing.T) {
	t.Parallel()

	t.Run("ToSQL Error", func(t *testing.T) {
		in := &Update{}
		in.Set(Column("a").Int(1))
		stmt, err := in.Prepare(context.TODO())
		assert.Nil(t, stmt)
		assert.True(t, errors.Empty.Match(err))
	})

	t.Run("Prepare Error", func(t *testing.T) {
		u := &Update{}
		u.DB = dbMock{
			error: errors.AlreadyClosed.Newf("Who closed myself?"),
		}
		u.Table.Name = "tableY"
		u.Set(Column("a").Int(1))

		stmt, err := u.Prepare(context.TODO())
		assert.Nil(t, stmt)
		assert.True(t, errors.AlreadyClosed.Match(err), "%+v", err)
	})
}

func TestUpdate_ToSQL_Without_Column_Arguments(t *testing.T) {
	t.Parallel()

	t.Run("with condition values", func(t *testing.T) {
		u := NewUpdate("catalog_product_entity").AddColumns("sku", "updated_at")
		u.Where(Column("entity_id").In().Int64s(1, 2, 3))
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `catalog_product_entity` SET `sku`=?, `updated_at`=? WHERE (`entity_id` IN (1,2,3))",
			"",
		)
	})
	t.Run("without condition values", func(t *testing.T) {
		u := NewUpdate("catalog_product_entity").AddColumns("sku", "updated_at")
		u.Where(Column("entity_id").In().PlaceHolder())
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `catalog_product_entity` SET `sku`=?, `updated_at`=? WHERE (`entity_id` IN ?)",
			"",
		)
	})
}

func TestUpdate_Events(t *testing.T) {
	t.Parallel()

	t.Run("Stop Propagation", func(t *testing.T) {
		d := NewUpdate("tableA")
		d.Set(Column("y").Int(25), Column("z").Int(26))

		d.Log = log.BlackHole{EnableInfo: true, EnableDebug: true}
		d.Listeners.Add(
			Listen{
				Name:      "listener1",
				EventType: OnBeforeToSQL,
				ListenUpdateFn: func(b *Update) {
					b.Set(Column("a").Int(1))
				},
			},
			Listen{
				Name:      "listener2",
				EventType: OnBeforeToSQL,
				ListenUpdateFn: func(b *Update) {
					b.Set(Column("b").Int(1))
					b.PropagationStopped = true
				},
			},
			Listen{
				Name:      "listener3",
				EventType: OnBeforeToSQL,
				ListenUpdateFn: func(b *Update) {
					panic("Should not get called")
				},
			},
		)
		compareToSQL(t, d, errors.NoKind,
			"UPDATE `tableA` SET `y`=25, `z`=26, `a`=1, `b`=1",
			"UPDATE `tableA` SET `y`=25, `z`=26, `a`=1, `b`=1", // each call ToSQL appends more columns
		)
	})

	t.Run("Missing EventType", func(t *testing.T) {
		up := NewUpdate("tableA")
		up.Set(Column("a").Int(1), Column("b").Bool(true))

		up.Listeners.Add(
			Listen{
				Name: "c=pi",
				ListenUpdateFn: func(u *Update) {
					u.Set(Column("c").Float64(3.14159))
				},
			},
		)
		compareToSQL(t, up, errors.Empty,
			"",
			"",
		)
	})

	t.Run("Should Dispatch", func(t *testing.T) {
		up := NewUpdate("tableA")
		up.Set(Column("a").Int(1), Column("b").Bool(true))

		up.Listeners.Add(
			Listen{
				Name:      "c=pi",
				EventType: OnBeforeToSQL,
				ListenUpdateFn: func(u *Update) {
					u.Set(Column("c").Float64(3.14159))
				},
			},
			Listen{
				Name:      "d=d",
				EventType: OnBeforeToSQL,
				ListenUpdateFn: func(u *Update) {
					u.Set(Column("d").Str("d"))
				},
			},
		)

		up.Listeners.Add(Listen{
			Name:      "e",
			EventType: OnBeforeToSQL,
			ListenUpdateFn: func(u *Update) {
				u.Set(Column("e").Str("e"))
			},
		})

		compareToSQL(t, up, errors.NoKind,
			"UPDATE `tableA` SET `a`=1, `b`=1, `c`=3.14159, `d`='d', `e`='e'",
			"UPDATE `tableA` SET `a`=1, `b`=1, `c`=3.14159, `d`='d', `e`='e'", // each call ToSQL appends more columns
		)
		assert.Exactly(t, `c=pi; d=d; e`, up.Listeners.String())
	})
}

func TestUpdate_SetRecord(t *testing.T) {
	t.Parallel()

	pRec := &dmlPerson{
		ID:    12345,
		Name:  "Gopher",
		Email: null.MakeString("<EMAIL>"),
	}

	t.Run("without where", func(t *testing.T) {
		u := NewUpdate("dml_person").AddColumns("name", "email").WithArgs().Record("", pRec)
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `dml_person` SET `name`=?, `email`=?",
			"UPDATE `dml_person` SET `name`='Gopher', `email`='<EMAIL>'",
			"Gopher", "<EMAIL>",
		)
	})

	t.Run("with where", func(t *testing.T) {
		u := NewUpdate("dml_person").AddColumns("name", "email").
			Where(Column("id").PlaceHolder()).WithArgs().Record("", pRec)
		compareToSQL(t, u, errors.NoKind,
			"UPDATE `dml_person` SET `name`=?, `email`=? WHERE (`id` = ?)",
			"UPDATE `dml_person` SET `name`='Gopher', `email`='<EMAIL>' WHERE (`id` = 12345)",
			"Gopher", "<EMAIL>", int64(12345),
		)
		assert.Exactly(t, []string{"name", "email", "id"}, u.base.qualifiedColumns)
	})

	t.Run("fails column `key` not in entity object", func(t *testing.T) {
		u := NewUpdate("dml_person").AddColumns("name", "email").
			Set(Column("keyXXX").PlaceHolder()).
			Where(Column("id").PlaceHolder()).
			WithArgs().Record("", pRec)
		compareToSQL(t, u, errors.NotFound,
			"",
			"",
		)
	})
}

func TestUpdate_DisableBuildCache(t *testing.T) {
	t.Parallel()

	up := NewUpdate("a").
Set( Column("foo").Int(1), Column("bar").Expr("COALESCE(bar, 0) + ?").Int(2)). Where(Column("id").PlaceHolder()). DisableBuildCache() const cachedSQLPlaceHolder = "UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 2 WHERE (`id` = ?)" const cachedSQLInterpolated = "UPDATE `a` SET `foo`=1, `bar`=COALESCE(bar, 0) + 2 WHERE (`id` = 987654321)" for i := 0; i < 3; i++ { compareToSQL(t, up.WithArgs().Uint(987654321), errors.NoKind, cachedSQLPlaceHolder, cachedSQLInterpolated, int64(987654321), ) assert.Empty(t, up.cachedSQL) } }
//
//  ATRKalmanSmoothingStrategy.h
//  AtriusLocation
//
//  Created by <NAME> on 5/18/17.
//  Copyright © 2017 Acuity Brands. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "ATRISmoothingStrategy.h"

@interface ATRKalmanSmoothingStrategy : NSObject<ATRISmoothingStrategy>

/** Process noise covariance */
@property (nonatomic) CGFloat processNoiseCovariance;

/** Measurement noise covariance */
@property (nonatomic) CGFloat measurementNoiseCovariance;

/** The initial estimation error covariance */
@property (nonatomic) ATRCoordinate* _Nullable initialEstimationErrorCovariance;

/**
 Returns a new smoothing strategy instance with default values

 Default:
 processNoiseCovariance = 0.625;
 measurementNoiseCovariance = 15.0;
 initialEstimationErrorCovariance = [[ATRCoordinate alloc] initWithX:10000 y:10000];
 */
- (nullable id) init;

/**
 Initializes the filter object with custom variables

 @param processNoiseCovariance - Process noise covariance
 @param measurementNoiseCovariance - Measurement noise covariance
 @param initialEstimationErrorCovariance - The initial estimation error covariance
 */
- (nullable id) initWithProcessNoiseCovariance:(CGFloat)processNoiseCovariance andMeasurementCovariance:(CGFloat)measurementNoiseCovariance andInitialEstimationErrorCovariance:(ATRCoordinate* _Nullable)initialEstimationErrorCovariance;

@end
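The header above only declares the covariance knobs; for orientation, here is a minimal Python sketch of the scalar Kalman update such parameters typically drive. The constant-position model and the function name are assumptions for illustration (the Objective-C implementation is not shown); the default values mirror the ones documented in the header.

# Minimal scalar Kalman smoothing sketch (an assumption about what the header's
# parameters drive; the library's actual implementation is not shown).
def kalman_smooth(measurements, q=0.625, r=15.0, p0=10000.0):
    """q: process noise covariance, r: measurement noise covariance,
    p0: initial estimation error covariance (defaults mirror the header)."""
    x, p = measurements[0], p0          # state estimate and its error covariance
    smoothed = [x]
    for z in measurements[1:]:
        p = p + q                       # predict: uncertainty grows by process noise
        k = p / (p + r)                 # Kalman gain: how much to trust the new measurement
        x = x + k * (z - x)             # update the estimate toward the measurement
        p = (1.0 - k) * p               # shrink uncertainty after the update
        smoothed.append(x)
    return smoothed

print(kalman_smooth([10.0, 12.0, 11.0, 13.0]))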
import * as React from "react"; import { makeStyles } from "@material-ui/core/styles"; import { Theme } from "@material-ui/core/styles"; import { Icon, Grid, IconButton } from "@material-ui/core"; import { DragHandle } from "@material-ui/icons"; import { subscribeChapterById_chapter_components } from "queries/__generated__/subscribeChapterById"; import ActionMenu from "./ActionMenu"; const useStyles = makeStyles((theme: Theme) => ({ container: {}, preview: { flex: 1, }, })); interface Props { provided?: any; data: subscribeChapterById_chapter_components; preview?: React.ReactNode; } const ComponentHeader = ({ provided, data, preview }: Props) => { const classes = useStyles(); const [isOver, setIsOver] = React.useState(false); return ( <Grid item container justify="space-between" alignItems="stretch" className={classes.container} > {provided ? ( <Grid item onMouseEnter={() => setIsOver(true)} onMouseLeave={() => setIsOver(false)} > <IconButton {...provided.dragHandleProps}> {isOver ? <DragHandle /> : <Icon>{data.type.icon}</Icon>} </IconButton> </Grid> ) : null} <Grid item container className={classes.preview} justify="center" alignItems="center" > {preview} </Grid> <Grid item> <ActionMenu data={data} /> </Grid> </Grid> ); }; export default ComponentHeader;
Blogger Mohamed Cheikh Ould Mohamed in 2012 in Nouadhibou, Mauritania. (Photo provided by Aysha Ould Mohamed)

Sherif Mansour is the Middle East and North Africa Program Coordinator for the Committee to Protect Journalists.

This week’s assassination of Jordanian commentator Nahed Hattar as he went to court to face blasphemy accusations was shocking. But is a shooting death by a suspected extremist more, or less, barbaric than a government putting one of its own citizens to death on the same charge? Authorities in Mauritania plan to do just that. Mohamed Cheikh Ould Mohamed, a blogger also known as Mohamed Ould M’Kaitir, is on death row in the western North African country, convicted of blasphemy in 2014. His sentence was upheld in April this year, and he awaits a final decision from Mauritania’s Supreme Court. Mohamed had frequently written articles that criticized some Islamic religious beliefs and conservative practices in Mauritania. The piece that got him arrested in January 2014 and landed him on death row was published on the news website Aqlame on Dec. 31, 2013, and titled, “Religion, religiosity, and craftsmen.” It criticized Mauritania’s caste system — an extremely delicate subject in a country that still practices slavery. In a letter he wrote from prison to the Committee to Protect Journalists this month, Mohamed said his only crime was “expressing my ideas in an article about the reality we live in — a reality that is known to all but few dare to expose.” Mohamed cannot rely on support inside his own country. His article prompted nationwide demonstrations in January 2014, and a local preacher announced he had three days to repent or the preacher would pay 4,000 euros to anyone who killed the blogger, according to news reports. Those who have dared to speak out on his behalf, including leading women’s rights activist Aminetou Mint El-Moctar, are labeled infidels. The head of a radical Islamist movement threatened El-Moctar with death, declared it lawful to seize her family members and assets and said, “Those who kill or poke out her eyes will be rewarded by Allah.” Mohamed’s father has publicly denounced him, and lawyers have shied away from his case. Even backing by the United Nations and international human rights groups may not be enough to save Mohamed’s life. In his letter to the CPJ, he appealed for support from the American public and the U.S. government: “I believe this in my heart; freedom will not be defeated. Hence, I direct my appeal to a country that has shown the world the meaning of humanity, the meaning of the beauty of life, and the meaning of freedom.” Yet the case has global implications, particularly in the wake of Hattar’s assassination. Blasphemy accusations have landed Saudi blogger Raif Badawi with 10 years’ imprisonment and 1,000 lashes. The liberal blogger, who advocated against terrorism, has already been flogged 50 times and his flogging could resume anytime, endangering his life. Blasphemy accusations have also led extremists to hack to death at least five bloggers in Bangladesh in recent years. Both state and nonstate actors leverage the accusation to commit censorship. The attack on Charlie Hebdo last year, in which 12 people — including eight journalists — were killed, set off a fierce worldwide debate about the correct balance between protecting freedom of expression and countering violent extremism.
The irony of Mauritanian authorities condemning the killings, which were carried out in the name of punishing blasphemy, even as they sentenced a blogger to death on the same charge, was lost on many. As this debate continues, and as the standards of freedom of expression struggle to stay afloat, the focus should at least be on saving the lives of those expressing their opinions. This is why it is essential that Mauritania get the message: Violent extremism won’t be tolerated even when — especially when — perpetrated by government.
Low Profile Dual Band H-Slotted DGS Based Antenna Design Using ANN for K/Ku Band Applications
In this manuscript, an H-slotted DGS-based antenna configuration for K/Ku-band applications is designed, fabricated, and measured using a ROHDE & SCHWARZ ZVB20 network analyzer. Simulation studies are performed with the electromagnetic simulation software CST Studio Suite. Moreover, an Artificial Neural Network (ANN) model is implemented based on feedforward back-propagation with the Bayesian regularization learning algorithm. The optimal slot position for H-slotted DGS antenna performance is predicted in terms of reflection coefficient and resonance frequency. The antenna is implemented on a Rogers RT/Duroid 5880 substrate (loss tangent 0.0009) with a thickness of 0.38 mm. It operates at both 15.04 GHz (from 14.87 GHz to 15.208 GHz) and 24.77 GHz (from 24.404 GHz to 25.137 GHz). The antenna achieves gains of 5.59 dB and 6.10 dB and efficiencies of 87% and 80% at the two frequencies, respectively. The CST simulation, ANN prediction, and measurement results are in good agreement.
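To make the ANN modeling step concrete, here is a minimal Python sketch of a feedforward regression surrogate mapping a slot position to reflection coefficient and resonance frequency. It uses scikit-learn's MLPRegressor with L2 regularization as a rough stand-in for the Bayesian-regularized back-propagation described above; the feature names and training data are hypothetical, not the paper's dataset.

# Hypothetical sketch of the ANN surrogate: slot position -> (S11 in dB, f_res in GHz).
# L2-regularized backprop here only approximates the paper's Bayesian regularization.
import numpy as np
from sklearn.neural_network import MLPRegressor

rng = np.random.default_rng(0)
X = rng.uniform(0.0, 5.0, size=(200, 2))            # (x, y) slot offsets in mm, made up
y = np.column_stack([                               # synthetic targets standing in for CST runs
    -20.0 - 3.0 * np.sin(X[:, 0]) + 0.5 * X[:, 1],  # reflection coefficient, dB
    15.0 + 0.1 * X[:, 0] - 0.05 * X[:, 1],          # resonance frequency, GHz
])

model = MLPRegressor(hidden_layer_sizes=(20, 20), alpha=1e-3, max_iter=5000, random_state=0)
model.fit(X, y)
print(model.predict([[2.5, 1.0]]))                  # predicted (S11, f_res) for a candidate position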
<gh_stars>0 ''' hardcoded values match Example 6.5 in the book ''' from matplotlib.animation import FuncAnimation import matplotlib.pyplot as plt import numpy as np class WindyGridworld: def __init__(self) -> None: self.alpha = 0.1 self.epsilon = 0.1 # set start state self.start = (3,0) # set terminal state self.terminal_state = (3,7) # initialize state-action values; 7x10 grid with 4 actions self.action_values = np.zeros(shape=(7,10,4)) # set wind values; applies "upward" to each column self.wind = np.array([0,0,0,1,1,1,2,2,1,0]) self.frames = np.ones(shape=(1,7,10)) def randomArgMax(self, arr): # returns the index of the highest value with ties broken randomly # _maxs is a boolean array where True values are occurrences of the max value max_as_bool = arr == arr.max() max_indices = np.flatnonzero(max_as_bool) return np.random.choice(max_indices) def takeAction(self, agent_pos, agent_action): # the action values are ordered such that 0-up, 1-right, 2-down, 3-left; like a clock r, c = agent_pos if agent_action == 0: r -= 1 elif agent_action == 1: c += 1 elif agent_action == 2: r += 1 elif agent_action == 3: c -= 1 # check whether agent tried to leave grid if r in [-1, self.action_values.shape[0]]: # agent tried to move off top/bottom, so return original pos return agent_pos elif c in [-1, self.action_values.shape[1]]: # same, but left/right return agent_pos else: # valid move return r,c def episode(self): # defines the steps that the agent takes from the starting point to the terminal state # set agent to start state agent_pos = self.start agent_path = [agent_pos] # choose a from s based on eps-greedy policy (max of action values for state s) if np.random.random() < self.epsilon: agent_action = np.random.randint(4) else: agent_action = self.randomArgMax(self.action_values[agent_pos]) while True: # loop until agent finds terminal state if agent_pos == self.terminal_state: break # apply wind in agent's column gust = self.wind[agent_pos[1]] # init s' next_agent_pos = agent_pos for _ in range(gust): # move up as many times as the gust is strong next_agent_pos = self.takeAction(next_agent_pos, 0) # take action a with application of wind next_agent_pos = self.takeAction(next_agent_pos, agent_action) # store reward = -1 for all non-terminal moves if next_agent_pos == self.terminal_state: reward = 0 else: reward = -1 # choose a' from s' using greedy next_agent_action = self.randomArgMax(self.action_values[next_agent_pos]) # q(s', a') q_sp_ap = self.action_values[next_agent_pos][next_agent_action] # q(s, a) q_s_a = self.action_values[agent_pos][agent_action] # update q(s,a) += alpha (reward + q(s',a') - q(s,a)); undiscounted, off-policy SARSA self.action_values[agent_pos][agent_action] += self.alpha * (reward + q_sp_ap - q_s_a) # s = s' agent_pos = next_agent_pos agent_path.append(agent_pos) # a = a' # off-policy means we only use the greedy action for our prediction: q(s', a') # instead of follow our eps-greedy policy # but we include epsilon when actually making a move if np.random.random() < self.epsilon: agent_action = np.random.randint(4) else: agent_action = next_agent_action # return the agent's path on this episode return agent_path def simulate(self, n_episodes): # calls each episode and stores analysis data structures avg_steps = 0 for e in range(n_episodes): if e % 1000 == 0: print(f"average steps after {e} episodes = {avg_steps}") # self.printGreedyPolicy() agent_path = self.episode() if e % 1000 == 0: self.animationBuilder(agent_path) avg_steps += (len(agent_path)-avg_steps) / (e+1) def 
printGreedyPolicy(self): print(f"policy:") for r in self.action_values: for c in r: a = self.randomArgMax(c) if a == 0: a = "^" elif a == 1: a = ">" elif a == 2: a = "v" elif a == 3: a = "<" print(a, end=" ") print() print() def animationBuilder(self, agent_path): episode_frames = np.ones(shape=(len(agent_path),7,10)) for f, pos in enumerate(agent_path): episode_frames[f][pos] = 0 self.frames = np.concatenate((self.frames, episode_frames), axis=0) def showAnimation(self): fig = plt.figure("Windy Gridworld",figsize=(20,15)) ax = fig.add_subplot(111) def animate(frame): im = ax.matshow(self.frames[frame], cmap='plasma') return im, ani = FuncAnimation(fig, animate, frames=self.frames.shape[0], interval=10, blit=True) plt.show() if __name__ == "__main__": wgw = WindyGridworld() wgw.simulate(10000) wgw.showAnimation()
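One nuance worth flagging in the gridworld code above: the TD update in episode() bootstraps from the greedy next action while the agent behaves epsilon-greedily, so despite the in-code comment calling it "off-policy SARSA", the update is the standard Q-learning rule. Below is a self-contained toy contrast of the two targets; all values and names are made up for illustration.

# Toy contrast of the two TD targets on a single transition (values made up).
alpha, reward = 0.1, -1.0
Q = {('s', b): 0.0 for b in range(4)}           # action values in the current state
for b, v in enumerate([0.0, 0.5, -0.2, 0.1]):   # action values in the next state
    Q[('sp', b)] = v
s, a, sp = 's', 1, 'sp'
ap = 3  # the action the eps-greedy behavior policy happens to take next

sarsa_target = reward + Q[(sp, ap)]                              # on-policy SARSA
q_learning_target = reward + max(Q[(sp, b)] for b in range(4))   # what episode() uses

Q[(s, a)] += alpha * (q_learning_target - Q[(s, a)])
print(sarsa_target, q_learning_target, Q[(s, a)])   # -0.9 -0.5 -0.05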
import { Signal2D, Values } from '../../types';
import { PathLength } from '../../types/spectrum/pathLength';
import { findLinksBySignalID } from './findLinksBySignalID';

export function setPathLength(
  correlations: Values,
  signalID: string,
  pathLength: PathLength | undefined,
): Values {
  const links = findLinksBySignalID(correlations, signalID);
  links.forEach((link) => {
    const signal = link.signal as Signal2D;
    if (pathLength) {
      signal.pathLength = pathLength;
    } else {
      delete signal.pathLength;
    }
  });
  return correlations;
}
Lateral Collateral Ligament and Proximal Tibiofibular Joint Reconstruction for Tibiofibular Instability

Instability of the proximal tibiofibular joint (PTFJ) can be post-traumatic or due to accumulative injuries and may also be an underdiagnosed pathology that can present with symptoms of lateral and/or medial knee pain. It can be associated with subtle instability and subluxation or frank dislocation of both the PTFJ and the native knee joint. Previously described techniques are either nonanatomic, require secondary hardware removal, disrupt native anatomy, or fail to account for the inherent stabilizing effect of the lateral collateral ligament, which is likely additionally injured or lax in these patients. The purpose of this Technical Note is to present an open anatomic reconstruction of the PTFJ and lateral collateral ligament using a single semitendinosus allograft, thus restoring all anatomic constraints to the PTFJ and lateral knee.

Instability of the proximal tibiofibular joint (PTFJ) is a rare, yet likely underdiagnosed, pathology that can present with symptoms of lateral and/or medial knee pain. 1 It can be associated with subtle instability and subluxation or frank dislocation of both the PTFJ and the native knee joint. 1,2 Notably, PTFJ instability has been reported to occur in up to 9% of multiligamentous knee injuries. 2 The traditionally described instability pattern of an injured PTFJ is anterolateral, due to the stronger anterior versus posterior ligaments of the joint. Given the anatomic relationship of the lateral collateral ligament (LCL) to the PTFJ, we propose that the LCL plays an impactful role in anterolateral stability of the PTFJ and, thus, injuries to the PTFJ can inherently produce laxity in the LCL. First-line treatment of PTFJ instability includes a period of activity modification, physical therapy, and bracing and/or taping. 1 Immobilization may be indicated in acute dislocations as well. 6 When patients do not respond to nonoperative treatment, surgical intervention may be warranted. Multiple approaches have been described, including temporary screw stabilization, allograft reconstruction, split biceps femoris reconstruction, dynamic suture button fixation, fibular head excision, arthrodesis, and direct repair. 2,5 However, these techniques are either nonanatomic, require secondary hardware removal, disrupt native anatomy, or fail to account for the inherent stabilizing effect of the LCL, which is likely additionally injured or lax in these patients. The purpose of this Technical Note is to present an open anatomic reconstruction of the PTFJ and LCL using a single semitendinosus allograft, thus restoring all anatomic constraints to the PTFJ and lateral knee.

Surgical Technique (With Video Illustration)
A narrated video with demonstration of the surgical technique described in the following may be reviewed (Video 1). The patient is positioned supine with a lateral post on the operating room table. We prefer to use regional anesthetics in addition to general anesthesia, including a sciatic indwelling catheter that is placed dry in the preoperative holding area. This allows adequate examination of the peroneal nerve in the postoperative recovery unit and subsequent dosing thereafter. Prophylactic intravenous antibiotics are administered. Anatomical landmarks are identified, including Gerdy's tubercle, the fibular head, the lateral joint line, the lateral epicondyle of the femur, and the posterior apex of the lateral femoral condyle.
The leg is exsanguinated, and a thigh tourniquet inflated. The knee is flexed to 90° (an assistant can manually hold the foot, the foot can be propped against the surgeon's leg, or a commercial foot holder can be used) and an incision is made from the lateral epicondyle of the femur in a trajectory that intersects Gerdy's tubercle and the fibular head (FH). Skin flaps are elevated anteriorly, posteriorly, superiorly, and inferiorly, taking care to note the common peroneal nerve (CPN), which exits from deep and posterior to the biceps femoris (BF) as it courses across the proximal fibular neck. We first identify the CPN as it crosses the fibular neck (Fig 1). A thorough neurolysis is performed, including releasing the proximal-most aspect of the overlying fascia of the lateral leg compartment distally, and then also more proximally on the undersurface of the BF. A vessel loop is placed around the nerve without instruments so that it can be readily identified but is not on traction at any point. Next, we identify the native LCL in the interval anterior to the BF insertion and posterior to the iliotibial band (ITB) (Table 1). It is isolated and tagged with a #2 nonabsorbable stitch (FiberWire; Arthrex, Naples, FL). Pulling tension on the stitch will demonstrate the laxity in the LCL and/or the mobility in the FH (Fig 2). Using manual pressure or a clamp on the FH will demonstrate this LCL laxity in the lateral plane, in addition to the anteroposterior instability of the PTFJ. Next, the LCL origin is identified at the lateral epicondyle of the femur (Fig 3). Pulling and releasing tension repeatedly on the LCL tag stitch assists in identification, and a 2-cm split can be established in the ITB at the origin. The origin is marked and a Beath pin is passed from lateral in an anteromedial trajectory out the medial femoral skin. A 6-mm reamer is used over the pin and drilled to a depth of 30 mm. A passing suture is shuttled through the tunnel with the Beath pin and it is snapped and set aside. The LCL insertion on the anterolateral FH is identified through palpation of its anatomic bony ridge as well as with assistance of pulling and releasing the LCL tag stitch (Fig 4). The site is marked. Next, the lateral head of the gastrocnemius is elevated off the posterolateral FH using a combination of Bovie and a Freer elevator, with care taken to note the location of the CPN. A window is created with blunt finger dissection across the posterior FH until the posterior PTFJ and lateral tibial metaphyseal flare are readily palpable. A threaded 2.4-mm guide pin is next drilled from the LCL insertion that was previously marked, in a posteromedial trajectory through the FH, taking care to protect the posterior soft tissues of the knee.

Pearls: Avoid injuries to the posterior neurovascular structures and soft tissues of the knee. Avoid overconstraining or a nonanatomic reduction of the joint by reducing with light manual pressure. Dissection/work on the lateral metaphyseal flare of the tibia is often by feel due to poor visualization of this area. Perform a diagnostic knee arthroscopy afterward to assess for countercoup lesions in the medial compartment.
Hemostats are used to fashion a tunnel under the BF and ITB in the trajectory of the native LCL for lateral anatomic graft passage. The PTFJ tibial tunnel is prepared next. Gerdy's tubercle is marked, and a 2.4-mm threaded pin is passed from anterior to posterior, in a trajectory matching the posterior slope of the tibia, to the position of the posterior PTFJ on the lateral metaphyseal flare of the tibia (Fig 5). This can be done using a commercial guide (Arthrex), or by hand with finger palpation. Care is taken to protect the posterior soft tissues of the knee. A 6-mm reamer is used over the wire to complete the tunnel. A wire suture passing device is used to shuttle a passing stitch through the tibial tunnel so that passing of the future graft can occur from posterior to anterior through the tibia. The passing stitch is snapped and set aside. A semitendinosus allograft is prepared on the back table during tunnel preparation with a nonabsorbable #2 high-strength Krackow suture in each end. For implantation, one limb is shuttled and docked into the LCL origin tunnel on the femur. Pulling medial tension on the sutures, and a gentle downward tension on the graft in the trajectory of the native LCL, a 7 × 23-mm PEEK (polyether ether ketone) interference screw is inserted, securing the graft in the femur (Fig 6). Hemostats are used to shuttle the graft through the previously established tunnel deep to the ITB and BF. The graft is next shuttled through the FH from anterior to posterior. The knee is positioned in 70° of flexion, with internal rotation of the tibia and a valgus stress applied to the knee, and a 7 × 23-mm PEEK interference screw is placed from anterior to posterior, securing the graft in the FH and completing the LCL stabilization component of the procedure. Manual manipulation of the FH will demonstrate increased stability even before reconstruction of the PTFJ. To complete anatomic reconstruction of the PTFJ, the residual posterior allograft is shuttled from posterior to anterior through the tibial tunnel. A bump is placed under the knee holding it at approximately 20° to 30° of flexion. While pulling anterior tension on the graft, an 8 × 30-mm biocomposite interference screw is placed over a Nitinol wire, securing the graft and completing the PTFJ stabilization component of the procedure (Fig 6). At this juncture, we recommend performing a diagnostic knee arthroscopy, as it is not uncommon to encounter countercoup lesions of the medial meniscus and medial compartment chondral surface. For closure, the defects in the ITB and the ITB-BF interval are closed with 0-VICRYL (Ethicon, Somerville, NJ) figure-of-8 sutures. Layered closure of preference is then performed. Postoperatively, the patient is made toe-touch weight-bearing for 4 to 6 weeks with a range-of-motion knee brace that allows 0° to 90° of flexion. They transition to a functional brace thereafter, with light strengthening and mobility exercises through the first 12 weeks to allow for graft incorporation. This is followed by progressive strengthening and eventually agility and sport-specific exercises, with expected return to play at 8 to 9 months.

Discussion
PTFJ anatomic reconstruction can result in improved patient outcomes with low complication rates 1 ; however, it is critical to acknowledge the additional injuries to the lateral stabilizing structures (i.e., the LCL) of the knee.
This proposed technique provides complete anteroposterior and medial-lateral stabilization of the PTFJ by adding the LCL component and using a single semitendinosus tendon allograft. It is anatomic in principle and, unlike other previously described techniques, does not require any secondary surgery for hardware removal (Table 2). In 2017, Kruckeberg et al. 1 performed a systematic review of treatment of and approach to the PTFJ. They identified 44 studies with 96 patients who were eligible to be included in the study. The treatments included nonoperative (18 studies, 35 patients), open reduction (3 studies, 4 patients), fixation (11 studies, 25 patients), proximal fibular resection (4 studies, 10 patients), adjustable cortical button fixation (3 studies, 11 patients), ligament reconstruction (2 studies, 3 patients), and biceps femoris tendon rerouting (5 studies, 8 patients). All studies reported improved outcomes; however, there were high complication rates associated with PTFJ fixation (28%) and FH resection (20%). Nonoperative treatment was associated with persistent symptoms despite improved outcomes, whereas biceps rerouting and anatomic reconstruction had the best outcomes, with low complication rates. However, it should be noted that all these findings were limited to level IV case series with very low patient numbers. While the proposed technique described here has the advantage of being a complete, multiplanar reconstruction of the PTFJ, it is not without disadvantages (Table 2). This technique is more technically demanding than previously described techniques and requires a more thorough understanding of the posterolateral anatomy of the knee. By using the aforementioned pearls, where the peroneal nerve is isolated and the course of the native LCL is clearly defined, the anatomical reconstruction is easily reproducible and can be done in an efficient manner. Second, this technique is more costly than others described, with the addition of the LCL reconstruction and the use of allograft; however, we feel that achieving multiplanar stability is necessary for eliminating symptoms and improving function and outcomes. In conclusion, this multiplanar reconstruction with semitendinosus allograft provides reproducible, anatomic restoration of stability to the PTFJ. It provides lateral knee constraint through reconstruction of the LCL, which is often additionally injured or lax.

Table 2. Advantages and Disadvantages of the Technique
Advantages: Adding the LCL reconstruction not only provides varus knee stability but also adds inherent stability to the PTFJ. The technique provides anteroposterior and medial-lateral stability of the PTFJ. There is no disruption of native anatomy or need for hardware removal.
Disadvantages: Adding the LCL reconstruction requires additional surgical planning and tunnel drilling. It is more costly than using the biceps femoris split or only reconstructing the PTFJ (without the LCL). It is more technically demanding than a suture button technique.
Rick Westhead, TSN Senior Correspondent

Several of hockey's most prominent player agents — whose companies collectively represent about one-third of active NHLers — say the Canadian Hockey League needs to relax its policy of voiding educational scholarships 18 months after a player's over-age season in major-junior hockey. As things now stand, a player who finishes CHL hockey at age 19 and plays in the ECHL, the AHL, or for a professional team in Europe for three years would be ineligible — at age 22 — to take advantage of his scholarship. Ontario Hockey League Commissioner David Branch, who also heads the CHL, says that when the OHL put restrictions on scholarships, he consulted with the parents of OHL players. Those parents, Branch said, support the limiting of scholarships for players so that they are pressured to enter post-secondary school when they are "age appropriate." Branch said he was traveling and was unable to provide the contact information for any of the parents involved in the consultation process. "The CHL's dirty little secret is they don't want players using these packages," said Octagon Hockey's Allan Walsh, whose NHL clients include Marc-Andre Fleury and Jonathan Huberdeau. "They're severely limiting the number of former players that can make use of their earned school money." Two other agents, CAA's Pat Brisson and Newport Sports' Don Meehan, also say the CHL should be open to changing its policy to encourage more former players to go to college or university. Brisson and Walsh say players should be allowed to use their full education scholarships at least three years after their major-junior eligibility. Meehan said the issue deserves more discussion. "Fair is fair and this isn't fair," Walsh said in an interview. "These players are not being given a fair shake and it's time for all of us to do something about it." Walsh said the overwhelming majority of junior players do not advance to the NHL. "That's who we're talking about," he said. "This is their life, their future. They earned this money. The CHL should give it to them." Major-junior hockey across Canada is facing an unprecedented time of change. Former players have sued the CHL and its three affiliated leagues, the Ontario, Western and Quebec Major-Junior Hockey Leagues, charging that while the junior hockey industry has become a huge business, most players are not sharing in the financial gains. Besides the lawsuit, in which players are asking the leagues for backpay of nearly $200 million and asking the court to order leagues to pay players at least minimum wage, authorities in Washington state are investigating whether four teams there have violated child labour laws by allegedly paying players as little as $35 per week. Washington's state senate on Tuesday approved 47-0 a motion to exempt junior teams there from paying minimum wage. The bill now moves to the state house of representatives for another vote before it becomes law. Canada's biggest private-sector union, Unifor, meantime, has spent months lobbying the Ontario government to establish a task force to investigate the business of the OHL. That some of hockey's most prominent agents - rivals who all enjoy close ties to CHL team executives - are beginning to speak out in favour of additional player rights is another challenge for Branch to navigate.
"The CHL is very proud of our scholarship package for a number of reasons, not the least of which is the number of players who leave the CHL to go on to play CIS hockey," CHL commissioner David Branch told TSN. "The 18-month window commences at the conclusion of a players' junior eligibility. In reality, it is a 30-month window for the majority of players save and except for a maximum of three overage players that may dress and play for a team in any given year. "After consultation with our parents, alumni and the CIS, the 18-month rule was put into effect in an effort to ensure the players are utilizing their scholarships and accessing the educational dollars which are afforded to them in a timely fashion. The longer that a student remains out of post-secondary education, the less likely it is they will ever attend. In addition the 18-month rule ensures that our players are entering post-secondary education at a time when it is age appropriate for them. The 18-month rule ensures that more scholarship dollars are spent by our teams, not less as has been implied by some third parties. Brisson, who represents NHL stars such as Sidney Crosby, played four years of junior hockey himself from 1982 to 1986. "Things haven't changed much since then," he said. "It would be nice to see the window expand from 18 months to perhaps three years," Brisson said. "It'd be good to see a player have the option to go back to school later." Brisson, who also played professionally in Europe - he says he was paid perhaps $14,000 for an entire season - said he wants changes to protect the vast majority of junior players who don't make it to the NHL. No more than 20 per cent of players selected in the first three rounds of the NHL entry draft go on to an "actual career" in the NHL, Brisson said. "I understand not all teams are making money in the CHL," Brisson said. "But at the same time we're trying tp promote education as a whole outside development. Less than two per cent of junior players have an actual NHL career with 150 to 200 games in the NHL...you become a better ambassador of hockey if you have a better education." Meehan said he supports the move to expand the education package window. "Does there deserve to be some kind of change or amendment of that existing rule? It seems to be reasonable to me," Meehan said. Walsh said the CHL lures players away from the NCAA with a promise. The CHL sells itself as the best development league in hockey. And it tells players that if they don't succeed as pro, they'll get roughly one year of university expenses paid for every year they play in the CHL. "That's the covenant, but it's not true," Walsh said. "It's a blatant, intentional misrepresentation of the way these packages operate in the real world." Walsh cited as an example a hypothetical case of a player who plays four years in junior hockey and then goes to the AHL, ECHL or Europe for three or four years. At 24, it's become clear that player will not make the NHL and wants to go to school. "Why is his CHL education package no longer available to him?" Walsh said. "He played in the CHL, he did his time and earned his education money. It's his money. Why has it been forfeited? No one from the CHL has ever been able to give me a logical justification for this."
def addFriendship(self, userID, friendID): if userID == friendID: print("WARNING: You cannot be friends with yourself") elif friendID in self.friendships[userID] or userID in self.friendships[friendID]: print("WARNING: Friendship already exists") else: self.friendships[userID].add(friendID) self.friendships[friendID].add(userID)
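The containing class is not shown above, so here is a minimal hypothetical harness illustrating the invariant the method relies on: self.friendships maps each user ID to a set of friend IDs, and every edge is stored in both directions. The SocialGraph name and constructor are assumptions for illustration only.

# Hypothetical harness for addFriendship (the real containing class is not shown).
class SocialGraph:
    def __init__(self, num_users):
        # invariant assumed by addFriendship: userID -> set of friend IDs
        self.friendships = {i: set() for i in range(num_users)}

    def addFriendship(self, userID, friendID):
        if userID == friendID:
            print("WARNING: You cannot be friends with yourself")
        elif friendID in self.friendships[userID] or userID in self.friendships[friendID]:
            print("WARNING: Friendship already exists")
        else:
            self.friendships[userID].add(friendID)
            self.friendships[friendID].add(userID)

g = SocialGraph(3)
g.addFriendship(0, 1)   # stored on both sides
g.addFriendship(1, 0)   # prints the duplicate warning
g.addFriendship(2, 2)   # prints the self-friendship warning
print(g.friendships)    # {0: {1}, 1: {0}, 2: set()}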
/** * Contains common attributes to a local variable table attribute structure. * * @author <a href="mailto:[email protected]">Vitor Carreira</a> * @version $Revision: 1.1 $ $Date: 2005/11/01 13:18:24 $ */ public abstract class LocalVariableCommonAttribute extends AttributeInfo { protected static final int INITIAL_LENGTH = 2; protected LocalVariableCommonEntry[] localVariableTable; /** * Get the list of local variable associations of the parent <tt>Code</tt> * structure as an array of <tt>LocalVariableCommonEntry</tt> structures. * * @return the array */ public LocalVariableCommonEntry[] getLocalVariableEntries() { return localVariableTable; } /** * Set the list of local variable associations of the parent <tt>Code</tt> * structure as an array of <tt>LocalVariableCommonEntry</tt> structures. * * @param localVariableEntries the array */ public void setLocalVariableEntries(LocalVariableCommonEntry[] localVariableEntries) { this.localVariableTable = localVariableEntries; } public void write(DataOutput out) throws InvalidByteCodeException, IOException { super.write(out); int localVariableTableLength = getLength(localVariableTable); out.writeShort(localVariableTableLength); for (int i = 0; i < localVariableTableLength; i++) { localVariableTable[i].write(out); } if (debug) debug("wrote "); } }
After a recent readers’ poll which didn’t determine but helped define the choices, Time magazine named Mark Zuckerberg its Person of the Year for 2010 for "changing how we all live our lives." (Time readers, not the most radical bunch, picked Julian Assange as the winner and had Zuckerberg ranked 10th.) Time‘s runners-up included Assange, Hamid Karzai and Sarah Palin. Since we here at The Nation consider our readership the most politically informed and intelligent of any publication in the country, we thought it would be informative to conduct a Nation readers’ poll to determine the Person of the Year for our own community. The response was strong and, interestingly, WikiLeaks founder Assange was also our clear-cut winner because, as Lorna Singh pointed out, "we need to see how we were lied to," and, as Mike Pribula wrote, "he has reminded us about the importance of integrity in diplomacy and democratic ideals in our republic." Finishing a strong second was the Senate’s only socialist Bernie Sanders, a great friend of The Nation, because, as Carol Kivi nicely put it, "he spoke up for what he thinks is right, which is to oppose the tax breaks for the wealthy and ensure that the successful social security program remains intact." The third runner-up, Bradley Manning, is the US Army soldier who was charged in July 2010 with the unauthorized disclosure of US classified information and is being held in solitary confinement at the Marine Corps Brig, Quantico, Virginia. As Tom Baurain noted, Manning "broke a very serious oath to the military to help shed light on our ‘New Dark Age’ of information." Elizabeth Warren, the Harvard professor who conceived of the Consumer Financial Protection Bureau ("and consistently challenged the pro-business economic orthodoxy of Tim Geithner and Larry Summers") and MSNBC host Rachel Maddow ("the smartest and most effective progressive on television") rounded out the top five. Numerous other candidates received multiple votes, including Reps.
Nancy Pelosi ("the most effective speaker for many years who has stayed true to her progressive supporters"), Barney Frank ("because he stood up for working Americans against the egregious tax cuts for the wealthy") and Alan Grayson ("for being a bold progressive when and where most people wouldn’t"), Secretary of State Hillary Clinton, radio commentator Amy Goodman and Dan Choi, the former American infantry officer who presented the most visible public challenge to the Don’t Ask, Don’t Tell policy, which, until its recent repeal, forbid lesbian, bisexual and gay service members from serving openly in the US armed forces. Finally, honorable mention, courtesy of a reader who chose to remain anonymous, goes to "The Unemployed. Why? Because it’s time they get something, even if it’s just an award." Thank you for voting. Here’s to a progressive 2011!
/* Runs the program; returns when the code runs out. */
int Interpreter::Execute()
{
    // No object instance exists yet for the global entry point.
    ObjectInstance *Instance = NULL;

    // The program entry is registered in the pool under "__maciaentry".
    CodeObject *EntryObj = m_pPool->LookupObject("__maciaentry");
    if (EntryObj == NULL) {
        printf("Failed to locate program entry point\n");
        return -1;
    }
    return ExecuteCode(Instance, EntryObj->GetCode());
}
<gh_stars>0 import os import zipfile from pathlib import Path import streamlit as st import pandas as pd import shutil import pickle import numpy as np from source.bracket_pdf_parser import load_or_generate_bracket_data, create_empty_score_csv_from_directory from source.bracket_outcomes import load_or_generate_outcome_matrix from source.bracket_pool_analyzer import load_or_generate_bracket_pool_scores, print_money_chances, \ print_sweet_16_case_probabilities, print_elite_8_case_probabilities from pages.utils import page_header, downloads_path, upload_folder, save_uploadedfile, markdown_file_downloader, \ static_path, all_games_dict, optional_winner_selectbox, game_type_dict, team_id_dict, get_outcome_matrix, \ index_list_2021, elite_8_dict from source.utils import generate_round_array def sweet_16_individual_brackets(score_array, bracket_matrix, current_score_array, bracket_index, selected_outcome_matrix, likelihood_array): winner_dict = {} round_array = generate_round_array(16) for i in range(15): winner_dict[i] = None for i in range(0, 8): print(i) game_dict = all_games_dict[i] # winner = optional_winner_selectbox(game_name=game_dict['name'], team0=game_dict['team0'], # team1=game_dict['team1'], i=i, index=2) option_list = [game_dict['team0'], game_dict['team1'], 'None'] index = index_list_2021[i] if index == -1: index = len(option_list) - 1 winner = optional_winner_selectbox(game_name=game_dict['name'], game_type=game_type_dict[round_array[i]], option_list=option_list, i=-1, index=index) if winner == 'None': winner = None winner_dict[i] = winner for i in range(8, 15): print(i) game_dict = all_games_dict[i] print(game_dict) team0 = winner_dict[game_dict['team0']] team1 = winner_dict[game_dict['team1']] print(team0) print(team1) print(game_dict['name'].format(team0=team0, team1=team1)) print(team0 is not None) print(team1 is not None) if team0 is not None or team1 is not None: option_list = [] if team0 is None: team0 = 'Other' else: option_list.append(team0) if team1 is None: team1 = 'Other' else: option_list.append(team1) option_list.append('None') index = index_list_2021[i] if index == -1: index = len(option_list) - 1 winner = optional_winner_selectbox(game_name=game_dict['name'].format(team0=team0, team1=team1), game_type=game_type_dict[round_array[i]], option_list=option_list, i=-1, index=index) if winner == 'None': winner = None winner_dict[i] = winner # score_array = np.array([4,8,16,32]) for i in range(15): winner_key = winner_dict[i] if winner_key is not None: winner_key = winner_key.lower() winner_id = team_id_dict[winner_key] winner_idx = winner_id * 4 + round_array[i] winner_value = score_array[round_array[i]] print(selected_outcome_matrix.shape) selected_outcomes = selected_outcome_matrix[winner_idx, :] == winner_value likelihood_array = likelihood_array[selected_outcomes] selected_outcome_matrix = selected_outcome_matrix[:, selected_outcomes] print(selected_outcome_matrix.shape) st.subheader('Who should I root for?') with st.spinner('Calculating Outcomes...'): total_outcomes = selected_outcome_matrix.shape[1] bracket_pool_scores = load_or_generate_bracket_pool_scores(static_path, bracket_matrix, selected_outcome_matrix, current_score_array, force_generation=False) # df_money_chances = print_money_chances(bracket_list, bracket_pool_scores) likelihood_array = likelihood_array / np.sum(likelihood_array) # df_money_chances = print_money_chances(bracket_list, bracket_pool_scores[bracket_index, :], likelihood_array) sweet_16_case_dict = 
print_sweet_16_case_probabilities(bracket_index, bracket_matrix, bracket_pool_scores, selected_outcome_matrix, likelihood_array) game = 0 count_tracker = 0 # # team0_dict['win_paths'] = team_0_win_count # team0_dict['win_paths_delta'] = team_0_win_count - base_total_paths # team0_dict['win_percent'] = team_0_win_count / total_outcomes * 100 # team0_dict['win_percent_delta'] = team0_dict['win_percent'] - base_paths_percent # team0_dict['win_likelihood'] = np.sum(team_0_likeliood) * 100 # team0_dict['win_likelihood_delta'] = team0_dict['win_likelihood'] - base_likelihood # st.write('<p style="color:green">THIS TEXT WILL BE RED</p>', unsafe_allow_html=True)6 for key in sweet_16_case_dict.keys(): if count_tracker == 0: st.subheader('Game {game}: {game_name}'.format( game=game, game_name=all_games_dict[game]['name'] )) win_string = 'Win paths left if {team_name} wins: {win_paths} (Change: {win_paths_delta})'.format( team_name=key, win_paths=sweet_16_case_dict[key]['win_paths'], win_paths_delta=sweet_16_case_dict[key]['win_paths_delta'], ) st.write(win_string) if sweet_16_case_dict[key]['win_percent_delta'] > 0: color = 'green' elif sweet_16_case_dict[key]['win_percent_delta'] < 0: color = 'red' else: color = 'black' win_string = 'Win path percentages left if {team_name} wins: {win_percent:2f}% (Change: <font style="color:{color}">{win_percent_delta:2f}%</font>)'.format( team_name=key, win_percent=sweet_16_case_dict[key]['win_percent'], win_percent_delta=sweet_16_case_dict[key]['win_percent_delta'], color=color ) # st.write(win_string) print(win_string) st.markdown(win_string, unsafe_allow_html=True) if sweet_16_case_dict[key]['win_likelihood_delta'] > 0: color = 'green' elif sweet_16_case_dict[key]['win_likelihood_delta'] < 0: color = 'red' else: color = 'black' win_string = 'Win likelihood left if {team_name} wins: {win_likelihood:2f}% (Change: <font style="color:{color}">{win_likelihood_delta:2f}%</font>)'.format( team_name=key, win_likelihood=sweet_16_case_dict[key]['win_likelihood'], win_likelihood_delta=sweet_16_case_dict[key]['win_likelihood_delta'], color=color ) print(win_string) st.write(win_string, unsafe_allow_html=True) game += count_tracker if count_tracker == 0: count_tracker = 1 else: count_tracker = 0 def elite_8_individual_brackets(score_array, bracket_matrix, current_score_array, bracket_index, selected_outcome_matrix, likelihood_array): winner_dict = {} round_array = generate_round_array(16) sweet_16_winners = ['Gonzaga', 'USC', 'Michigan', 'UCLA', 'Baylor', 'Arkansas', 'Oregon St.', 'Houston'] for i in range(15): winner_dict[i] = None for i in range(0, 8): winner_dict[i] = sweet_16_winners[i] for i in range(8, 15): print(i) game_dict = all_games_dict[i] print(game_dict) team0 = winner_dict[game_dict['team0']] team1 = winner_dict[game_dict['team1']] print(team0) print(team1) print(game_dict['name'].format(team0=team0, team1=team1)) print(team0 is not None) print(team1 is not None) if team0 is not None or team1 is not None: option_list = [] if team0 is None: team0 = 'Other' else: option_list.append(team0) if team1 is None: team1 = 'Other' else: option_list.append(team1) option_list.append('None') index = index_list_2021[i] if index == -1: index = len(option_list) - 1 winner = optional_winner_selectbox(game_name=game_dict['name'].format(team0=team0, team1=team1), game_type=game_type_dict[round_array[i]], option_list=option_list, i=-1, index=index) if winner == 'None': winner = None winner_dict[i] = winner # score_array = np.array([4,8,16,32]) for i in range(15): winner_key = 
winner_dict[i] if winner_key is not None: winner_key = winner_key.lower() winner_id = team_id_dict[winner_key] winner_idx = winner_id * 4 + round_array[i] winner_value = score_array[round_array[i]] print(selected_outcome_matrix.shape) selected_outcomes = selected_outcome_matrix[winner_idx, :] == winner_value likelihood_array = likelihood_array[selected_outcomes] selected_outcome_matrix = selected_outcome_matrix[:, selected_outcomes] print(selected_outcome_matrix.shape) st.subheader('Who should I root for?') with st.spinner('Calculating Outcomes...'): total_outcomes = selected_outcome_matrix.shape[1] bracket_pool_scores = load_or_generate_bracket_pool_scores(static_path, bracket_matrix, selected_outcome_matrix, current_score_array, force_generation=False) # df_money_chances = print_money_chances(bracket_list, bracket_pool_scores) likelihood_array = likelihood_array / np.sum(likelihood_array) # df_money_chances = print_money_chances(bracket_list, bracket_pool_scores[bracket_index, :], likelihood_array) elite_8_case_dict = print_elite_8_case_probabilities(bracket_index, bracket_matrix, bracket_pool_scores, selected_outcome_matrix, likelihood_array) game = 0 count_tracker = 0 # # team0_dict['win_paths'] = team_0_win_count # team0_dict['win_paths_delta'] = team_0_win_count - base_total_paths # team0_dict['win_percent'] = team_0_win_count / total_outcomes * 100 # team0_dict['win_percent_delta'] = team0_dict['win_percent'] - base_paths_percent # team0_dict['win_likelihood'] = np.sum(team_0_likeliood) * 100 # team0_dict['win_likelihood_delta'] = team0_dict['win_likelihood'] - base_likelihood # st.write('<p style="color:green">THIS TEXT WILL BE RED</p>', unsafe_allow_html=True)6 for key in elite_8_case_dict.keys(): if count_tracker == 0: st.subheader('Game {game}: {game_name}'.format( game=game, game_name=elite_8_dict[game]['name'] )) win_string = 'Win paths left if {team_name} wins: {win_paths} (Change: {win_paths_delta})'.format( team_name=key, win_paths=elite_8_case_dict[key]['win_paths'], win_paths_delta=elite_8_case_dict[key]['win_paths_delta'], ) st.write(win_string) if elite_8_case_dict[key]['win_percent_delta'] > 0: color = 'green' elif elite_8_case_dict[key]['win_percent_delta'] < 0: color = 'red' else: color = 'black' win_string = 'Win path percentages left if {team_name} wins: {win_percent:2f}% (Change: <font style="color:{color}">{win_percent_delta:2f}%</font>)'.format( team_name=key, win_percent=elite_8_case_dict[key]['win_percent'], win_percent_delta=elite_8_case_dict[key]['win_percent_delta'], color=color ) # st.write(win_string) print(win_string) st.markdown(win_string, unsafe_allow_html=True) if elite_8_case_dict[key]['win_likelihood_delta'] > 0: color = 'green' elif elite_8_case_dict[key]['win_likelihood_delta'] < 0: color = 'red' else: color = 'black' win_string = 'Win likelihood left if {team_name} wins: {win_likelihood:2f}% (Change: <font style="color:{color}">{win_likelihood_delta:2f}%</font>)'.format( team_name=key, win_likelihood=elite_8_case_dict[key]['win_likelihood'], win_likelihood_delta=elite_8_case_dict[key]['win_likelihood_delta'], color=color ) print(win_string) st.write(win_string, unsafe_allow_html=True) game += count_tracker if count_tracker == 0: count_tracker = 1 else: count_tracker = 0 def individual_brackets(bracket_list, bracket_matrix, current_score_array, df_bracket_pool, score_array): page_header() if bracket_list is not None and bracket_matrix is not None and current_score_array is not None: st.header('4.0 Individual Brackets') outcome_matrix, 
likelihood_array = get_outcome_matrix(score_array) selected_outcome_matrix = outcome_matrix.copy() names = df_bracket_pool.sort_values('name').loc[:, 'name'].tolist() bracket_name = st.selectbox('Select a row to edit: ', names) print(bracket_name) df_bracket = df_bracket_pool[df_bracket_pool['name'] == bracket_name].copy() bracket_index = df_bracket['idx'].values[0] data_mode = st.selectbox('What do you want to do with the bracket pool data?', ['Inspect Data', 'Look at Scenarios']) if data_mode == 'Look at Scenarios': data_mode = st.selectbox('Which round?', ['Sweet 16', 'Elite 8']) if data_mode == 'Sweet 16': sweet_16_individual_brackets(score_array, bracket_matrix, current_score_array, bracket_index, selected_outcome_matrix, likelihood_array) elif data_mode == 'Elite 8': elite_8_individual_brackets(score_array, bracket_matrix, current_score_array, bracket_index, selected_outcome_matrix, likelihood_array)
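The core trick the scenario functions above repeat is conditioning: each confirmed winner selects the columns of the outcome matrix consistent with that result, and the likelihood vector is filtered and renormalized to match. Here is a stripped-down sketch of that masking step with a tiny made-up matrix (shapes and values are illustrative only).

# Stripped-down sketch of the conditioning step used above (made-up 3-outcome matrix).
import numpy as np

outcome_matrix = np.array([[4, 0, 4],    # row = team/round slot, col = tournament outcome
                           [0, 4, 0]])
likelihood = np.array([0.5, 0.3, 0.2])

winner_idx, winner_value = 0, 4          # "this team scored in this round"
mask = outcome_matrix[winner_idx, :] == winner_value
likelihood = likelihood[mask]
outcome_matrix = outcome_matrix[:, mask]

likelihood = likelihood / likelihood.sum()   # renormalize over surviving outcomes
print(outcome_matrix, likelihood)            # 2 outcomes remain, weights approx [0.714, 0.286]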
import { ICollisionResolution } from "./CollisionResolution"; interface ICollisionResponse { resolveA: ICollisionResolution; resolveB: ICollisionResolution; } export { ICollisionResponse }
Warp speed design: a rapid design method for use with children
This paper introduces a new design method - Warp Speed Design - for use with older children (aged 9+) in the design of workable tangible games. The rationale for the method is presented, and then a workshop in which the method was evaluated is described. The method introduced children to basic programming concepts and worked surprisingly well. Almost all of the designs the children produced were so well specified at the end of the brief workshop that they could be developed with very little uncertainty.
package unicap.uber.model; import java.util.List; public interface IRepositorioCorridas { public double criaCorrida(IPassageiro passDaCorrida, IMotorista motDaCorrida, String origem, String destino); public List<String> listarCorridas(int telefone); }
/** The configuration mode for the robot. */ class ConfigState : public State { protected: Flag m_reread; Flag m_strengthLo, m_strengthMd, m_strengthHi; Flag m_kickRest, m_kickCocked; Flag m_shoulderStowed, m_shoulderRaised, m_shoulderGTFU; Flag m_elbowTarget, m_elbowMin, m_elbowMax; public: ConfigState(BossRobot *r); virtual void Enter(); virtual void Exit(); virtual void Step(); protected: void HandleStrengthPresetting(); void HandleKickPresetting(); void HandleShoulderPresetting(); void HandleElbowPresetting(); }
Ronald Koeman has only been at Everton five minutes - but he is already falling victim to the transfer rumour mill. Indeed, the Blues boss is now being linked with players from far and wide - but reports today reckon he could dip into the Championship for his first signing. But it's reported that Newcastle could want up to £25m for Georginio Wijnaldum! Elsewhere today, Bill Kenwright has been speaking about Everton's hunt for a director of football. He told the Daily Mail: "We are talking to various Directors of Football and Ronald has worked with a lot of them. He will embrace that, as well as the Everton way." The Manchester Evening News reckon Everton are ready to stand firm in the face of interest from Manchester United and Man City for John Stones. And finally, Everton are to speak to Romelu Lukaku's agent in a bid to persuade the Belgian striker to stay put at Goodison Park. Watch the video above to see the latest round-up of transfer rumours.
import numpy as np

# norm_angle is assumed to come from the surrounding codebase
# (e.g. an mmrotate-style transforms module); adjust the import as needed.
from .transforms import norm_angle


def bbox_flip(bboxes, img_shape, direction='horizontal', version='oc'):
    """Flip rotated boxes laid out as (cx, cy, w, h, angle).

    `bboxes` is expected to be a torch.Tensor (hence `.clone()`) and
    `img_shape` is given as (height, width).
    """
    assert bboxes.shape[-1] % 5 == 0
    assert direction in ['horizontal', 'vertical', 'diagonal']
    flipped = bboxes.clone()
    # Mirror the centre point; width and height are unchanged by the flip itself.
    if direction == 'horizontal':
        flipped[:, 0] = img_shape[1] - bboxes[:, 0] - 1
    elif direction == 'vertical':
        flipped[:, 1] = img_shape[0] - bboxes[:, 1] - 1
    else:  # diagonal: flip both axes
        flipped[:, 0] = img_shape[1] - bboxes[:, 0] - 1
        flipped[:, 1] = img_shape[0] - bboxes[:, 1] - 1
    if version == 'oc':
        # Under the OpenCV ('oc') angle convention, flipping mirrors the
        # angle about pi/2 and swaps width and height for rotated boxes.
        rotated_flag = (bboxes[:, 4] != np.pi / 2)
        flipped[rotated_flag, 4] = np.pi / 2 - bboxes[rotated_flag, 4]
        flipped[rotated_flag, 2] = bboxes[rotated_flag, 3]
        flipped[rotated_flag, 3] = bboxes[rotated_flag, 2]
    else:
        flipped[:, 4] = norm_angle(np.pi - bboxes[:, 4], version)
    return flipped
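A minimal usage sketch for bbox_flip, assuming torch tensors in (cx, cy, w, h, angle) layout and the default 'oc' angle convention; the sample box values are illustrative only:

import numpy as np
import torch

# One rotated box at (cx=30, cy=40), 20x10, angled pi/6.
boxes = torch.tensor([[30.0, 40.0, 20.0, 10.0, np.pi / 6]])

# Horizontal flip in a 100x200 (h, w) image: cx -> 200 - 30 - 1 = 169,
# the angle becomes pi/2 - pi/6, and width/height are swapped.
flipped = bbox_flip(boxes, img_shape=(100, 200), direction='horizontal')
print(flipped)  # tensor([[169.0000, 40.0000, 10.0000, 20.0000, 1.0472]])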
from typing import Tuple, Union  # module-level imports assumed

@classmethod
def from_zipped(
    cls, *setters_and_weights: Union["StateSetter", Tuple["StateSetter", float]]
) -> "WeightedSampleSetter":
    """Build a WeightedSampleSetter from setters and (setter, weight) pairs.

    Bare setters receive an implicit weight of 1.
    """
    setters = []
    weights = []
    for value in setters_and_weights:
        if isinstance(value, tuple):
            setter, weight = value
        else:
            setter, weight = value, 1.
        setters.append(setter)
        weights.append(weight)
    return cls(tuple(setters), tuple(weights))
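A hedged usage sketch mixing bare setters and weighted pairs; DefaultState and RandomState stand in for whatever StateSetter implementations the surrounding project provides:

# Weighted sampling between two state setters; class names are illustrative.
sampler = WeightedSampleSetter.from_zipped(
    DefaultState(),         # implicit weight 1.0
    (RandomState(), 2.0),   # sampled twice as often
)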
Feilhaber ready to make most of a national team second chance he had given up on A year after seemingly giving up on a future with the U.S. national team, Benny Feilhaber is back in the fold after Jurgen Klinsmann's departure. CARSON, Calif. — Almost exactly a year after he appeared to light a match and torch his chances of ever again playing for the U.S. national team, Benny Feilhaber was on the field, wearing the familiar U.S. colors, smiling and enjoying an opportunity he had essentially given up on. The U.S. January camp began last week and Feilhaber was back in a setting he knew all too well, but one he didn't figure to be in again after going after then-U.S. coach Jurgen Klinsmann around this same time last year. Klinsmann is gone now, having been fired in November and replaced by Bruce Arena. The coaching change figured to benefit a host of players whom Klinsmann didn't rate, and Feilhaber was at the top of that list. "It's something that I wasn't sure would ever happen again," Feilhaber said of returning to the national team. "It feels like a second chance, and because of that, it's very motivating. I'm excited to be back and I want to make my stamp in this camp. "It didn't feel like I would be back," Feilhaber said. "I had turned that page in my career and tried to focus on the things I could and putting together good seasons. I always watched the national team and it's something that I looked back on fondly, when I had my national team chances with Bob. I closed that book, and to have it reopen again unexpectedly is an amazing feeling." Arena identified Feilhaber as someone he planned on bringing in shortly after being hired to replace Klinsmann. Arena saw Feilhaber as someone who could help fill the team's need for playmaking midfielders, and sees a player who has evolved and matured in the six years since being a part of the 2010 U.S. World Cup team. "I think he's a better player. He's more experienced. I just think he's a very good player, and to be honest I haven't checked his history with the national team after 2010, and that's with all of them," Arena said. "I haven't really looked into what happened with Benny from 2010 on, although I've been around him in Major League Soccer and always thought he was a good player. My impressions in 2010 was that he did a good job, and he's still a player that deserves consideration here." Feilhaber never did manage to break through during Klinsmann's tenure, only earning a handful of looks. Those looks ended after the January camp in 2014, even though Feilhaber flourished with Sporting KC, becoming a finalist for MLS MVP after the 2015 season. It was Feilhaber's frustration with that prolonged absence that led him to rip into Klinsmann's personnel decisions. “I don’t think that Jurgen calls in the best players that are available to him,” Feilhaber said when asked by Goal about missing out on another national team camp. “That, for me, is a problem. There’s players that are better than other players that don’t get an opportunity with the national team. That, for me, is a much bigger deal than anything else. Everybody points fingers at certain things. But for me, that’s the most important thing.” When asked about those comments as he found himself back with the national team, Feilhaber made no apologies for his remarks. "I think it was just me being honest," Feilhaber said. "I think at that point, I had already closed the book on that chapter of my career and it was something that was just an honest answer. 
I don't want to harp too much on that, but it was an honest answer and I'm looking forward to the opportunity at hand right now and not look back at what has happened in the past years." Feilhaber's comments about Klinsmann were surprising to some, but not to those fully aware of his frustrations with missing out on the national team during the prime of his career. "I always knew he was frustrated by the national team situation," coach Peter Vermes told Goal. "At the end, a coach is going to make their decisions, and they're either going to be correct or people are going to think they're wrong. I think enough was enough at the end and told him he didn't have to talk about it anymore. Him saying anything else wasn't going to help. After that, he realized there was nothing more to say." Vermes believes Feilhaber is returning to the national team as a player who can absolutely help the side in its quest to qualify for the 2018 World Cup. "He's one of the best technical players, but technical is one thing," Vermes said of Feilhaber. "He's a great decision maker on the field. He sees the game in ways that a lot of players don't, and the bigger the game, the more he'll take the ball. He's not afraid in those moments. He actually wants it." Feilhaber has rounded out his game since joining Sporting KC four years ago, improving the defensive aspects of his game and his overall work rate, which isn't a surprise given Sporting KC's high-pressure style. "With us, we've put a demand on him on the defensive side and he's taken that and owned the responsibility of it," Vermes said. "He's been accountable for it. Every year he's gotten better. A lot of times he doubted himself early on in his time with us, he would get to a point where he didn't think he could push himself further, and we let him know he could push himself further. He has more in him than he thinks he does from the physical aspect." Feilhaber is one of several talented midfielders in the January camp vying for playing time under Arena, joining Sacha Kljestan and Darlington Nagbe in a competition to provide the creative midfielder Arena believes has been missing from the national team. After more than two years in what felt like national team exile, and six years as a national team afterthought, Feilhaber is determined to make the most of this opportunity he had previously given up on. "It's been six years and I've changed a lot," Feilhaber said. "I've become a better player. That's one of the reasons why I'm so motivated now. I think when I played on the national team with Bob (Bradley), I was still developing as a player, I wasn't at my best. I feel the last two or three seasons, I played at my best pretty consistently and it's exciting to have the opportunity to play at the highest level, the international level, with your national team when you are playing at the highest level of your own career."
Molecular analysis of the androgen receptor gene in 52 patients with complete or partial androgen insensitivity syndrome: a collaborative study. In patients with androgen insensitivity syndrome (AIS), RFLP study of the androgen receptor gene made it possible to analyze whether deletions or mutations could be responsible for abnormalities in androgen responsiveness. We studied RFLPs of DNA from 25 46,XY patients with partial AIS (PAIS), defined as a concentration of androgen receptor in genital-skin fibroblasts of less than 340 fmol/mg DNA, and DNA from 27 46,XY patients with complete AIS (CAIS) with no detectable androgen receptor sites. DNA samples were digested with BamHI, EcoRI, HindIII and TaqI restriction enzymes and hybridized with three cDNA probes covering the three domains of the androgen receptor. When DNA from the mother and an unaffected brother was available, we analyzed the two described androgen receptor gene polymorphisms, the HindIII and the exon 1 CAG repeat polymorphisms, in order to distinguish the two maternal X chromosomes and to detect carriers of AIS. We did not find any large deletion among the 52 patients. We observed a heterozygous mother in 3 of 14 families studied with the HindIII polymorphism, and in 12 of 25 families using the exon 1 CAG repeat polymorphism. This study suggests that in AIS, abnormalities in the androgen receptor response could be related to point mutations or microdeletions rather than to gross structural alterations of the androgen receptor gene. Furthermore, unless the point mutation has already been described, exon 1 and HindIII polymorphism studies would enable the identification of carriers in 50% of families, and the prenatal diagnosis of AIS.
/**
 * {@code assert} that no errors have been logged.
 */
public void assertNoErrors() {
    if (!getErrors().isEmpty()) {
        throw new Error(getErrors().toString());
    }
}
Global biogeography of autotroph chemistry: is insolation a driving force?

Synthesis. The tissue chemistry of plants can influence ecosystem processes including growth, herbivory, and decomposition. Our comparison of nitrogen and phosphorus in over 1700 autotroph taxa demonstrates that latitudinal trends in tissue chemistry are consistent across non-vascular and vascular species in freshwater, terrestrial, and marine ecosystems. Tissue chemistry varies most within species and taxonomic lineages, yet the nitrogen to phosphorus ratio within individuals is strikingly similar among species in different ecosystems. These results shed new light on existing hypotheses, suggesting that light (e.g. photon flux) and growing season duration are primary drivers of latitudinal gradients in tissue chemistry, but providing little support for temperature, nutrient supply, or soil substrate age.

Photoautotroph nitrogen (N) and phosphorus (P) tissue concentrations can influence ecosystem function via processes including growth, decomposition, and consumption, and may reflect traits maintaining coexistence. Studies in terrestrial systems have led to hypotheses that latitudinal trends in the N and P content of leaves may be driven by soil substrate age, environmental temperature, or season length; however, terrestrial patterns alone cannot differentiate these mechanisms. Here, we demonstrate that broad geographical patterns of N and P in freshwater and marine multicellular photoautotrophs are concordant with those in terrestrial ecosystems. Our > 6800 record database reveals that mean tissue N and P increase with latitude in all ecosystems, but P increases more rapidly, causing N:P to decline; mean N:P scaling within individuals also is identical among systems, despite very different evolutionary environments. A partitioning of the variance in these data suggests that species composition and local environmental context likely lead to the variation observed within a latitudinal band. However, the consistency of trends in photosynthetic tissue chemistry across Earth’s ecosystems suggests that biogeographical gradients in insolation and growing season length may constrain tissue N and P, whereas global trends in temperature, nutrient supply, and soil substrate age are unlikely to generate the consistent latitudinal trends among ecosystems. Thus, this cross-ecosystem comparison suggests a new hypothesis, global patterns of insolation, while also providing a new perspective on other mechanisms that have been hypothesized to underlie latitudinal trends in photosynthetic tissue chemistry.
// **********************************************************************
//
// Copyright (c) 2001-2004
// StarMiddleware.net
// www.StarMiddleware.net
//
// All Rights Reserved
//
// Author: <NAME> <EMAIL>
// Author: <NAME> <EMAIL>
// Author: <NAME> <EMAIL>
//
// **********************************************************************

// Version: 1.0.0

#ifndef __SessionContext_impl_h__
#define __SessionContext_impl_h__

#include <CORBA.h>
#include <EnterpriseComponent.h>
#include <CCMContext_impl.h>

namespace Container
{
    class SessionContext_impl : virtual public ::Components::SessionContext,
                                virtual public CCMContext_impl
    {
    protected:

    public:
        SessionContext_impl();
        virtual ~SessionContext_impl();

        // void setCompositionManager(CompositionManager_ptr current);
        //
        virtual ::Components::CCMHome_ptr get_CCM_home();
        CORBA::Object_ptr get_CCM_object();
    };
}

#endif
/*
 * Author: <NAME>
 * Nearest neighbor search algorithm demo
 */
#ifndef SORT_H
#define SORT_H

#include <globals.hpp>
#include <vector>

struct KeyValuePair {
    int cellID;  // Grid cell
    int index;   // Particle index
};

class NNS {
private:
    // Depending on use case make more things private and use getters and setters
public:
    int cellLength;
    int bufferSize;  // as a buffer and to handle truncation from dividing by cell size
    int cellCount;
    int nonBufferCellEstimate;
    int cellDimx;
    int cellDimy;
    float simDimx_buffered;
    float simDimy_buffered;
    std::vector<uint32_t> cellStart;
    std::vector<uint32_t> cellEnd;
    int particleCount;

    KeyValuePair makeKeyValue(int cell, int idx);
    std::vector<KeyValuePair> cellIndexPair;

    void init(int count, int dimx, int dimy, int cell, int buffer);
    void hash(std::vector<float>& locations);
    int hash(float2 location);
    void kvSort();
    void findCellStartEnd();
    void reorder(std::vector<float>& locations, std::vector<float>& sortedLoc);

    // Printing main data structures used in the NNS
    void printCellIndexPair(int printCount = 0);
    void printCellStartEnd(int printCount = 0);

    int getCellCount();
    int getNonBuffCellCount();
};

#endif // SORT_H
/*
 * Return the next insert location.  This also validates the input xid:
 * if the latest insert point is not for the same transaction id then this
 * will return an invalid undo pointer.
 */
UndoRecPtr
UndoLogGetNextInsertPtr(UndoLogNumber logno, TransactionId xid)
{
    UndoLogControl *log = get_undo_log_by_number(logno);
    TransactionId logxid;
    UndoRecPtr  insert;

    /* Read the insert point and the owning xid atomically under the log mutex. */
    LWLockAcquire(&log->mutex, LW_SHARED);
    insert = log->meta.insert;
    logxid = log->xid;
    LWLockRelease(&log->mutex);

    if (TransactionIdIsValid(logxid) && !TransactionIdEquals(logxid, xid))
        return InvalidUndoRecPtr;

    return MakeUndoRecPtr(logno, insert);
}
/*
Copyright 2019 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package driver

import (
	"context"
	"strings"

	"github.com/container-storage-interface/spec/lib/go/csi"
	"github.com/kubernetes-sigs/aws-fsx-csi-driver/pkg/cloud"
	"github.com/kubernetes-sigs/aws-fsx-csi-driver/pkg/util"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	"k8s.io/klog"
)

var (
	// controllerCaps represents the capability of controller service
	controllerCaps = []csi.ControllerServiceCapability_RPC_Type{
		csi.ControllerServiceCapability_RPC_CREATE_DELETE_VOLUME,
	}
)

func (d *Driver) CreateVolume(ctx context.Context, req *csi.CreateVolumeRequest) (*csi.CreateVolumeResponse, error) {
	klog.V(4).Infof("CreateVolume: called with args %#v", req)

	volName := req.GetName()
	if len(volName) == 0 {
		return nil, status.Error(codes.InvalidArgument, "Volume name not provided")
	}

	volCaps := req.GetVolumeCapabilities()
	if len(volCaps) == 0 {
		return nil, status.Error(codes.InvalidArgument, "Volume capabilities not provided")
	}

	if !d.isValidVolumeCapabilities(volCaps) {
		return nil, status.Error(codes.InvalidArgument, "Volume capabilities not supported")
	}

	// create a new volume with idempotency
	// idempotency is handled by `CreateFileSystem`
	capRange := req.GetCapacityRange()
	var volumeSizeGiB int64
	if capRange == nil {
		volumeSizeGiB = cloud.DefaultVolumeSize
	} else {
		volumeSizeGiB = util.RoundUp3600GiB(capRange.GetRequiredBytes())
	}

	volumeParams := req.GetParameters()
	subnetId := volumeParams["subnetId"]
	securityGroupIds := volumeParams["securityGroupIds"]
	fsOptions := &cloud.FileSystemOptions{
		CapacityGiB:      volumeSizeGiB,
		SubnetId:         subnetId,
		SecurityGroupIds: strings.Split(securityGroupIds, ","),
	}

	if val, ok := volumeParams["s3ImportPath"]; ok {
		fsOptions.S3ImportPath = val
	}

	if val, ok := volumeParams["s3ExportPath"]; ok {
		fsOptions.S3ExportPath = val
	}

	fs, err := d.cloud.CreateFileSystem(ctx, volName, fsOptions)
	if err != nil {
		switch err {
		case cloud.ErrFsExistsDiffSize:
			return nil, status.Error(codes.AlreadyExists, err.Error())
		default:
			return nil, status.Errorf(codes.Internal, "Could not create volume %q: %v", volName, err)
		}
	}

	err = d.cloud.WaitForFileSystemAvailable(ctx, fs.FileSystemId)
	if err != nil {
		return nil, status.Errorf(codes.Internal, "Filesystem is not ready: %v", err)
	}

	return newCreateVolumeResponse(fs), nil
}

func (d *Driver) DeleteVolume(ctx context.Context, req *csi.DeleteVolumeRequest) (*csi.DeleteVolumeResponse, error) {
	klog.V(4).Infof("DeleteVolume: called with args: %#v", req)
	volumeID := req.GetVolumeId()
	if len(volumeID) == 0 {
		return nil, status.Error(codes.InvalidArgument, "Volume ID not provided")
	}

	if err := d.cloud.DeleteFileSystem(ctx, volumeID); err != nil {
		if err == cloud.ErrNotFound {
			klog.V(4).Infof("DeleteVolume: volume not found, returning with success")
			return &csi.DeleteVolumeResponse{}, nil
		}
		return nil, status.Errorf(codes.Internal, "Could not delete volume ID %q: %v", volumeID, err)
	}
	return &csi.DeleteVolumeResponse{}, nil
}

func (d *Driver) ControllerPublishVolume(ctx context.Context, req *csi.ControllerPublishVolumeRequest) (*csi.ControllerPublishVolumeResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ControllerUnpublishVolume(ctx context.Context, req *csi.ControllerUnpublishVolumeRequest) (*csi.ControllerUnpublishVolumeResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ControllerGetCapabilities(ctx context.Context, req *csi.ControllerGetCapabilitiesRequest) (*csi.ControllerGetCapabilitiesResponse, error) {
	klog.V(4).Infof("ControllerGetCapabilities: called with args %#v", req)
	var caps []*csi.ControllerServiceCapability
	for _, cap := range controllerCaps {
		c := &csi.ControllerServiceCapability{
			Type: &csi.ControllerServiceCapability_Rpc{
				Rpc: &csi.ControllerServiceCapability_RPC{
					Type: cap,
				},
			},
		}
		caps = append(caps, c)
	}
	return &csi.ControllerGetCapabilitiesResponse{Capabilities: caps}, nil
}

func (d *Driver) GetCapacity(ctx context.Context, req *csi.GetCapacityRequest) (*csi.GetCapacityResponse, error) {
	klog.V(4).Infof("GetCapacity: called with args %#v", req)
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ListVolumes(ctx context.Context, req *csi.ListVolumesRequest) (*csi.ListVolumesResponse, error) {
	klog.V(4).Infof("ListVolumes: called with args %#v", req)
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ValidateVolumeCapabilities(ctx context.Context, req *csi.ValidateVolumeCapabilitiesRequest) (*csi.ValidateVolumeCapabilitiesResponse, error) {
	klog.V(4).Infof("ValidateVolumeCapabilities: called with args %#v", req)
	volumeID := req.GetVolumeId()
	if len(volumeID) == 0 {
		return nil, status.Error(codes.InvalidArgument, "Volume ID not provided")
	}

	volCaps := req.GetVolumeCapabilities()
	if len(volCaps) == 0 {
		return nil, status.Error(codes.InvalidArgument, "Volume capabilities not provided")
	}

	if _, err := d.cloud.DescribeFileSystem(ctx, volumeID); err != nil {
		if err == cloud.ErrNotFound {
			return nil, status.Error(codes.NotFound, "Volume not found")
		}
		return nil, status.Errorf(codes.Internal, "Could not get volume with ID %q: %v", volumeID, err)
	}

	confirmed := d.isValidVolumeCapabilities(volCaps)
	if confirmed {
		return &csi.ValidateVolumeCapabilitiesResponse{
			Confirmed: &csi.ValidateVolumeCapabilitiesResponse_Confirmed{
				// TODO if volume context is provided, should validate it too
				// VolumeContext:      req.GetVolumeContext(),
				VolumeCapabilities: volCaps,
				// TODO if parameters are provided, should validate them too
				// Parameters:      req.GetParameters(),
			},
		}, nil
	} else {
		return &csi.ValidateVolumeCapabilitiesResponse{}, nil
	}
}

func (d *Driver) isValidVolumeCapabilities(volCaps []*csi.VolumeCapability) bool {
	hasSupport := func(cap *csi.VolumeCapability) bool {
		for _, c := range volumeCaps {
			if c.GetMode() == cap.AccessMode.GetMode() {
				return true
			}
		}
		return false
	}

	foundAll := true
	for _, c := range volCaps {
		if !hasSupport(c) {
			foundAll = false
		}
	}
	return foundAll
}

func (d *Driver) CreateSnapshot(ctx context.Context, req *csi.CreateSnapshotRequest) (*csi.CreateSnapshotResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) DeleteSnapshot(ctx context.Context, req *csi.DeleteSnapshotRequest) (*csi.DeleteSnapshotResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ListSnapshots(ctx context.Context, req *csi.ListSnapshotsRequest) (*csi.ListSnapshotsResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func (d *Driver) ControllerExpandVolume(ctx context.Context, req *csi.ControllerExpandVolumeRequest) (*csi.ControllerExpandVolumeResponse, error) {
	return nil, status.Error(codes.Unimplemented, "")
}

func newCreateVolumeResponse(fs *cloud.FileSystem) *csi.CreateVolumeResponse {
	return &csi.CreateVolumeResponse{
		Volume: &csi.Volume{
			VolumeId:      fs.FileSystemId,
			CapacityBytes: util.GiBToBytes(fs.CapacityGiB),
			VolumeContext: map[string]string{
				"dnsname": fs.DnsName,
			},
		},
	}
}
Differential Expression of Sphingosine-1-Phosphate Receptors in Abdominal Aortic Aneurysms

Objective. Inflammation plays a key role in the pathophysiology of abdominal aortic aneurysms (AAAs). Newly discovered sphingosine-1-phosphate (S1P) receptors are critical in modulating the inflammatory response via prostaglandin production. The aim of the current study was to investigate the expression of different S1P receptors in AAAs compared with normal aortas at the protein level. Materials and Methods. Aortic specimens were harvested during aortic reconstructive surgery for the AAA group or during organ transplant for the control group. The protein expression of S1P1, 2 and 3 in AAAs and normal aortas was assessed by Western blotting and immunohistochemical analysis. Results. There were 40 AAAs and 20 control aortas collected for the receptor analysis. On Western blot analysis, S1P1 expression was not detected in either group; S1P2 protein was constitutively detected in both types of aortas, but its expression level was significantly decreased by 73% (P < 0.05) in AAAs compared with the control group. In contrast, strong S1P3 expression was detected in AAA aortas but not in normal aortas. Immunohistochemical staining showed similar results, except that a weak S1P3 signal was detectable in normal aortas. Conclusions. Western blot and staining results consistently showed downregulation of the S1P2 protein with simultaneous upregulation of the S1P3 protein in AAAs. Since these newly discovered receptors play an important role in the inflammatory cascade, modulation of S1P signaling, particularly via S1P2 and S1P3, could represent a novel therapeutic target in future AAA treatments.

Introduction. Abdominal aortic aneurysm (AAA) is the localized dilation of the infrarenal aorta. If surgical treatment is not applicable, an AAA progresses to rupture with a high mortality rate and causes 1%-3% of elderly male deaths in developed countries each year. Analysis of the aneurysmal wall has demonstrated that connective tissue degradation, increased atherosclerosis, and chronic inflammation are the common pathological features of AAAs. Sphingosine-1-phosphate (S1P) is a newly discovered low-molecular-weight zwitterionic lysophospholipid molecule that is generated from the metabolism of sphingomyelin by a series of enzymes including sphingosine kinase, S1P phosphatase, and S1P lyase in mammals. The main sources of S1P in plasma are platelets, while other cell types such as erythrocytes, neutrophils, and mononuclear cells can also produce and release S1P upon activation. S1P exerts a wide range of physiological activities, particularly inflammatory reactions, through interactions with five different receptor subtypes (1, 2, 3, 4, and 5). These are members of the endothelial differentiation gene family of G protein-coupled receptors, and differential expression of S1P receptors is thought to modulate the cellular inflammatory response. A precise S1P/S1P receptor balance is responsible for the signaling of cell growth and the regulation of cell metabolism in mammals. An imbalance of this system also participates in pathologic conditions such as cancer and inflammatory diseases. S1P2 is the major expressed S1P receptor, while S1P1 and S1P3 are only weakly expressed, in healthy vascular endothelial cells and vascular smooth muscle cells (VSMCs). Consistent with this receptor multiplicity and pleiotropic signaling, S1P receptors influence numerous cell functions.
In particular, differential expression of S1P receptors has been demonstrated to either promote or inhibit inflammatory infiltration in diverse cell types by inducing cyclooxygenase 2 (COX-2) expression with subsequent prostaglandin E2 (PGE2) or prostacyclin (PGI2) production. Our previous study showed that inflammatory mediators such as COX-2 and prostaglandin E2 are also widely expressed in AAA explants. These phenomena suggest that S1P receptors may play some role in the pathogenesis of AAA. The S1P1, S1P2, and S1P3 receptors are the major S1P receptor subtypes in the vascular system. However, the expression of these three S1P receptors in AAA remains unknown. In the present study, we aimed to investigate S1P1, 2 and 3 receptor protein expression in AAAs and compare it with healthy aortas.

Human Abdominal Aortic Tissue Collection. Cross-sections of the aneurysm wall, dissected free of luminal thrombus, were removed from AAA patients who underwent open surgical aneurysmal repair in a local hospital. Control aortic tissues were obtained from the corresponding location in healthy organ donors without known cardiovascular diseases or connective tissue disorders during the transplant operation. Aneurysmal patients with Marfan syndrome and other connective tissue disorders were excluded from this study. Each collected specimen was thoroughly washed with normal saline solution, and the tunica intima was then removed using a scalpel and forceps. All specimens were divided into two parts, one kept in a −80 °C freezer for Western blotting and the other used for immunohistochemical analysis. All experiments were performed with approval from the local institution's ethics committee. Informed consent was obtained from AAA patients and organ donors' relatives.

Western Blot Analysis. Frozen tissues were first thawed and then lysed with cell lysis buffer (Cell Signaling Technology, Danvers, MA, USA) containing protease inhibitor (Roche, Basel, Switzerland). The protein concentration of each specimen was measured by the Bradford method using the Bio-Rad Protein Assay Kit (Bio-Rad Laboratories, Hercules, CA, USA) with bovine serum albumin (BSA) as the standard. After protein denaturation with loading buffer (pH 6.8; 24 mM Tris-HCl, 684 mM glycerol, 14 mM SDS, 142 mM beta-mercaptoethanol, 0.3 mM bromophenol blue), each sample (50 µg) was resolved on a 12% SDS-polyacrylamide gel electrophoresis (PAGE) gel (Bio-Rad Laboratories) at room temperature and then transferred onto a polyvinylidene fluoride (PVDF) membrane (Bio-Rad Laboratories) at 4 °C. After blocking in 10% nonfat milk diluted in TBS-0.01% Tween 20 (TBST) for two hours at room temperature, the membranes were incubated overnight at 4 °C with primary antibodies against the S1P1 receptor (Catalogue no. sc-48356, dilution 1:100, Santa Cruz Biotechnology, Santa Cruz, CA, USA), the S1P2 receptor (Catalogue no. sc-25491, dilution 1:200, Santa Cruz Biotechnology), or the S1P3 receptor (Catalogue no. sc-30024, dilution 1:100, Santa Cruz Biotechnology), with GAPDH (1:1000, Cell Signaling Technology) as the positive control. After membrane washing with TBST solution, HRP-conjugated goat anti-mouse (1:2000) or goat anti-rabbit (1:4000) secondary antibody (Dako, Glostrup, Denmark) was added and the membranes were incubated for 1 hour at room temperature.
After washing, signals were visualized with luminol reagents (Bio-Rad Laboratories) and the densitometry of each exposed blot was analyzed with ImageJ 1.44 software (National Institutes of Health, Bethesda, MD, USA). The relative expression of each studied receptor protein was calculated as the detected signal divided by the internal positive control (GAPDH) signal in each sample.

Immunohistochemical Study. The staining procedure was performed on paraffin-embedded aortic tissue sections (5 µm) according to the manufacturer's instructions (DakoCytomation EnVision+ System-HRP (DAB) Kit (Dako)). The specificity of the S1P receptor antibodies was first validated by positive and negative tests using healthy adult rat brain paraffin sections. Briefly, all sections were antigen retrieved with boiling sodium citrate buffer (pH 6) and incubated with either mouse anti-S1P1 receptor antibody (1:25), rabbit anti-S1P2 receptor antibody (1:100), or rabbit anti-S1P3 receptor antibody (1:100) (Santa Cruz Biotechnology) overnight at 4 °C. After staining, all specimens were dehydrated and sealed for microscopy. To avoid staining underestimation due to considerable regional variation, five continuous ×200 microscopy views of each stained specimen, chosen where the largest number of positively stained VSMCs appeared, were captured and recorded (Nikon, Tokyo, Japan). Two researchers scored the positive immunostaining using the scoring system of Wang and colleagues. Briefly, a proportion subscore from 0 to 4 (0 = 0% positively stained, 1 = 1%-25%, 2 = 26%-50%, 3 = 51%-75%, 4 = 76%-100%) and an intensity subscore from 0 to 3 (0 = no staining, 1 = weak, 2 = moderate, 3 = intense) were first assigned by each observer for each slide. A weighted score was then determined by multiplying the proportion subscore and the intensity subscore. Finally, the mean of the five weighted scores for each specimen was calculated.

Statistical Analysis. All data are expressed as means ± SD. Statistical analysis was performed with SPSS 18.0 software (SPSS, Chicago, IL, USA). Statistical differences between the two groups were determined by independent-sample t-tests. Corrections for patient age and sex on receptor expression levels were made using a linear model. A P value < 0.05 was considered statistically significant.

Figure 1: Representative pictures of Western blot analysis of S1P receptors in AAA and control aortic tissues. S1P1 expression was undetectable in both AAA and normal aortas, while it was detectable in rat brain tissue (a). The expression level of S1P2 protein was significantly decreased in AAAs compared with control aortas (b). S1P3 expression was detectable in AAAs only (c). A GAPDH antibody was used as the internal positive control in each Western blot experiment.

Patients. Specimens were collected from surgical patients and organ donors. Most patients were male, and the control patients were younger than the AAA patients. Patient characteristics are listed in Table 1.

Western Blot Analysis. S1P1 receptor protein (38 kDa) was undetectable in both tissues (Figure 1(a)). For S1P2 receptor protein (45 kDa), positive signals were detected in both AAA and control aortic tissues, with AAA tissues showing a significantly lower protein expression level than control aortas (Figure 1(b)).
In contrast, S1P3 receptor protein (45 kDa) was highly expressed in AAA aortas but was undetectable in control aortas (Figure 1(c)). The relative intensities of S1P receptor expression by Western blot analysis are shown in Figure 2 (upper and lower panels). The protein level of the S1P2 receptor was decreased by 73% (P < 0.05) in the AAA tissues (mean relative intensity 0.29) compared with the control aortic tissues (mean relative intensity 1.08). S1P3 protein levels were significantly upregulated in AAA tissues, with an average relative intensity of 0.65, whereas S1P3 was undetectable in normal aortas.

Figure 2: Relative S1P2 (upper panel) and S1P3 (lower panel) protein expression levels in Western blot analysis.

Immunohistochemical Staining Analysis. S1P1 receptor expression was undetectable in both AAA (a) and normal aortas (b), as shown in Figure 3. A positive control for the S1P1 receptor was performed in healthy adult rat brain to validate its specificity (Figure 3(c)). S1P2 receptor protein was expressed in both types of aortas, with more pronounced S1P2 receptor staining observed in control aortas (Figure 4). S1P3 receptor protein was found in the AAA tissues (Figure 5(a)) but was almost undetectable in normal aortas (Figure 5(b)). The staining scores of both types of tissue sections are shown in Table 2. Positive staining of the S1P2 and S1P3 receptors in the aortic walls showed that they were localized in the VSMC plasma membrane and cytoplasm but absent from the nucleus (Figures 4 and 5).

Figure 3: Representative staining pictures of S1P1 protein in AAA (a) and control aortic (b) sections, and healthy adult rat brain paraffin sections (c) (×200). S1P1 protein was undetectable in both types of tissues.

Figure 4: Representative staining pictures of S1P2 in AAA and control aortic sections (×200). S1P2 protein was more pronounced in the normal aortas (b) than in the AAAs (a).

Figure 5: Representative staining pictures of S1P3 in AAA and control aortic sections (×200). More S1P3 protein was found in the AAAs (a) than in the normal aortas (b), in which it was almost undetectable.

Correction for Age and Sex. Given the age and sex discrepancies between the patient groups, the receptor protein expression levels were corrected for age and sex. The differences in S1P2 and S1P3 protein (both IHC and WB expression levels) between the groups were robust to correction in a linear model with age and sex (P < 0.008).

Discussion. Among the studied S1P receptors, only the S1P2 and S1P3 receptor proteins were differentially expressed in AAA tissues compared with the control aortas, while S1P1 receptor protein was absent in both types of tissues. Differential S1P receptor expression has been shown to participate in diverse physiological processes, such as cell survival and apoptosis, and pathological processes, such as angiogenesis, inflammation, carcinogenesis, and immune regulation. Inflammation is one of the common pathological features of AAAs. Thus, the present novel findings may implicate the importance of these receptors in the inflammation that contributes to AAA pathogenesis.
Nevertheless, atherosclerosis is regarded as a chronic inflammatory disease, with atherosclerotic plaques containing inflammatory infiltrates that are implicated in the formation of AAA. Thus, the possibility that S1P receptor expression is related to the atherosclerotic process cannot be excluded.

The S1P1 receptor was undetectable in both types of aortic tissues. Other researchers have found that only some specific cell types, such as endothelial cells, cardiomyocytes, neural stem cells, B cells, and T cells, express marked S1P1. VSMCs of healthy adult rats express the S1P1 receptor only weakly, and deletion of the S1P1 receptor is embryonically lethal because it causes failure of the dorsal migration of VSMCs to form the tunica media layer of arteries, suggesting that the S1P1 receptor is critical in vascular development rather than in maintaining VSMC metabolism. A more recent study suggested that S1P1 is involved in phenotype regulation of adult smooth muscle cells. Using a rat carotid artery balloon injury model, the authors demonstrated a transient overexpression and activation of the S1P1 receptor after injury. This can facilitate the transition of VSMCs to a proliferative and migratory phenotype. However, such S1P1 receptor overexpression returned to the basal value by 7 days after injury, suggesting that this S1P1 receptor activation may be a short-term injury response. Thus, we postulate that S1P1 receptor protein may be transiently increased in early AAA lesions, but that its high expression subsequently returns to the basal level or becomes undetectable at the advanced stage of AAA development. In addition, the S1P1 receptor is possibly responsible for the development of the circulatory system and is expressed in endothelial cells, rather than in the smooth muscle cells of mature aortas, which may explain the absence of S1P1 in the late stages of aneurysm.

S1P2 receptor protein was detected in both types of aortic tissues, particularly in control aortas. Indeed, the S1P2 receptor has previously been shown to be the major S1P receptor in a wide variety of human tissues, such as vascular endothelial and smooth muscle cells, but not in inflammatory infiltrates. This receptor can facilitate the contractile phenotype of VSMCs and negatively regulates their proliferation and migration. Moreover, the S1P2 receptor is capable of inducing COX-2 expression and producing prostacyclin (PGI2) in response to exogenous S1P stimulation. PGI2 possesses anti-inflammatory functions and simultaneously relaxes VSMCs and suppresses their proliferation and migration. Thus, the decreased expression of the anti-inflammatory S1P2 receptor in the VSMCs of AAAs, together with its probable absence from the inflammatory infiltrates, may impair the production of PGI2 and ultimately lead to the pronounced inflammatory response in AAA patients. Therefore, S1P2 receptor downregulation in VSMCs appears to be an important etiological factor in AAA development.

Marked S1P3 receptor protein expression was found in AAA tissues. The S1P3 receptor promotes the inflammatory response, as it can induce COX-2 expression and concomitant PGE2 production in various cell types. A pronounced inflammatory mediator, PGE2 was also previously found in AAA explants in our laboratory, though the extent of its involvement in vascular inflammation is still unclear.
In addition, a very recent study suggested that S1P3 mediates the chemotactic effect of its ligand S1P in macrophages in vitro and in vivo, which plays a crucial role in atherosclerosis by promoting inflammatory monocyte/macrophage recruitment and altering smooth muscle cell behavior. We suggest that the S1P3 receptor protein may be critical in the strengthened inflammatory response, and thus in atherosclerosis, via its chemotactic property and the PGE2 pathway during AAA development. However, the causal relationship between inflammatory cell recruitment and the prostaglandins needs to be further explored. In the present study, a weak S1P3 staining signal was found in normal aortas, though it was undetectable by Western blot analysis. This staining result is consistent with the study of Ryu et al., in which S1P3 was found to be weakly expressed in healthy human VSMCs. Although an age and sex discrepancy existed among the studied patients, the present analysis found that the expression of the S1P2 and S1P3 receptor proteins is not age or sex dependent. Thus, the up- or downregulation of these receptors is probably related to the inflammatory cascade underlying AAA pathogenesis and is not simply a feature of aging or sex differences. The limitation of the present study is that it is only an observational analysis of surgical aneurysmal aortas; we therefore cannot extrapolate the observations to the initiation or promotion of aneurysm formation. Nevertheless, the present work provides evidence that aneurysmal aortic tissue exhibits decreased S1P2 and enhanced S1P3 receptor expression, which may contribute to the inflammation of the aortic wall involved in AAA pathology. Our findings of the differential expression of S1P receptors in AAA compared with normal aortas are novel and may help to delineate the important inflammatory mechanisms in AAA development. This investigation provides a new view of the inflammatory response in these lesions, and the regulation of S1P signaling via S1P2 and S1P3 may open a new avenue for AAA treatment in the future.
import { Vec3, SkillId, Customize, PacketBase } from '../lib/Common'

export class C_VISIT_NEW_SECTION_1 extends PacketBase {
    mapId: number
    guardId: number
    unk: number
}
def symbols(self, symbols):
    """Validate and normalize a symbols mapping; falsy input becomes {}."""
    if not symbols:
        return {}
    assert isinstance(symbols, dict)
    return symbols
Using a molybdenite complex and the PY5Me2 ligand, Berkeley Lab researchers synthesized a molecule that mimics catalytically active triangular molybdenum disulfide edge-sites. The result is an entire layer of catalytically active material. Molybdenum atoms are shown as green, sulfur as yellow. Credit: Courtesy of Berkeley Lab

A technique for creating a new molecule that structurally and chemically replicates the active part of the widely used industrial catalyst molybdenite has been developed by researchers with the Lawrence Berkeley National Laboratory (Berkeley Lab). This technique holds promise for the creation of catalytic materials that can serve as effective low-cost alternatives to platinum for generating hydrogen gas from acidic water. Christopher Chang and Jeffrey Long, chemists who hold joint appointments with Berkeley Lab and the University of California (UC) Berkeley, led a research team that synthesized a molecule to mimic the triangle-shaped molybdenum disulfide units along the edges of molybdenite crystals, which is where almost all of the catalytic activity takes place. Since the bulk of molybdenite crystalline material is relatively inert from a catalytic standpoint, molecular analogs of the catalytically active edge sites could be used to make new materials that are much more efficient and cost-effective catalysts. "Using molecular chemistry, we've been able to capture the functional essence of molybdenite and synthesize the smallest possible unit of its proposed catalytic active site," says Chang, who is also an investigator with the Howard Hughes Medical Institute (HHMI). "It should now be possible to design new catalysts that have a high density of active sites so we get the same catalytic activity with much less material." Says Long, "Inorganic solids, such as molybdenite, are an important class of catalysts that often derive their activity from sparse active edge sites, which are structurally distinct from the inactive bulk of the molecular solid. We've demonstrated that it is possible to create catalytically active molecular analogs of these sites that are tailored for a specific purpose. This represents a conceptual path forward to improving future catalytic materials." Chang and Long are the corresponding authors of a paper in the journal Science describing this research, titled "A Molecular MoS2 Edge Site Mimic for Catalytic Hydrogen Generation." Other authors are Hemamala Karunadasa, Elizabeth Montalvo, Yujie Sun and Marcin Majda.

Jeffrey Long, Christopher Chang and Hemamala Karunadasa are paving the way for the creation of catalytic materials that can serve as effective low-cost alternatives to platinum for generating hydrogen gas from water. Credit: Photo by Roy Kaltschmidt, Berkeley Lab Public Affairs

Molybdenite is the crystalline sulfide of molybdenum and the principal mineral from which molybdenum metal is extracted. Although commonly thought of as a lubricant, molybdenite is the standard catalyst used to remove sulfur from petroleum and natural gas, reducing sulfur dioxide emissions when those fuels are burned. Recent studies have shown that in its nanoparticle form, molybdenite also holds promise for catalyzing the electrochemical and photochemical generation of hydrogen from water. Hydrogen could play a key role in future renewable energy technologies if a relatively cheap, efficient and carbon-neutral means of producing it can be developed.
Currently, the best available technique for producing hydrogen is to split water molecules into molecules of hydrogen and oxygen using platinum as the catalyst. However, with platinum going for more than $2,000 an ounce, the market is wide open for a low-cost alternative catalyst. Molybdenite is far more plentiful and about 1/70th the cost of platinum, but poses other problems. "Molybdenite has a layered structure with multiple microdomains, most of which are chemically inert," Chang says. "High-resolution scanning tunneling microscopy studies and theoretical calculations have identified the triangular molybdenum disulfide edges as the active sites for catalysis; however, preparing molybdenite with a high density of functional edge sites in a predictable manner is extremely challenging." Chang, Long and their research team met this challenge using a pentapyridyl ligand known as PY5Me2 to create a molybdenum disulfide molecule that, while not found in nature, is stable and structurally identical to the proposed triangular edge sites of molybdenite. It was shown that these synthesized molecules can form a layer of material that is analogous to constructing a sulfide edge of molybdenite. "The electronic structure of our molecular analog can be adjusted through ligand modifications," Long says. "This suggests we should be able to tailor the material's activity, stability and required over-potential for proton reduction to improve its performance." In 2010, Chang, Long and Hemamala Karunadasa, who is the lead author on this new Science paper, used the PY5Me2 ligand to create a molybdenum-oxo complex that can effectively and efficiently catalyze the generation of hydrogen from neutral buffered water or even sea water. Molybdenite complexes synthesized from this new molecular analog can just as effectively and efficiently catalyze the generation of hydrogen gas from acidic water. "We're now looking to develop molecular analogs of active sites in other catalytic materials that will work over a range of pH conditions, as well as extend this work to photocatalytic systems," Chang says. Adds Long, "Our molecular analog for the molybdenite active site might not be a replacement for any existing catalytic materials, but it does provide a way to increase the density of active sites in inorganic solid catalytic materials and thereby allow us to do more with less." More information: www.sciencemag.org/content/335/6069/698.abstract
package com.uicode.smallchat.smallchatserver.messaging;

import com.uicode.smallchat.smallchatserver.model.messagingnotice.AbstractNotice;

import io.vertx.core.eventbus.MessageConsumer;
import io.vertx.core.json.JsonObject;

public class PackageMsgNotice<T extends AbstractNotice> {

    private T notice;
    private MessageConsumer<JsonObject> consumer;

    public PackageMsgNotice(T notice, MessageConsumer<JsonObject> consumer) {
        this.notice = notice;
        this.consumer = consumer;
    }

    public T getNotice() {
        return notice;
    }

    public void unsubscribe() {
        this.consumer.unregister();
    }
}
class ImgurEndpoints:
    """Class to get the endpoint URLs of the imgur API."""

    URL = "https://api.imgur.com/3/"

    @staticmethod
    def get_upload_url():
        """
        Get the full URL of the image uploading endpoint.

        :return: full endpoint URL to upload an image
        """
        return ImgurEndpoints.URL + "upload"

    @staticmethod
    def get_delete_url(delete_hash: str):
        """
        Get the full URL of the image deleting endpoint.

        :param delete_hash: image deleting hash
        :return: full URL to delete the image with `delete_hash`
        """
        return ImgurEndpoints.URL + "image/" + delete_hash
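A hedged usage sketch composing request URLs from these helpers; the requests calls and Client-ID header follow the public imgur API conventions, but the surrounding client code (credentials, file path) is an assumption:

import requests

headers = {"Authorization": "Client-ID <your-client-id>"}  # placeholder credential

# Upload an image, then delete it using the returned delete hash.
resp = requests.post(
    ImgurEndpoints.get_upload_url(),  # https://api.imgur.com/3/upload
    headers=headers,
    files={"image": open("photo.png", "rb")},
)
delete_hash = resp.json()["data"]["deletehash"]
requests.delete(ImgurEndpoints.get_delete_url(delete_hash), headers=headers)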
import java.util.*;

public class schedule {
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int d = sc.nextInt();        // number of days
        int sumhrs = sc.nextInt();   // total hours that must be scheduled
        int[] minm = new int[d];
        int[] maxm = new int[d];
        int minsum = 0, maxsum = 0;
        for (int i = 0; i < d; i++) {
            minm[i] = sc.nextInt();
            minsum += minm[i];
            maxm[i] = sc.nextInt();
            maxsum += maxm[i];
        }
        // A schedule exists iff the total lies between the sums of the
        // per-day minima and maxima.
        if (minsum <= sumhrs && sumhrs <= maxsum) {
            System.out.println("YES");
            // Greedily give each day its minimum plus as much of the
            // remaining surplus (sumhrs - minsum) as its maximum allows.
            for (int i = 0; i < d; i++) {
                int t = Math.min(minm[i] + sumhrs - minsum, maxm[i]);
                if (i + 1 < d)
                    System.out.print(t + " ");
                else
                    System.out.print(t);
                sumhrs -= t - minm[i];
            }
        } else {
            System.out.println("NO");
        }
    }
}
<filename>crypto/RsaContext.cpp /* * * Copyright 2013 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include "RsaContext.h" #include <assert.h> #include <openssl/err.h> #include <openssl/obj_mac.h> #include <openssl/rsa.h> #include <openssl/x509.h> #include <base/DebugUtil.h> #include <crypto/NtbaUtil.h> #include <crypto/BigNum.h> #include <crypto/OpenSSLException.h> #include <crypto/DigestAlgo.h> #include <crypto/Digester.h> #include <crypto/ScopedOpenSSL.h> using namespace std; using namespace cadmium::base; namespace { int cad2NidSha(cadmium::crypto::RsaContext::ShaAlgo shaAlgo) { switch(shaAlgo) { case cadmium::crypto::RsaContext::SHA1: return NID_sha1; break; case cadmium::crypto::RsaContext::SHA224: return NID_sha224; break; case cadmium::crypto::RsaContext::SHA256: return NID_sha256; break; case cadmium::crypto::RsaContext::SHA384: return NID_sha384; break; case cadmium::crypto::RsaContext::SHA512: return NID_sha512; break; default: assert(false); break; } return NID_sha256; // required to make compiler happy } } // anonymous namespace namespace cadmium { namespace crypto { RsaContext::RsaContext() : pOsslRsa_(NULL) , hasPrivateKey_(false) , hasPublicKey_(false) { } RsaContext::~RsaContext() { if (pOsslRsa_) RSA_free(pOsslRsa_); } bool RsaContext::generate(uint32_t nBits, uint64_t publicExponent) { // this method clobbers any existing context if (pOsslRsa_) RSA_free(pOsslRsa_); // The OpenSSL spec says the publicExponent must be odd. Yung found that // RSA_generate_key hangs if this is not true, so we better make it true. // Typical values are 3, 17 or 65537, with 65537 the most common. publicExponent |= (uint64_t)1; bool keygenSuccess = false; uint32_t retryCount = 0; const uint32_t MAX_RETRIES=4; while (!keygenSuccess && (retryCount < MAX_RETRIES)) { pOsslRsa_ = RSA_generate_key(nBits, publicExponent, 0, 0); if (pOsslRsa_) keygenSuccess = (RSA_check_key(pOsslRsa_) == 1); if (!keygenSuccess && pOsslRsa_) RSA_free(pOsslRsa_); retryCount++; } if (keygenSuccess) { hasPrivateKey_ = true; hasPublicKey_ = true; } return keygenSuccess; } bool RsaContext::setRaw(const Vuc & pubMod, const Vuc & pubExp, const Vuc & privMod) { // I'm not sure how setting values directly in the RSA structure affects the // integrity of any existing data there, so best to start empty. 
if (pOsslRsa_) RSA_free(pOsslRsa_); pOsslRsa_ = RSA_new(); // The RSA struct takes ownership of the input bignums, so we need to dup pOsslRsa_->n = BN_dup(BigNum(pubMod).getBIGNUM()); pOsslRsa_->e = BN_dup(BigNum(pubExp).getBIGNUM()); if (!privMod.empty()) { pOsslRsa_->d = BN_dup(BigNum(privMod).getBIGNUM()); hasPrivateKey_ = true; } else { pOsslRsa_->d = NULL; } pOsslRsa_->p = NULL; pOsslRsa_->q = NULL; hasPublicKey_ = true; return true; } bool RsaContext::getPublicRaw(Vuc & pubMod, Vuc & pubExp) { if (!hasPublicKey_) return false; pubMod = BigNum(pOsslRsa_->n).encode(); pubExp = BigNum(pOsslRsa_->e).encode(); return true; } bool RsaContext::setPublicPkcs1(const Vuc & pubKeyDer) { #ifdef BUILD_DEBUG DLOG() << "RsaContext::setPublicPkcs1: key =\n" << truncateLong(NtbaUtil::toHexString(pubKeyDer, "")) << endl; #endif const unsigned char * buf = &pubKeyDer[0]; RSA *rsa = d2i_RSAPublicKey(NULL, &buf, pubKeyDer.size()); if (rsa == NULL) return false; if (pOsslRsa_) RSA_free(pOsslRsa_); pOsslRsa_ = rsa; hasPublicKey_ = true; return true; } bool RsaContext::setPrivatePkcs1(const Vuc & privKeyDer) { #ifdef BUILD_DEBUG DLOG() << "RsaContext::setPrivatePkcs1: key =\n" << truncateLong(NtbaUtil::toHexString(privKeyDer, "")) << endl; #endif const unsigned char * buf = &privKeyDer[0]; RSA *rsa = d2i_RSAPrivateKey(NULL, &buf, privKeyDer.size()); if (rsa == NULL) { OPENSSLERROR_MSG("RsaContext::setPrivatePkcs1: d2i_RSAPrivateKey failed"); return false; } if (pOsslRsa_) RSA_free(pOsslRsa_); pOsslRsa_ = rsa; hasPrivateKey_ = true; return true; } bool RsaContext::getPublicPkcs1(Vuc& pubKeyDer) const { if (!hasPublicKey_) return false; int keyLen = i2d_RSAPublicKey(pOsslRsa_, NULL); pubKeyDer.resize(keyLen); unsigned char * buf = &pubKeyDer[0]; i2d_RSAPublicKey(pOsslRsa_, &buf); return true; } bool RsaContext::getPrivatePkcs1(Vuc& privKeyDer) const { if (!hasPrivateKey_) return false; int keyLen = i2d_RSAPrivateKey(pOsslRsa_, NULL); privKeyDer.resize(keyLen); unsigned char * buf = &privKeyDer[0]; i2d_RSAPrivateKey(pOsslRsa_, &buf); #ifdef BUILD_DEBUG DLOG() << "RsaContext::getPrivatePkcs1: key =\n" << truncateLong(NtbaUtil::toHexString(privKeyDer, "")) << endl; #endif return true; } bool RsaContext::setPublicSpki(const Vuc & pubKeySpkiDer) { #ifdef BUILD_DEBUG DLOG() << "RsaContext::setPublicSpki: key =\n" << truncateLong(NtbaUtil::toHexString(pubKeySpkiDer, "")) << endl; #endif const unsigned char * buf = &pubKeySpkiDer[0]; RSA *rsa = d2i_RSA_PUBKEY(NULL, &buf, pubKeySpkiDer.size()); if (rsa == NULL) return false; if (pOsslRsa_) RSA_free(pOsslRsa_); pOsslRsa_ = rsa; hasPublicKey_ = true; return true; } bool RsaContext::getPublicSpki(Vuc & pubKeySpkiDer) const { if (!hasPublicKey_) return false; int keyLen = i2d_RSA_PUBKEY(pOsslRsa_, NULL); pubKeySpkiDer.resize(keyLen); unsigned char * buf = &pubKeySpkiDer[0]; i2d_RSA_PUBKEY(pOsslRsa_, &buf); return true; } bool RsaContext::setPrivatePkcs8(const Vuc & pkcs8) { // OpenSSL does not make it easy to import a private key in PKCS#8 format. // Must go through some monkey-motions. 
// make a mem BIO pointing to the incoming PKCS#8 data char* const data = reinterpret_cast<char*>(const_cast<uint8_t*>(&pkcs8[0])); ScopedOpenSSL<BIO, BIO_free_all> bio(BIO_new_mem_buf(data, pkcs8.size())); if (!bio.get()) { OPENSSLERROR_MSG("RsaContext::setPrivatePkcs8: BIO_new_mem_buf() failed"); return false; } // get a PKCS8_PRIV_KEY_INFO struct from the BIO ScopedOpenSSL<PKCS8_PRIV_KEY_INFO, PKCS8_PRIV_KEY_INFO_free> p8inf( d2i_PKCS8_PRIV_KEY_INFO_bio(bio.get(), NULL)); if (!p8inf.get()) { OPENSSLERROR_MSG("RsaContext::setPrivatePkcs8: d2i_PKCS8_PRIV_KEY_INFO_bio() failed"); return false; } // create a EVP_PKEY from the PKCS8_PRIV_KEY_INFO ScopedOpenSSL<EVP_PKEY, EVP_PKEY_free> pkey(EVP_PKCS82PKEY(p8inf.get())); if (!pkey.get()) { OPENSSLERROR_MSG("RsaContext::setPrivatePkcs8: EVP_PKCS82PKEY() failed"); return false; } // get the RSA struct from the EVP_PKEY RSA * const rsa = EVP_PKEY_get1_RSA(pkey.get()); if (!rsa) { OPENSSLERROR_MSG("RsaContext::setPrivatePkcs8: EVP_PKEY_get1_RSA() failed"); return false; } // save the RSA struct to this pOsslRsa_ = rsa; hasPrivateKey_ = true; return true; } bool RsaContext::getPrivatePkcs8(Vuc & pkcs8) const { if (!hasPrivateKey_) return false; ScopedOpenSSL<EVP_PKEY, EVP_PKEY_free> pkey(EVP_PKEY_new()); if (pkey.get() == NULL) { OPENSSLERROR_MSG("RsaContext::getPrivatePkcs8: EVP_PKEY_new() failed"); return false; } int ret = EVP_PKEY_set1_RSA(pkey.get(), pOsslRsa_); if (!ret) { OPENSSLERROR_MSG("RsaContext::getPrivatePkcs8: EVP_PKEY_set1_RSA() failed"); return false; } ScopedOpenSSL<PKCS8_PRIV_KEY_INFO, PKCS8_PRIV_KEY_INFO_free> p8inf(EVP_PKEY2PKCS8(pkey.get())); if (p8inf.get() == NULL) { OPENSSLERROR_MSG("RsaContext::getPrivatePkcs8: EVP_PKEY2PKCS8() failed"); return false; } int outLen = i2d_PKCS8_PRIV_KEY_INFO(p8inf.get(), NULL); if (outLen <= 0) { OPENSSLERROR_MSG("RsaContext::getPrivatePkcs8: i2d_PKCS8_PRIV_KEY_INFO() returned bad length"); return false; } pkcs8.resize(outLen); unsigned char * buf = &pkcs8[0]; ret = i2d_PKCS8_PRIV_KEY_INFO(p8inf.get(), &buf); if (!ret) { OPENSSLERROR_MSG("RsaContext::i2d_PKCS8_PRIV_KEY_INFO: EVP_PKEY_set1_RSA() failed"); return false; } return true; } bool RsaContext::publicEncrypt(const Vuc & in, Vuc & out, Padding padding) { if (!hasPublicKey_) return false; int osslPadding = RSA_NO_PADDING; switch (padding) { case NONE: osslPadding = RSA_NO_PADDING; break; case PKCS1: osslPadding = RSA_PKCS1_PADDING; break; case PKCS1_OAEP: osslPadding = RSA_PKCS1_OAEP_PADDING; break; default: assert(false); break; } const size_t rsaSize = RSA_size(pOsslRsa_); if (rsaSize <= 0) return false; if (in.size() > (rsaSize - 11)) { DLOG() << "RsaContext::publicEncrypt: input message too long\n"; return false; } out.resize(rsaSize); int res = RSA_public_encrypt(in.size(), &in[0], &out[0], pOsslRsa_, osslPadding); if (res == -1) { OPENSSLERROR_MSG("RsaContext::publicEncrypt: RSA_public_encrypt() failed"); return false; } out.resize(res); Vuc(out.begin(), out.end()).swap(out); return true; } bool RsaContext::privateDecrypt(const Vuc& in, Vuc& out, Padding padding) { if (!hasPrivateKey_) return false; int osslPadding = RSA_NO_PADDING; switch (padding) { case NONE: osslPadding = RSA_NO_PADDING; break; case PKCS1: osslPadding = RSA_PKCS1_PADDING; break; case PKCS1_OAEP: osslPadding = RSA_PKCS1_OAEP_PADDING; break; default: assert(false); break; } const int rsaSize = RSA_size(pOsslRsa_); if (rsaSize <= 0) return false; out.resize(rsaSize); int res = RSA_blinding_on(pOsslRsa_, NULL); // blinding for private key ops only if (res != 1) { 
OPENSSLERROR_MSG("RsaContext::privateDecrypt: RSA_blinding_on() failed"); return false; } res = RSA_private_decrypt(in.size(), &in[0], &out[0], pOsslRsa_, osslPadding); RSA_blinding_off(pOsslRsa_); if (res == -1) { OPENSSLERROR_MSG("RsaContext::privateDecrypt: RSA_private_decrypt() failed"); return false; } out.resize(res); Vuc(out.begin(), out.end()).swap(out); return true; } bool RsaContext::privateSign(const Vuc & inVuc, ShaAlgo shaAlgo, Vuc & outVuc) { if (!hasPrivateKey_) return false; // first need to calculate the hash, using the specified ShaAlgo const Vuc hashVuc = computeDigest(inVuc, shaAlgo); // now sign the computed hash const int rsaSize = RSA_size(pOsslRsa_); if (rsaSize <= 0) return false; outVuc.resize(rsaSize); // use blinding for all private key operations if (RSA_blinding_on(pOsslRsa_, NULL) != 1) { OPENSSLERROR_MSG("RsaContext::privateSign: RSA_blinding_on() failed"); return false; } unsigned int outLen; int res = RSA_sign(cad2NidSha(shaAlgo), &hashVuc[0], hashVuc.size(), &outVuc[0], &outLen, pOsslRsa_); RSA_blinding_off(pOsslRsa_); if (res != 1) { OPENSSLERROR_MSG("RsaContext::privateSign: RSA_sign() failed"); return false; } // size and return the signature outVuc.resize(outLen); Vuc(outVuc.begin(), outVuc.end()).swap(outVuc); return true; } bool RsaContext::publicVerify(const Vuc& inVuc, ShaAlgo shaAlgo, const Vuc& sig) { if (!hasPublicKey_) return false; // first need to calculate the hash on the input data, using the specified ShaAlgo const Vuc hashVuc = computeDigest(inVuc, shaAlgo); // now verify the computed hash if (RSA_blinding_on(pOsslRsa_, NULL) != 1) { OPENSSLERROR_MSG("RsaContext::privateSign: RSA_blinding_on() failed"); return false; } int res = RSA_verify(cad2NidSha(shaAlgo), &hashVuc[0], hashVuc.size(), &sig[0], sig.size(), pOsslRsa_); RSA_blinding_off(pOsslRsa_); if (res != 1) { OPENSSLERROR_MSG("RsaContext::publicVerify: RSA_verify() failed"); return false; } return true; } RsaContext::Vuc RsaContext::computeDigest(const Vuc& inVuc, ShaAlgo shaAlgo) { shared_ptr<const DigestAlgo> digestAlgo; switch (shaAlgo) { case SHA1: digestAlgo = DigestAlgo::SHA1(); break; case SHA224: digestAlgo = DigestAlgo::SHA224(); break; case SHA256: digestAlgo = DigestAlgo::SHA256(); break; case SHA384: digestAlgo = DigestAlgo::SHA384(); break; case SHA512: digestAlgo = DigestAlgo::SHA512(); break; default: assert(false); break; } Digester digester(digestAlgo); digester.init(); digester.update(inVuc); return digester.final(); } }} // namespace cadmium::crypto
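A minimal calling sketch against the methods defined above follows. The helper itself is hypothetical, and the enum scoping (RsaContext::SHA256) is an assumption inferred from the method signatures, not something confirmed by this file.

// Hypothetical caller: only the RsaContext methods shown above are real.
// Assumes ShaAlgo is a nested enum and Vuc the nested vector typedef, per the
// signatures above (e.g. RsaContext::Vuc RsaContext::computeDigest(...)).
bool signMessage(RsaContext& ctx, const RsaContext::Vuc& pkcs8Key,
                 const RsaContext::Vuc& message, RsaContext::Vuc& sig)
{
    if (!ctx.setPrivatePkcs8(pkcs8Key))   // import the PKCS#8 private key
        return false;
    // RSASSA-PKCS1-v1_5 signature over a SHA-256 digest of the message
    return ctx.privateSign(message, RsaContext::SHA256, sig);
}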
/*=========================================================================

  Program:   ParaView
  Module:    vtkCPTestDriver.h

  Copyright (c) Kitware, Inc.
  All rights reserved.
  See Copyright.txt or http://www.paraview.org/HTML/Copyright.html for details.

     This software is distributed WITHOUT ANY WARRANTY; without even
     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
     PURPOSE.  See the above copyright notice for more information.

=========================================================================*/
// .NAME vtkCPTestDriver - Class for creating a co-processor test driver.
// .SECTION Description
// Class for creating a co-processor test driver. It is intended
// as a framework for creating custom inputs replicating a simulation for
// the co-processing library.

#ifndef vtkCPTestDriver_h
#define vtkCPTestDriver_h

#include "vtkObject.h"
#include "vtkPVCatalystTestDriverModule.h" // needed for export macros

class vtkCPBaseGridBuilder;

class VTKPVCATALYSTTESTDRIVER_EXPORT vtkCPTestDriver : public vtkObject
{
public:
  static vtkCPTestDriver* New();
  vtkTypeMacro(vtkCPTestDriver, vtkObject);
  void PrintSelf(ostream& os, vtkIndent indent);

  // Description:
  // Run the test driver. Returns 0 if there were no errors.
  virtual int Run();

  // Description:
  // Set/get NumberOfTimeSteps.
  vtkSetMacro(NumberOfTimeSteps, unsigned long);
  vtkGetMacro(NumberOfTimeSteps, unsigned long);

  // Description:
  // Given a TimeStep, return the simulation time corresponding to
  // that time step. This implementation has constant time
  // steps between StartTime and EndTime.
  virtual double GetTime(unsigned long timeStep);

  // Description:
  // Set/get GridBuilder.
  void SetGridBuilder(vtkCPBaseGridBuilder* gridBuilder);
  vtkCPBaseGridBuilder* GetGridBuilder();

  // Description:
  // Set/get the start and end times of the simulation.
  vtkSetMacro(StartTime, double);
  vtkGetMacro(StartTime, double);
  vtkSetMacro(EndTime, double);
  vtkGetMacro(EndTime, double);

protected:
  vtkCPTestDriver();
  ~vtkCPTestDriver();

private:
  vtkCPTestDriver(const vtkCPTestDriver&); // Not implemented
  void operator=(const vtkCPTestDriver&);  // Not implemented

  // Description:
  // The grid builder for creating the input grids to the coprocessing library.
  vtkCPBaseGridBuilder* GridBuilder;

  // Description:
  // The total number of time steps the test driver will compute.
  // The time steps are numbered 0 through NumberOfTimeSteps-1.
  unsigned long NumberOfTimeSteps;

  // Description:
  // The start and end times of the simulation.
  double StartTime;
  double EndTime;
};

#endif
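A hedged usage sketch of the class declared above; it exercises only the accessors from this header, and the builder argument stands in for any concrete vtkCPBaseGridBuilder subclass (none is named here).

// Sketch only: assumes the caller supplies a concrete grid builder.
#include "vtkCPTestDriver.h"

int runDriver(vtkCPBaseGridBuilder* builder)
{
  vtkCPTestDriver* driver = vtkCPTestDriver::New();
  driver->SetGridBuilder(builder);   // grids fed to the coprocessing library
  driver->SetNumberOfTimeSteps(10);  // time steps 0 through 9
  driver->SetStartTime(0.0);
  driver->SetEndTime(1.0);           // GetTime() spaces steps evenly in [0, 1]
  int errors = driver->Run();        // returns 0 if there were no errors
  driver->Delete();
  return errors;
}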
<filename>spincast-plugins/spincast-plugins-date-formatter-parent/spincast-plugins-date-formatter/src/main/java/org/spincast/plugins/dateformatter/RelativeDateFormatType.java package org.spincast.plugins.dateformatter; /** * The type of format from PrettyTime. */ public enum RelativeDateFormatType { DEFAULT, DURATION, UNROUNDED }
# Decide whether the array can be sorted by reversing exactly one segment,
# and print the 1-based segment bounds if so. The -1 sentinel assumes the
# input elements are non-negative.
n = int(input())
ai = [-1] + list(map(int, input().split()))

def rises(arr):
    """String of '1'/'0' flags: '1' where arr strictly increases at position i."""
    return "".join(str(int(arr[i] > arr[i - 1])) for i in range(1, n + 1))

ai2 = rises(ai)
num = ai2.find("0")                        # first descent
num2 = num + 1 + ai2[num + 1:].find("1")   # end of the descending run (== num if it never rises again)
num3 = ai2[num2 + 1:].find("0")            # any later descent?

if num == -1:
    # already sorted: reversing the trivial segment [1, 1] works
    print("yes")
    print(1, 1)
elif num2 == num or num3 == -1:
    if num2 == num:
        num2 = n                           # the descent runs to the end of the array
    # reverse the descending run and re-check sortedness
    ai = ai[:num] + list(reversed(ai[num:num2 + 1])) + ai[num2 + 1:]
    if rises(ai).find("0") == -1:
        print("yes")
        print(num, num2)
    else:
        print("no")
else:
    print("no")
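As a quick sanity check of the logic above, a sample case can be piped through the script; the filename here is hypothetical.

# Minimal harness for the script above (saved as reverse_sort.py, a
# hypothetical filename): reversing positions 2..3 sorts [1, 3, 2, 4].
import subprocess

out = subprocess.run(
    ["python3", "reverse_sort.py"],
    input="4\n1 3 2 4\n",
    capture_output=True, text=True,
).stdout.split()
assert out == ["yes", "2", "3"]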
// register all available, exported methods to handlers if possible.
func (c *ControllerActivator) parseMethods() {
	n := c.Type.NumMethod()
	for i := 0; i < n; i++ {
		m := c.Type.Method(i)
		c.parseMethod(m)
	}
}
// Min returns the minimum integer in the slice.
// Relies on the "fmt" import of the enclosing file.
func Min(n []int) (int, error) {
	if len(n) == 0 {
		return 0, fmt.Errorf("slice %#v has no elements", n)
	}
	min := n[0]
	for _, num := range n[1:] { // a single-element slice skips the loop with min == n[0]
		if num < min {
			min = num
		}
	}
	return min, nil
}
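A runnable sketch of calling Min; the module import path is hypothetical, since this excerpt does not name its package.

package main

import (
	"fmt"

	"example.com/mathutil" // hypothetical module path for the Min helper above
)

func main() {
	m, err := mathutil.Min([]int{7, 3, 9})
	if err != nil {
		fmt.Println("error:", err) // only happens for an empty slice
		return
	}
	fmt.Println("min:", m) // prints: min: 3
}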
// GetListRecommendationsSortByEnumValues Enumerates the set of values for ListRecommendationsSortByEnum
func GetListRecommendationsSortByEnumValues() []ListRecommendationsSortByEnum {
	values := make([]ListRecommendationsSortByEnum, 0)
	for _, v := range mappingListRecommendationsSortBy {
		values = append(values, v)
	}
	return values
}
import { Card } from '../../../interfaces'
import Set from '../Darkness Ablaze'

const card: Card = {
	name: { en: "Ariados", fr: "Migalos", es: "Ariados", it: "Ariados", pt: "Ariados", de: "Ariados" },
	illustrator: "<NAME>",
	rarity: "Uncommon",
	category: "Pokemon",
	set: Set,
	hp: 110,
	types: ["Darkness"],
	evolveFrom: { en: "Spinarak", fr: "Mimigal" },
	abilities: [
		{
			type: "Ability",
			name: { en: "Spider Net", fr: "Rets Arachnéens", es: "Red de Araña", it: "Rete del Ragno", pt: "Rede de Aranha", de: "Spinnengewebe" },
			effect: {
				en: "When you play this Pokémon from your hand to evolve 1 of your Pokémon during your turn, you may switch 1 of your opponent’s Benched Evolution Pokémon with their Active Pokémon.",
				fr: "Lorsque vous jouez ce Pokémon de votre main pour faire évoluer l’un de vos Pokémon pendant votre tour, vous pouvez échanger l’un des Pokémon évolutifs de Banc de votre adversaire contre son Pokémon Actif.",
				es: "Cuando juegas este Pokémon de tu mano para hacer evolucionar a 1 de tus Pokémon durante tu turno, puedes cambiar 1 de los Pokémon Evolución en Banca de tu rival por su Pokémon Activo.",
				it: "Quando giochi questo Pokémon dalla tua mano per far evolvere uno dei tuoi Pokémon durante il tuo turno, puoi scambiare uno dei Pokémon Evoluzione nella panchina del tuo avversario con il suo Pokémon attivo.",
				pt: "Quando você jogar este Pokémon da sua mão para evoluir 1 dos seus Pokémon durante o seu turno, você poderá trocar 1 dos Pokémon de Evolução no Banco do seu oponente pelo Pokémon Ativo dele(a).",
				de: "Wenn du dieses Pokémon aus deiner Hand spielst, um 1 deiner Pokémon während deines Zuges zu entwickeln, kannst du 1 Entwicklungs-Pokémon auf der Bank deines Gegners gegen sein Aktives Pokémon austauschen."
			},
		},
	],
	attacks: [
		{
			cost: ["Darkness", "Colorless"],
			name: { en: "<NAME>", fr: "Dard-Venin", es: "<NAME>", it: "Velenospina", pt: "<NAME>", de: "Giftstachel" },
			effect: {
				en: "Your opponent’s Active Pokémon is now Poisoned.",
				fr: "Le Pokémon Actif de votre adversaire est maintenant Empoisonné.",
				es: "El Pokémon Activo de tu rival pasa a estar Envenenado.",
				it: "Il Pokémon attivo del tuo avversario viene avvelenato.",
				pt: "O Pokémon Ativo do seu oponente agora está Envenenado.",
				de: "Das Aktive Pokémon deines Gegners ist jetzt vergiftet."
			},
			damage: 30,
		},
	],
	weaknesses: [
		{ type: "Fighting", value: "×2" },
	],
	retreat: 2,
	regulationMark: "D",
	variants: { normal: true, reverse: true, holo: false, firstEdition: false },
	stage: "Stage1",
	description: { en: "It spews threads from its mouth to catch its prey. When night falls, it leaves its web to go hunt aggressively." }
}

export default card
<reponame>GithubActionTest/rssbot use std::sync::Arc; use either::Either; use pinyin::{Pinyin, ToPinyin}; use tbot::{contexts::Command, types::parameters}; use tokio::sync::Mutex; use crate::data::Database; use crate::messages::{format_large_msg, Escape}; use super::{check_channel_permission, update_response, MsgTarget}; pub async fn rss( db: Arc<Mutex<Database>>, cmd: Arc<Command>, ) -> Result<(), tbot::errors::MethodCall> { let chat_id = cmd.chat.id; let channel = &cmd.text.value; let mut target_id = chat_id; let target = &mut MsgTarget::new(chat_id, cmd.message_id); if !channel.is_empty() { let channel_id = check_channel_permission(&cmd, channel, target).await?; if channel_id.is_none() { return Ok(()); } target_id = channel_id.unwrap(); } let feeds = db.lock().await.subscribed_feeds(target_id.0); let mut msgs = if let Some(mut feeds) = feeds { feeds.sort_by_cached_key(|feed| { feed.title .chars() .map(|c| { c.to_pinyin() .map(Pinyin::plain) .map(Either::Right) .unwrap_or_else(|| Either::Left(c)) }) .collect::<Vec<Either<char, &str>>>() }); format_large_msg(tr!("subscription_list").to_string(), &feeds, |feed| { format!( "<a href=\"{}\">{}</a>", Escape(&feed.link), Escape(&feed.title) ) }) } else { vec![tr!("subscription_list_empty").to_string()] }; let first_msg = msgs.remove(0); update_response(&cmd.bot, target, parameters::Text::with_html(&first_msg)).await?; let mut prev_msg = target.message_id; for msg in msgs { let text = parameters::Text::with_html(&msg); let msg = cmd .bot .send_message(chat_id, text) .in_reply_to(prev_msg) .is_web_page_preview_disabled(true) .call() .await?; prev_msg = msg.id; } Ok(()) }
# File : espnet.py
# Author : Zhengkun Tian
# Email : [email protected]

import json
import torch
import logging
import kaldiio as kio
from otrans.data import load_vocab, UNK_TOKEN
from otrans.data.augment import spec_augment
from torch.utils.data import Dataset


class ESPNetDataset(Dataset):
    def __init__(self, params, datadict, is_eval=False):
        self.params = params
        self.datadict = datadict
        self.is_eval = is_eval

        # SpecAugment is only applied during training, never at eval time
        self.apply_spec_augment = bool(params['spec_augment'] and not self.is_eval)
        if self.apply_spec_augment:
            logging.info('Apply SpecAugment!')
            self.spec_augment_config = params['spec_augment_config']
            logging.info('Config: %s' % ' '.join(
                [key + ':' + str(value) for key, value in self.spec_augment_config.items()]))

        self.unit2idx = load_vocab(params['vocab'])

        with open(self.datadict['json'], 'r') as f:
            self.utts = [(k, v) for k, v in json.load(f)['utts'].items()]

    def __getitem__(self, index):
        utt_id, infos = self.utts[index]

        path = infos['input'][0]['feat']
        feature = kio.load_mat(path)
        feature_length = feature.shape[0]
        feature = torch.FloatTensor(feature)

        if self.apply_spec_augment:
            feature = spec_augment(feature, **self.spec_augment_config)

        targets = infos['output'][0]['token']
        targets = self.encode(targets)
        targets_length = len(targets)

        return utt_id, feature, feature_length, targets, targets_length

    def __len__(self):
        return len(self.utts)

    def index_length_pair(self):
        length_list = []
        for index in range(len(self)):
            _, infos = self.utts[index]
            length = int(infos['input'][0]['shape'][0])
            length_list.append((index, length))
        return length_list

    def encode(self, seq):
        # map each token to its id, falling back to the UNK id
        ids = []
        for s in seq.split():
            ids.append(self.unit2idx[s] if s in self.unit2idx else self.unit2idx[UNK_TOKEN])
        return ids

    @property
    def idx2unit(self):
        return {i: c for (c, i) in self.unit2idx.items()}
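A hedged construction sketch: the params and datadict shapes are inferred from the keys the class reads above, and the file paths are placeholders.

# Sketch only: placeholder paths; an ESPNet-style json with an "utts" dict
# is assumed, matching what __init__ above parses.
params = {
    "spec_augment": False,       # disabled here, so no augment config is used
    "spec_augment_config": {},
    "vocab": "data/vocab.txt",   # hypothetical vocab path for load_vocab()
}
datadict = {"json": "data/dev.json"}

dataset = ESPNetDataset(params, datadict, is_eval=True)
utt_id, feats, feat_len, targets, target_len = dataset[0]
print(utt_id, feats.shape, target_len)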
Most guns used in crimes in New York state between 2010 and 2015 came from other places.

That is the biggest takeaway from a report released recently by Eric Schneiderman, New York’s attorney general. “Target on Trafficking: Analysis of New York Gun Crimes” found that 74 percent of guns used in a crime recovered in New York came from outside this state. Of these, 70 percent originated “in just six states with weak gun laws — the states along I-95 that make up the Iron Pipeline,” says an online presentation on the report.

Mr. Schneiderman uses those numbers to argue that the federal government and other states should enact tougher gun laws. Doing so, he reasons, would keep guns out of the hands of criminals committing crimes in our state.

“Critics of gun regulations often say that criminals don’t obey the law, so why bother?” the report’s website says. “The data refutes that argument. It shows that New York’s laws requiring universal background checks and permits for handguns are working to keep criminals from purchasing these weapons within the State.”

However, the report’s data shows that the percentage of crime guns from out of state remained remarkably steady before and after New York’s SAFE Act was passed in 2013. From 2010 to 2015, it only fluctuated between 73 and 75 percent. The number of recovered crime guns went down, along with the violent crime rate throughout the Northeast, but the out-of-state percentage stayed the same.

Obviously, it’s desirable and worth some degree of government action to keep guns out of the hands of those who would do harm. New York’s SAFE Act was supposed to do that, but by Mr. Schneiderman’s measure, it hasn’t worked. If the out-of-state gun percentage had spiked since the new restrictions took effect, that might indicate criminals were being foiled here and turning more often to other states, but that doesn’t seem to have happened.

Perhaps New York lawmakers didn’t realize the length to which those who want to do harm will go to do so, or maybe the SAFE Act was always more symbolic than pragmatic. Three years after its passage, the gun-related freedoms of people who have done nothing wrong have been restricted while people who have no business owning a gun find ways to get one.

Mr. Schneiderman is right that a piecemeal, state-by-state approach to gun laws is doomed to fail. Any changes should be federal in nature. But we caution our federal representatives about using Mr. Schneiderman’s report to justify further infringements on legal gun owners without being sure new laws will actually keep guns out of the hands of criminals.
[Photos: The researchers deploy concrete slabs to support the heated settlement panels. Credits: Gail Ashton; Sabrina Heiser. Featured image courtesy of Gail Ashton.]

In the next 50 years, ocean temperatures are expected to increase by 1°C. To get a sense of how rising temperatures will impact marine communities, researchers lowered heated plates into the Antarctic ocean. These plates warmed some patches of seabed by 1°C and others by 2°C, which is the projected temperature increase over the next 100 years. After nine months of slightly warmer temperatures, the balance of species living in the areas was significantly disrupted. A type of marine worm grew on average 70 percent larger than normal under the warmer conditions. One species of bryozoan, a tiny colonial organism, flourished as well, ultimately crowding out other species. Gail Ashton, the researcher behind the study, tells us more about it and what it can tell us about the impact of warming oceans more broadly.

We want to better understand the implications of projected ocean warming on marine communities. We were excited to be able to warm the sea floor in situ and observe a whole community response.

One degree is projected as a likely scenario for ocean warming in 50 years, and therefore keeps the experiment within the realms of reality and ecological relevance. It also represents a small but significant jump for the animals. You can imagine that if we warmed by four degrees, just the shock of that immediate change may have affected them negatively, and we would not have known whether it was the shock or the absolute temperature that was driving responses.

Essentially, we placed heating elements within a plastic block on the seafloor. Imagine taking your electric stove to the bottom of the ocean and setting the temperature to +1. Then imagine the logistics and infrastructure that might be associated with that whilst working in Antarctica! Both the surface of the panels and a layer of water about 2mm thick from the panels is reliably warmed to the set temperature.

Nine months. We wanted to run the experiments for longer, but icebergs threatened to destroy the panels and associated results, so we decided to cut the experiment there.

We observed increased growth coverage by organisms in the 1°C and 2°C treatments. Results were most clear on the 1°C panels, and more variable in the 2°C ones.

Me, no. I was expecting to have to interrogate the results statistically to tease out any significant changes. I was not expecting such a visible difference. My co-authors who have spent longer working at polar latitudes did expect these differences, but again, not to the same degree.

It's difficult to extrapolate with confidence beyond the experimental setting. But, I would expect increased growth by organisms living on the seafloor. I would expect the composition of that community to change, with some winners and some losers. I expect these impacts to ripple up the food chain with changes in grazers, scavengers, and predators.

More broadly, the results add to increasing evidence of ecosystem level change with a one-degree temperature increase. This research points to more extensive change at the poles than we would have predicted.

The temperature increase is consistently projected for a 50-year timescale. So we may see these rates of increase on that same 50-year horizon. The reality is that we are already seeing increases in growth associated with ocean warming.
Who knows what other natural or anthropogenic drivers may also come into play in that timescale.

If you step back even further, seafloor communities are a vital part of ocean ecosystems, holding the bulk of biodiversity in the oceans and providing important functions like nutrient cycling, carbon capture, and habitat provision. If you value the functions that the ocean provides – whether that's your food, recreational opportunities, biodiversity, resilience to change, marine products – then it all affects you.
async def _status(self):
    # Drain any stale responses before issuing a fresh status request.
    while not self._response_status.empty():
        self._response_status.get_nowait()
    retries = 3
    response_status = ResponseStatus.FAILURE
    while retries:
        await self._get_status_command.async_send()
        try:
            response_status = await asyncio.wait_for(
                self._response_status.get(), TIMEOUT
            )
            return response_status
        except asyncio.TimeoutError:
            _LOGGER.error("Exception _status timed out, retries: %d", retries)
            retries -= 1
    # All retries timed out; report failure to the caller.
    return response_status
def vis_layer(model, layer, channel):
    """Visualize every channel (or neuron) of a layer; returns a stacked array."""
    num_channels = dla_lucid.LAYERS[layer][1]
    all_vis = []
    for i in range(num_channels):
        if channel:  # truthiness check instead of the non-idiomatic `is True`
            vis = vis_channel(model, layer, i)
        else:
            vis = vis_neuron(model, layer, i)
        all_vis.append(vis)
    return np.array(all_vis)
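A hedged usage sketch: everything except vis_layer's own signature is a placeholder here (the model loader and layer name are assumptions, not APIs confirmed by this file).

# Placeholder names throughout; only vis_layer above is real.
model = dla_lucid.load_model()                  # hypothetical loader
grid = vis_layer(model, 'conv2', channel=True)  # one visualization per channel
print(grid.shape)                               # e.g. (num_channels, H, W, 3)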