content
stringlengths
10
4.9M
TFW @qmagazineuk decide a person would CHOOSE to suffer through years of vertigo, nausea, occular migranes and seizures from Meineres Disease. RockandRoll is a lifestyle when you have lived your entire life making it. It's a weird business where songs and content can be misconstrued. Growing up in the public eye as an alcoholic was horrible. I survived it. I survived and have done my best to better myself for being ignorant and foolish but I am openly candid that I don't believe my songcraft should begin with a motive to aquire money or quest for fame. After 10 years of trying to grow into a life away from madness, from the chains of addiction, from chronic Menieres it's bizarre to be interviewed by some man who is polite to my face but then chooses to BLAME me for my disease. To ignore any of the amazing things I have been able to achieve. I have played carneige hall 3 times, I have toured the world over fighting severe compression in my ears and played 2 hour shows to thousands of people no matter if I was sick of not. I didn't cancel one show. I fought for myself. I'm 42 and when I am not playing guitar I am a person with friends and animals and a life. This article is horrible. What a horrible person who would have the balls to be such an asshole. I play shows when I am sick for anyone who has Menieres but can't get up. I think about people suffering from alcoholism and who choose to rise up from that. I work hard to be better. My struggle with Menieres is NOT my choice. It's a disease. My father has it. I talk to people who suffer and reach out to be helpful. I get up and I work. My supposed time away from music is characterized as some farce. I was sick for years. I was sober and fought hard to find treatment for constant vertigo and nausea and pain. Your journalist didn't fact check much. I dont live in NYC. Nor did I ever in the basement of @electricladystudios I was friends with Meg White who is an awesome person. 
You WANT me to be something more dramatic than I am. For instance... I was never onstage with @taylorswift - you doctored a photo to make it seem like I was... I wonder how she will feel about that. A photo posted by Ryan Adams (@misterryanadams) on Jan 17, 2017 at 3:28am PST
import { Component, OnInit } from "@angular/core"; import { FormBuilder, FormGroup, Validators } from "@angular/forms"; import { QuotesModel } from "app/core/models/quotes.model"; import { AdminLayoutComponent } from "app/layouts/admin-layout/admin-layout.component"; import { MatTableDataSource } from "@angular/material/table"; import { QuotesService } from "app/core/services/quotes.service"; import { ClientService } from "app/core/services/client.service"; import { MatDialog } from "@angular/material/dialog"; import { ModalDeleteUserComponent } from "app/partials/modal-delete-user/modal-delete-user.component"; import { ModalEditQuoteComponent } from "app/partials/modal-edit-quote/modal-edit-quote.component"; import { ModalAttentQuoteComponent } from "app/partials/modal-attent-quote/modal-attent-quote.component"; import { AttentionsService } from "app/core/services/attentions.service"; import { MatSnackBar } from "@angular/material/snack-bar"; import { ClientsModel } from "app/core/models/client.model"; import { debounceTime } from "rxjs-compat/operator/debounceTime"; import { TimeLocalService } from "app/core/services/time.service"; @Component({ selector: "app-user-profile", templateUrl: "./user-profile.component.html", styleUrls: ["./user-profile.component.css"], }) export class UserProfileComponent implements OnInit { public formSearch: FormGroup; public formScheduleQuote: FormGroup; public quotes = []; public clients: any; public clientsIds: any[]; public formScheduleQuoteErrors: QuotesModel = new QuotesModel(); public filteredOptions: any; public dateNow: Date; displayedColumns: string[] = [ "_id", "date_quote", "hour_quote", "state", "id_client", "edit", "delete", "atent", ]; public dataSource; public selectedDate: Date | null = null; public selectedHour: String | null = null; public hourAvaibles: string[] = [ "07:00", "07:30", "08:00", "08:30", "09:00", "09:30", "10:00", "10:30", "11:00", "11:30", "12:00", "12:30", "13:00", "13:30", "14:00", "14:30", 
"15:00", "15:30", "16:00", "16:30", "17:00", ]; constructor( public father: AdminLayoutComponent, private formBuilder: FormBuilder, private quotesService: QuotesService, private clientService: ClientService, private timeLocalService: TimeLocalService, private attentionsServices: AttentionsService, public dialog: MatDialog, private _snackBar: MatSnackBar ) {} ngOnInit() { this.GetQuotes(); this.GetAllClients(); this.GetDateNow(); this.CreateForm(); } public applyFilter(event: Event) { const filterValue = (event.target as HTMLInputElement).value; this.dataSource.filter = filterValue.trim().toLowerCase(); } public CreateForm() { this.formSearch = this.formBuilder.group({ number_id: [ { value: null, disabled: false }, [Validators.required, this.father.ValidIDFormControl], ], }); this.formScheduleQuote = this.formBuilder.group({ date_quote: [{ value: null, disabled: false }, [Validators.required]], hour_quote: [{ value: null, disabled: false }, [Validators.required]], state: [{ value: "P", disabled: false }, [Validators.required]], id_client: [ { value: null, disabled: false }, [Validators.required, this.father.ValidIDFormControl], ], }); this.formScheduleQuote.controls["id_client"].valueChanges.subscribe( (response: string) => { if (response && response.length) { this.filterData(response); } else { this.filteredOptions = []; } } ); } public filterData(enteredData) { this.filteredOptions = this.clients.filter((item) => { return ( item.number_id.toLowerCase().indexOf(enteredData.toLowerCase()) > -1 ); }); } public GetDateNow() { this.timeLocalService.getDateNow().subscribe((data) => { console.log(data); // this.dateNow = new Date(data.data); }); } public GetAllClients() { this.clientService.getClients().subscribe((data) => { this.clients = data.data; }); } public SelectedDate(date: any) { this.selectedDate = date; this.formScheduleQuote.controls["date_quote"].setValue(this.selectedDate); } public SelectHour(hour: any) { this.selectedHour = hour; 
this.formScheduleQuote.controls["hour_quote"].setValue(this.selectedHour); } public GetQuotes() { this.quotesService.getQuotes().subscribe((data) => { this.quotes = data.data; this.dataSource = new MatTableDataSource(this.quotes); }); } public EditQuotes(element: QuotesModel) { const dialogRef3 = this.dialog.open(ModalEditQuoteComponent, { disableClose: true, maxWidth: "600px", data: element, }); dialogRef3.afterClosed().subscribe((result) => { if (result !== false) { this.quotesService.updateQuotes(result).subscribe((data) => { if (data) { this.openSnackBar(data.mensaje, "Continuar"); } if (data.transaccion) { this.GetQuotes(); } }); } }); } public openSnackBar(message: string, action: string) { this._snackBar.open(message, action, { duration: 5000, }); } public DeleteQuotes(element: QuotesModel) { this.quotesService.deleteQuotes(element).subscribe((data) => { if (data) { this.openSnackBar(data.mensaje, "Continuar"); } if (data.transaccion) { this.GetQuotes(); } }); } public CreateQuotes() { if (this.formScheduleQuote.valid) { this.quotesService .createQuotes(this.formScheduleQuote.value) .subscribe((data) => { if (data) { this.openSnackBar(data.mensaje, "Continuar"); } if (data.transaccion) { this.GetQuotes(); } }); } else { this.formScheduleQuote.markAllAsTouched(); } } public CreateAttention(element) { const dialogRef = this.dialog.open(ModalAttentQuoteComponent, { disableClose: true, maxWidth: "600px", data: element, }); dialogRef.afterClosed().subscribe((result) => { if (result !== false) { this.attentionsServices.createAttentions(result).subscribe((data) => { if (data.transaccion) { this.quotesService .updateStateQuotes({ id_quote: result.id_quote }) .subscribe((data1) => { if (data1) { this.openSnackBar(data1.mensaje, "Continuar"); } if (data1.transaccion) { this.GetQuotes(); } }); } }); } }); } }
// buildTrafficShiftingStatus looks at the current state of a cluster regarding // the progression of traffic shifting. It's concerned with how many of the // available pods have been labeled to receive traffic, how many are actually // ready according to the state of the Endpoints object, and the currently // achieved weight for a release. If the current state is different from the // desired one, it also returns which pods need to receive which labels to move // forward. func buildTrafficShiftingStatus( cluster, appName, releaseName string, clusterReleaseWeights clusterReleaseWeights, endpoints *corev1.Endpoints, appPods []*corev1.Pod, ) trafficShiftingStatus { releaseTargetWeights, ok := clusterReleaseWeights[cluster] if !ok { return trafficShiftingStatus{} } releaseSelector := labels.Set(map[string]string{ shipper.AppLabel: appName, shipper.ReleaseLabel: releaseName, }).AsSelector() podsByTrafficStatus, podsInRelease, podsReady, podsNotReady := summarizePods( appPods, endpoints, releaseSelector) releaseTargetWeight := releaseTargetWeights[releaseName] totalTargetWeight := uint32(0) for _, weight := range releaseTargetWeights { totalTargetWeight += weight } podsInApp := len(appPods) podsLabeledForTraffic := len(podsByTrafficStatus[shipper.Enabled]) podsToLabel := calculateReleasePodTarget( podsInRelease, releaseTargetWeight, podsInApp, totalTargetWeight) ready := podsReady == podsToLabel var podsToShift map[string][]*corev1.Pod if !ready { podsToShift = buildPodsToShift(podsByTrafficStatus, podsToLabel) } var achievedPercentage float64 if podsInApp == 0 { achievedPercentage = 0 } else { achievedPercentage = float64(podsReady) / float64(podsInApp) } achievedWeight := uint32(math.Round(achievedPercentage * float64(totalTargetWeight))) return trafficShiftingStatus{ achievedTrafficWeight: achievedWeight, podsReady: podsReady, podsNotReady: podsNotReady, podsLabeled: podsLabeledForTraffic, ready: ready, podsToShift: podsToShift, } }
import maya.utils
import functools


def execute_deferred(func):
    """Decorator that schedules the wrapped function via Maya's deferred queue.

    The decorated call is handed to ``maya.utils.executeDeferred`` so it runs
    on Maya's main thread when the application is idle.

    Args:
        func: callable to defer.

    Returns:
        The wrapped callable; calling it returns whatever
        ``maya.utils.executeDeferred`` returns (not ``func``'s own result,
        since execution happens later).
    """

    # BUG FIX: functools.wraps preserves func's __name__/__doc__ so the
    # decorated function remains introspectable.
    @functools.wraps(func)
    def inner(*args, **kwargs):
        return maya.utils.executeDeferred(func, *args, **kwargs)

    return inner
def __check_for_resolution(self):
    """Validate that every raster in ``self.items`` shares one resolution.

    Raises:
        ValueError: if any raster's ``res`` differs from the first one's.
    """
    # An empty stack is trivially consistent; avoid IndexError on items[0].
    if not self.items:
        return
    if not all(r.res == self.items[0].res for r in self.items[1:]):
        # BUG FIX: message previously misspelled "spatial" as "spacial".
        raise ValueError("Cannot stack rasters with different spatial resolution")
<reponame>gurumobile/RemoteControlGalileoExam
// Created by <NAME> on 22/12/2011.
// Copyright (c) 2011 Swift Navigation. All rights reserved.
//
// Implements the NetworkControllerDelegate using a GKSession. Also delegates handling of incoming packets.

#import <UIKit/UIKit.h>
#import "GKSessionManager.h"
#import "GalileoCommon.h"

@interface GKNetController : NSObject <SessionManagerGameDelegate, NetworkControllerDelegate>
{
    // Session manager owning the underlying GKSession; weak to avoid a retain cycle.
    __weak GKSessionManager *manager;

    // Vars for ping/pong
    UInt16 pingCounter;              // sequence counter for outgoing pings
    NSMutableDictionary *pingTable;  // keyed by ping id — NOTE(review): presumably maps to send timestamps; confirm in the .m file
}

// Designated initializer; retains no strong reference to aManager.
- (id) initWithManager: (GKSessionManager *) aManager;

// Connection start/end code is delegated
@property (nonatomic, weak) id <ConnectionStateResponderDelegate> connectionStateResponder;

@end
/**
 * Repository class for ACR model.
 *
 * @author Dominik Frantisek Bucik <[email protected]>
 */
@Repository
@Transactional(value = "defaultTransactionManager")
public class PerunDeviceCodeAcrRepository {

    @PersistenceContext(unitName = "defaultPersistenceUnit")
    private EntityManager manager;

    /**
     * Finds the non-expired ACR record for the given device code.
     *
     * @param deviceCode device code to look up
     * @return the active record, or {@code null} when none exists
     */
    public DeviceCodeAcr getActiveByDeviceCode(String deviceCode) {
        TypedQuery<DeviceCodeAcr> query = manager.createNamedQuery(
                DeviceCodeAcr.GET_ACTIVE_BY_DEVICE_CODE, DeviceCodeAcr.class);
        query.setParameter(DeviceCodeAcr.PARAM_DEVICE_CODE, deviceCode);
        query.setParameter(Acr.PARAM_EXPIRES_AT, now());
        try {
            return query.getSingleResult();
        } catch (NoResultException e) {
            return null;
        }
    }

    /**
     * Finds the ACR record for the given user code (regardless of expiration).
     *
     * @param userCode user code to look up
     * @return the record, or {@code null} when none exists
     */
    public DeviceCodeAcr getByUserCode(String userCode) {
        TypedQuery<DeviceCodeAcr> query = manager.createNamedQuery(
                DeviceCodeAcr.GET_BY_USER_CODE, DeviceCodeAcr.class);
        query.setParameter(DeviceCodeAcr.PARAM_USER_CODE, userCode);
        try {
            return query.getSingleResult();
        } catch (NoResultException e) {
            return null;
        }
    }

    /**
     * Finds the non-expired ACR record by its primary key.
     *
     * @param id primary key
     * @return the record, or {@code null} when none exists
     */
    public DeviceCodeAcr getById(Long id) {
        TypedQuery<DeviceCodeAcr> query = manager.createNamedQuery(
                DeviceCodeAcr.GET_BY_ID, DeviceCodeAcr.class);
        query.setParameter(DeviceCodeAcr.PARAM_ID, id);
        query.setParameter(DeviceCodeAcr.PARAM_EXPIRES_AT, now());
        try {
            return query.getSingleResult();
        } catch (NoResultException e) {
            return null;
        }
    }

    /**
     * Stores the given ACR unless an active one already exists for its device
     * code, in which case the existing record is returned.
     *
     * <p>BUG FIX: the previous implementation caught {@link NoResultException},
     * but {@link #getActiveByDeviceCode(String)} never throws it — it returns
     * {@code null}. As a result new entities were never merged and callers
     * received {@code null}. The lookup result is now checked for {@code null}
     * instead.
     *
     * @param acr entity to persist
     * @return the already-active record, or the newly merged one
     */
    @Transactional
    public DeviceCodeAcr store(DeviceCodeAcr acr) {
        DeviceCodeAcr existing = getActiveByDeviceCode(acr.getDeviceCode());
        if (existing != null) {
            return existing;
        }
        DeviceCodeAcr merged = manager.merge(acr);
        manager.flush();
        return merged;
    }

    /**
     * Removes the record with the given id; a no-op when it does not exist.
     *
     * @param id primary key of the record to remove
     */
    @Transactional
    public void remove(Long id) {
        DeviceCodeAcr acr = getById(id);
        if (acr != null) {
            manager.remove(acr);
        }
    }

    /** Bulk-deletes every record whose expiration is in the past. */
    @Transactional
    public void deleteExpired() {
        Query query = manager.createNamedQuery(DeviceCodeAcr.DELETE_EXPIRED);
        query.setParameter(DeviceCodeAcr.PARAM_EXPIRES_AT, now());
        query.executeUpdate();
    }

    // Current time in epoch milliseconds, matching the stored expiration format.
    private long now() {
        return Instant.now().toEpochMilli();
    }
}
/* * Copyright 2019 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scheduler import ( "io" "sync" "github.com/mhelmich/calvin/pb" "github.com/mhelmich/calvin/ulid" log "github.com/sirupsen/logrus" "google.golang.org/grpc" ) type Scheduler struct { sequencerChan <-chan *pb.TransactionBatch readyTxnsChan chan<- *pb.Transaction doneTxnChan <-chan *pb.Transaction lockMgr *lockManager lowIsolationReads *sync.Map logger *log.Entry } func NewScheduler(sequencerChan chan *pb.TransactionBatch, readyTxnsChan chan<- *pb.Transaction, doneTxnChan <-chan *pb.Transaction, srvr *grpc.Server, logger *log.Entry) *Scheduler { lowIsolationReads := &sync.Map{} s := &Scheduler{ sequencerChan: sequencerChan, readyTxnsChan: readyTxnsChan, doneTxnChan: doneTxnChan, lockMgr: newLockManager(), lowIsolationReads: lowIsolationReads, logger: logger, } ss := newServer(sequencerChan, lowIsolationReads, logger) pb.RegisterLowIsolationReadServer(srvr, ss) go s.runLocker() go s.runReleaser() return s } func (s *Scheduler) runLocker() { for { batch, ok := <-s.sequencerChan if !ok { c := s.readyTxnsChan s.readyTxnsChan = nil close(c) s.logger.Warningf("Stopping lock locker") return } else if batch == nil { s.logger.Warningf("Received nil txn batch") } for idx := range batch.Transactions { txn := batch.Transactions[idx] if log.GetLevel() == log.DebugLevel { id, _ := ulid.ParseIdFromProto(txn.Id) s.logger.Debugf("getting locks for txn [%s]", id.String()) } numLocksNotAcquired := s.lockMgr.lock(txn) 
if numLocksNotAcquired == 0 { if log.GetLevel() == log.DebugLevel { id, _ := ulid.ParseIdFromProto(txn.Id) s.logger.Debugf("txn [%s] became ready\n", id.String()) } s.readyTxnsChan <- txn } } } } func (s *Scheduler) runReleaser() { for { txn, ok := <-s.doneTxnChan if !ok { s.logger.Warningf("Stopping lock releaser") return } if log.GetLevel() == log.DebugLevel { id, _ := ulid.ParseIdFromProto(txn.Id) s.logger.Debugf("txn [%s] became done\n", id.String()) } // in addition to the regular stuff, low iso reads need // the response out of the txn object to be sent on the response channel if txn.IsLowIsolationRead { id, _ := ulid.ParseIdFromProto(txn.Id) txnID := id.String() v, ok := s.lowIsolationReads.Load(txnID) if !ok { s.logger.Panicf("can't find low isolation read channel for txn [%s]", txnID) } c := v.(chan *pb.LowIsolationReadResponse) c <- txn.LowIsolationReadResponse close(c) s.lowIsolationReads.Delete(txnID) } newOwners := s.lockMgr.release(txn) for idx := range newOwners { if log.GetLevel() == log.DebugLevel { id, _ := ulid.ParseIdFromProto(newOwners[idx].Id) s.logger.Debugf("txn [%s] became ready\n", id.String()) } if s.readyTxnsChan != nil { s.readyTxnsChan <- newOwners[idx] } } } } func (s *Scheduler) LockChainToASCII(out io.Writer) { s.lockMgr.lockChainToASCII(out) }
<gh_stars>1-10 #include "ExtDisplay.h" #include <stdlib.h> #include <string.h> #include "src/gfxlatin2/gfxlatin2.h" void ExtDisplay::setBbFullWidth() { this->boundingBoxWidth = this->displayWidth - this->posX - 1; } void ExtDisplay::setBbRightMargin( int right ) { this->boundingBoxWidth = this->displayWidth - this->posX - 1 - right; if( this->boundingBoxWidth < 10 ) { this->boundingBoxWidth = 10; } } void ExtDisplay::setPos( int x, int y ) { this->posX = x; this->posY = y; this->setBbFullWidth(); } void ExtDisplay::init( GxEPD2_GFX* display, raLogger * logger ) { this->display = display; this->displayWidth = display->width(); this->displayHeight = display->height(); this->logger = logger; } #define BUFFER_SIZE 512 #define MAX_WORD_SIZE 20 char * curPos; char oneWord[MAX_WORD_SIZE+2]; char delimiter; void initParser( char * text ) { curPos = text; delimiter = 0; } boolean getNextWord() { int outPos = 0; while(true) { char c = *curPos; if( c==0 ) { return outPos>0; } curPos++; if( c==' ' || c==',' || c=='.' ) { oneWord[outPos] = c; oneWord[++outPos] = 0; delimiter = c; break; } oneWord[outPos] = c; oneWord[++outPos] = 0; if( outPos==MAX_WORD_SIZE) { oneWord[outPos] = '-'; oneWord[++outPos] = 0; delimiter = '-'; break; } } return true; } #define DUMP_DEBUG_INFO 0 /** * Vytiskne UTF-8 text na displej vcetne korektniho word-wrapu. * Tiskne se na nastavenou pozici X,Y, ktera je LEVY DOLNI roh prvniho pismene. * x_offset se pouzije jen pro prvni radek. * * Tisk je omezeny nastavenym bounding boxem. * * Vraci aktualni pozici X. 
* necha posX = puvodni posX; nastavi posY = posledni radek s textem */ int ExtDisplay::printUTF8( const char * text, int x_offset ) { char buffer[BUFFER_SIZE]; strncpy( buffer, text, BUFFER_SIZE ); buffer[BUFFER_SIZE-1] = 0; utf8tocp( buffer ); if( DUMP_DEBUG_INFO ) this->logger->log( "[%s]", buffer ); if( DUMP_DEBUG_INFO ) this->logger->log( " pos=%d,%d bbW=%d xo=%d", this->posX, this->posY, this->boundingBoxWidth, x_offset ); int x,y; x = this->posX + x_offset; y = this->posY; initParser( buffer ); while( true ) { if( ! getNextWord() ) break; if( DUMP_DEBUG_INFO ) this->logger->log( " # '%s'", oneWord ); int16_t x1, y1; uint16_t w, h; this->display->getTextBounds( (const char*)oneWord, x, y, &x1, &y1, &w, &h ); if( DUMP_DEBUG_INFO ) this->logger->log( " max pos %d,%d size %d,%d", x1,y1, w,h ); if( ( x + w ) <= ( this->posX + this->boundingBoxWidth ) ) { this->display->setCursor( x, y ); this->display->print( oneWord ); x += w; if( DUMP_DEBUG_INFO ) this->logger->log( " pokracuju, nova souradnice %d,%d", x,y ); } else { if( DUMP_DEBUG_INFO ) this->logger->log( " @ %d,%d, w=%d", x,y, w ); x = this->posX; y += this->vyskaRadku; // musime znovu, protoze ve vyjimecnem pripade to zalomi text samo a vrati to w treba 392 bodu this->display->getTextBounds( (const char*)oneWord, x, y, &x1, &y1, &w, &h ); if( DUMP_DEBUG_INFO ) this->logger->log( " @ %d,%d, w=%d", x,y, w ); this->display->setCursor( x, y ); this->display->print( oneWord ); x += w; if( DUMP_DEBUG_INFO ) this->logger->log( " novy radek, nova souradnice %d,%d", x,y ); } if( delimiter==' ' ) { x += this->sirkaMezery; } } this->posY = y; return x - this->posX; } void ExtDisplay::setFont( const GFXfont * font ) { this->display->setFont( font ); this->vyskaRadku = font->yAdvance; // predpoklada, ze mezera je prvni znak this->sirkaMezery = font->glyph[0].xAdvance; //D/ this->logger->log( "setFont: mezera %d, radek %d", this->sirkaMezery, this->vyskaRadku ); }
Low power high speed area efficient Error Tolerant Adder using gate diffusion input method In digital VLSI circuits, perfectly accurate outputs are not always needed. So designers have started to design error-tolerant circuits which provide good-enough output for computation. On the basis of this fact, an error tolerant adder (ETA) is designed which provides a way to achieve good power and speed performance. In this paper, an emerging logic style of circuit design, the gate diffusion input (GDI) technique, is adopted to design a 32-bit ETA. The proposed design reduces area, in terms of transistor count, to a great extent, and also improves delay and power performance. Simulation results have shown that the proposed design achieves a 38% improvement in Power-Delay-Product when compared to the existing design.
// CheckDirExists checked first is a directory exists func CheckDirExists(dir string) error { if _, err := client.ReadDir(dir); err != nil { if err := client.MkdirAll(dir, 0644); err != nil { return err } } return nil }
/**
 * Applies a pick-up's stat boost to the unit located at (row, col): plays the
 * heal animation at the unit's position and boosts its stats with the
 * pick-up's buffs. Does nothing when the cell holds no Unit.
 *
 * @param obj the pick-up definition that can boost a unit; must be a non-null
 *            {@code PickupObjectDef}
 * @param row the row of the targeted unit
 * @param col the column of the targeted unit
 */
@Override
public void applyAction(Object obj, int row, int col) {
    // BUG FIX: JUnit's Assert was used in production code; validate with
    // explicit runtime checks instead.
    if (obj == null) {
        throw new NullPointerException("pick-up definition must not be null");
    }
    if (!(obj instanceof PickupObjectDef)) {
        throw new IllegalArgumentException(
                "expected a PickupObjectDef but got " + obj.getClass().getName());
    }
    GameObject target = myController.getObject(row, col);
    if (target instanceof Unit) {
        Unit unit = (Unit) target;
        PickupObjectDef pick = (PickupObjectDef) obj;
        myController.displayAnimation(Constant.healAnimationTag, unit.x, unit.y);
        unit.boostStats(pick.buffs);
    }
}
. . “Get undressed to the nines” is how World Naked Bike Ride is advertising the bare-bottomed bicycle bonanza. The WNBR is a world-wide event that will “celebrate free-body culture, bicycling as an alternative to cars and a generally greener way of living.” There was a ride in March of this year, and there will be another on July 28 if you miss tomorrow’s ride. Date: Saturday, 9 June 2012. Time: 11 AM. Location: North end of Justin Herman Plaza (JHP) near Market St. and the Embarcadero, across from the Ferry Building. Meet on the north or east side of the large fountain next to the large grassy area. Schedule 11 AM : Gather. Same earlier time as last year to take advantage of the midday warmth during the ride. Undress. Apply sunscreen. Group consensus on route details. Bike checkout & attaching signs / body painting & slogans. For June plan to paint your body at home or bring supplies. (Lipstick works in a pinch as long as you like shades of red.) : Gather. Same earlier time as last year to take advantage of the midday warmth during the ride. Undress. Apply sunscreen. Group consensus on route details. Bike checkout & attaching signs / body painting & slogans. For June plan to paint your body at home or bring supplies. (Lipstick works in a pinch as long as you like shades of red.) Noon : Ride starts (approx). Depending on the mood of the group and the amount of media coverage we are getting the ride may start a bit after noon. : Ride starts (approx). Depending on the mood of the group and the amount of media coverage we are getting the ride may start a bit after noon. 4 pm (approx): Main ride finishes. (approx): Main ride finishes. Skaters are welcome! More info here. The weather forecast is “sunny, highs in the 50s to upper 60s. West winds 5 to 15 mph.” Could be worse. In San Francisco nudity is not illegal, but if you do want to sit down somewhere, be sure to bring a towel to sit on.
/** * <p><b>Purpose</b>: Build a DOM from SAX events.</p> */ public class SAXDocumentBuilder implements ExtendedContentHandler, LexicalHandler { protected Document document; protected List<Node> nodes; protected XMLPlatform xmlPlatform; protected Map namespaceDeclarations; protected StrBuffer stringBuffer; protected Locator locator; public SAXDocumentBuilder() { super(); nodes = new ArrayList<Node>(); xmlPlatform = XMLPlatformFactory.getInstance().getXMLPlatform(); stringBuffer = new StrBuffer(); namespaceDeclarations = new HashMap(); } public Document getDocument() { return document; } public Document getInitializedDocument() throws SAXException { if (document == null) { try { document = xmlPlatform.createDocument(); nodes.add(document); } catch (Exception e) { throw new SAXException(e); } } return document; } @Override public void setDocumentLocator(Locator locator) { this.locator = locator; } @Override public void startDocument() throws SAXException { try { document = xmlPlatform.createDocument(); nodes.add(document); } catch (Exception e) { throw new SAXException(e); } } @Override public void endDocument() throws SAXException { nodes.remove(nodes.size() - 1); } @Override public void startPrefixMapping(String prefix, String uri) throws SAXException { if(null == prefix) { prefix = Constants.EMPTY_STRING; } if(null == uri) { uri = Constants.EMPTY_STRING; } if (namespaceDeclarations == null) { namespaceDeclarations = new HashMap(); } namespaceDeclarations.put(prefix, uri); } @Override public void endPrefixMapping(String prefix) throws SAXException { } @Override public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException { if (null != namespaceURI && namespaceURI.length() == 0) { namespaceURI = null; } Element element = getInitializedDocument().createElementNS(namespaceURI, qName); Node parentNode = nodes.get(nodes.size()-1); if ((stringBuffer.length() > 0) && !(nodes.size() == 1)) { Text text = 
getInitializedDocument().createTextNode(stringBuffer.toString()); parentNode.appendChild(text); stringBuffer.reset(); } appendChildNode(parentNode, element); nodes.add(element); if (namespaceDeclarations != null) { Iterator<Entry<String, String>> namespaceEntries = namespaceDeclarations.entrySet().iterator(); String prefix; String uri; while (namespaceEntries.hasNext()) { Entry<String, String> nextEntry = namespaceEntries.next(); prefix = nextEntry.getKey(); uri = nextEntry.getValue(); boolean prefixEmpty = prefix.length() == 0; String elemNamespaceURI = element.getNamespaceURI(); boolean elementNamespaceNull = elemNamespaceURI == null; boolean elementNamespaceEmpty = elemNamespaceURI != null && elemNamespaceURI.length() == 0; boolean isRootElement = element.getParentNode().getNodeType() == Node.DOCUMENT_NODE; if (prefixEmpty && isRootElement && (elementNamespaceEmpty || elementNamespaceNull)) { // Don't add namespace } else { addNamespaceDeclaration(element, prefix, uri); } } namespaceDeclarations = null; } int numberOfAttributes = atts.getLength(); String attributeNamespaceURI; for (int x = 0; x < numberOfAttributes; x++) { attributeNamespaceURI = atts.getURI(x); if (null != attributeNamespaceURI && attributeNamespaceURI.length() == 0) { attributeNamespaceURI = null; } // Handle case where prefix/uri are not set on an xmlns prefixed attribute if (attributeNamespaceURI == null && (atts.getQName(x).startsWith(javax.xml.XMLConstants.XMLNS_ATTRIBUTE + ":") || atts.getQName(x).equals(javax.xml.XMLConstants.XMLNS_ATTRIBUTE))) { attributeNamespaceURI = javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI; } String value = atts.getValue(x); element.setAttributeNS(attributeNamespaceURI, atts.getQName(x), value); } } @Override public void endElement(String namespaceURI, String localName, String qName) throws SAXException { Element endedElement = (Element)nodes.remove(nodes.size()-1); if (stringBuffer.length() > 0) { Text text = 
getInitializedDocument().createTextNode(stringBuffer.toString()); endedElement.appendChild(text); stringBuffer.reset(); } } @Override public void characters(char[] ch, int start, int length) throws SAXException { stringBuffer.append(ch, start, length); } @Override public void characters(CharSequence characters) { stringBuffer.append(characters.toString()); } @Override public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { } @Override public void processingInstruction(String target, String data) throws SAXException { ProcessingInstruction pi = getInitializedDocument().createProcessingInstruction(target, data); Node parentNode = nodes.get(nodes.size() -1); parentNode.appendChild(pi); } @Override public void skippedEntity(String name) throws SAXException { } protected void addNamespaceDeclaration(Element parentElement, String prefix, String uri) { if (prefix.length() == 0 || javax.xml.XMLConstants.XMLNS_ATTRIBUTE.equals(prefix)) { //handle default/target namespaces parentElement.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE, uri); } else { parentElement.setAttributeNS(javax.xml.XMLConstants.XMLNS_ATTRIBUTE_NS_URI, javax.xml.XMLConstants.XMLNS_ATTRIBUTE + Constants.COLON + prefix, uri); } } public void appendChildNode(Node parentNode, Node childNode) { parentNode.appendChild(childNode); } @Override public void setNil(boolean isNil) {} @Override public void startDTD(String name, String publicId, String systemId) throws SAXException {} @Override public void endDTD() throws SAXException {} @Override public void startEntity(String name) throws SAXException {} @Override public void endEntity(String name) throws SAXException {} @Override public void startCDATA() throws SAXException { CDATASection cdata = document.createCDATASection(null); Node parentNode = nodes.get(nodes.size() -1); parentNode.appendChild(cdata); } @Override public void endCDATA() throws SAXException { CDATASection cdata 
= (CDATASection)nodes.get(nodes.size()-1).getFirstChild(); if (stringBuffer.length() > 0) { cdata.setData(stringBuffer.toString()); stringBuffer.reset(); } } @Override public void comment(char[] ch, int start, int length) throws SAXException {} }
#include <cassert> #include <iostream> #include <vector> #include <algorithm> #include <chrono> #include <random> #include <sstream> #include <thread> #include <fstream> #include <iomanip> #include <iterator> #include <climits> #include <omp.h> #define FOR(i,a,b) for(int i=a;i<b;i++) #define rep(i,b) FOR(i,0,b) const int MaxThreadNum=224; const long long MaxDataSize=10000000000; const long long MaxDataNum=4294967295; const int MaxKisuu=256; std::vector<int> Dataset; long long Datasize; static const int kRadixBits = 8; static const size_t kInsertSortThreshold = 0; static const int kRadixMask = (1 << kRadixBits) - 1; static const int kRadixBin = 1 << kRadixBits; template<class D> inline int determineDigitBucket(int stage,D num){ return ((num>>(8*stage))&kRadixMask); } template< class _Type> inline void _swap(_Type &a, _Type&b) { _Type temp = b; b = a; a = temp; } void report_num_threads(int level) { #pragma omp single { printf("Level %d: number of threads in the team - %d\n", level, omp_get_num_threads()); } } template<class T> bool compare(const T &x,const T &y){ return x < y; } template <class RandomIt> inline void insert_sort_core_(RandomIt s, RandomIt e) { for (RandomIt i = s + 1; i < e; ++i) { if (compare(*i, *(i - 1))) { RandomIt j; auto tmp = *i; *i = *(i - 1); for (j = i - 1; j > s && compare(tmp, *(j - 1)); --j) { *j = *(j - 1); } *j = tmp; } } } template<int kth_byte,class RandomIt> inline void PARADIS_core(RandomIt s,RandomIt t,RandomIt begin_itr,int processes=1){ long long cnt[MaxKisuu]={0}; long long elenum=distance(s,t); long long start=distance(begin_itr,s); //assert(start>=0);assert(elenum>=0); //step1 //assert(processes>0); long long part=elenum/processes; long long res=elenum%processes; long long localHists[MaxThreadNum][MaxKisuu]; long long gh[MaxKisuu],gt[MaxKisuu],starts[MaxKisuu],ends[MaxKisuu]; long long ph[MaxThreadNum][MaxKisuu]; long long pt[MaxThreadNum][MaxKisuu]; long long SumCi=elenum; long long pfp[processes+1]; int var_p=processes; 
#pragma omp parallel num_threads(processes) { int th=omp_get_thread_num(); #pragma omp for rep(i,kRadixBin){ rep(t,processes)localHists[t][i]=0; } #pragma omp barrier #pragma omp for for(int i=start;i<start+elenum;i++){ int digit=determineDigitBucket(kth_byte,*(begin_itr+i)); localHists[th][digit]++; } #pragma omp barrier #pragma omp for for(int i=0;i<kRadixBin;i++){ for(int j=0;j<processes;j++){ cnt[i]+=localHists[j][i]; } } #pragma omp barrier #pragma omp single { gh[0]=start; gt[0]=gh[0]+cnt[0]; starts[0]=gh[0]; } //step2 #pragma omp single for(int i=1;i<kRadixBin;i++){ //calc ghi gh[i]=gh[i-1]+cnt[i-1]; //calc gti gt[i]=gh[i]+cnt[i]; starts[i]=gh[i]; } #pragma omp barrier //step3 while(SumCi!=0){ #pragma omp for for(int ii=0;ii<processes;ii++){ int pID=omp_get_thread_num(); for(int i=0;i<kRadixBin;i++){ long long part=(long long)(gt[i]-gh[i])/(long long)var_p; long long res=(long long)(gt[i]-gh[i])%(long long)(var_p); if(pID<var_p-1){ ph[pID][i]=part*pID+gh[i]; pt[pID][i]=part*(pID+1LL)+gh[i]; }else{ ph[pID][i]=part*pID+gh[i]; pt[pID][i]=part*(pID+1LL)+gh[i]+res; } } for(int i=0;i<kRadixBin;i++){ long long head=ph[pID][i]; while(head<pt[pID][i]){ auto v=*(begin_itr+head); int k=determineDigitBucket(kth_byte,v); while(k!=i&&ph[pID][k]<pt[pID][k]){ _swap(v,*(begin_itr+(int)ph[pID][k]));ph[pID][k]++; k=determineDigitBucket(kth_byte,v); } if(k==i){ *(begin_itr+head)=*(begin_itr+ph[pID][i]);head++; *(begin_itr+ph[pID][i])=v;ph[pID][i]++; }else{ *(begin_itr+head)=v;head++; } } } }//end of omp permute #pragma omp single { SumCi=0; long long pfpN=kRadixBin/var_p; long long pfpM=kRadixBin%var_p; pfp[0]=0LL; long long pfpMR=0LL; for(long long i=1LL;i<var_p+1LL;i++){ if(pfpMR<pfpM)pfpMR++; pfp[i]=i*pfpN+pfpMR; } } #pragma omp barrier #pragma omp for for(int k=0;k<processes;k++){ for(long long i=pfp[k];i<pfp[k+1];i++){ long long tail=gt[i]; { for(int pID=0;pID<processes;pID++){ long long head=ph[pID][i]; while(head<pt[pID][i]&&head<tail){ int v=*(begin_itr+head);head++; 
if(determineDigitBucket(kth_byte,v)!=i){ while(head<=tail){ tail--; int w=*(begin_itr+tail); if(determineDigitBucket(kth_byte,w)==i){ *(begin_itr+(head-1))=w; *(begin_itr+tail)=v; break; } } } } } } gh[i]=tail; } } #pragma omp barrier #pragma omp single { int prevSumCi=SumCi; SumCi-0; for(int i=0;i<kRadixBin;i++){ SumCi+=(gt[i]-gh[i]); } } #pragma omp barrier }//end of while }//end of omp2 if(kth_byte>0){ #pragma omp parallel num_threads(processes) #pragma omp single { for(int i=0;i<kRadixBin;i++){ int nextStageThreads=1; nextStageThreads=processes*(cnt[i]*(log(cnt[i])/log(kRadixBin))/(elenum*(log(elenum)/log(kRadixBin)))); if(cnt[i]>64LL){ #pragma omp task PARADIS_core<(kth_byte > 0 ? (kth_byte - 1) : 0)>(begin_itr+starts[i],begin_itr+(starts[i]+cnt[i]),begin_itr,std::max(nextStageThreads,1)); }else if(cnt[i]>1){ insert_sort_core_(begin_itr+starts[i],begin_itr+(starts[i]+cnt[i])); //std::sort(begin_itr+starts[i],begin_itr+(starts[i]+cnt[i])); } } #pragma omp taskwait } } } template<class RandomIt> inline void PARADIS(RandomIt s,RandomIt t,int threadNum){ const size_t vsize=sizeof(typename std::iterator_traits<RandomIt>::value_type); PARADIS_core<vsize-1>(s,t,s,threadNum); }
/* Print the first n numbers whose decimal digits are only 5 and 6,
 * in breadth-first order (shorter numbers first, 5 before 6). */
void printNumbers(int n)
{
    queue<string> pending;
    pending.push("5");
    pending.push("6");
    int emitted = 0;
    while (emitted < n) {
        string head = pending.front();
        pending.pop();
        cout << head << endl;
        // Each printed number spawns its two one-digit-longer successors.
        pending.push(head + "5");
        pending.push(head + "6");
        ++emitted;
    }
}
//private static final Logger log = LogManager.getLogger(VisualizrTest.class); @Test public void testVisualizr() { }
<reponame>lkadalski/docx-rs use std::io::Read; use xml::reader::{EventReader, XmlEvent}; use super::*; use crate::reader::{FromXML, ReaderError}; use std::str::FromStr; impl FromXML for WebSettings { fn from_xml<R: Read>(reader: R) -> Result<Self, ReaderError> { let mut parser = EventReader::new(reader); let mut settings = Self::default(); loop { let e = parser.next(); match e { Ok(XmlEvent::StartElement { attributes, name, .. }) => { let e = XMLElement::from_str(&name.local_name).unwrap(); dbg!(&e); if let XMLElement::Div = e { if let Ok(div) = Div::read(&mut parser, &attributes) { settings.divs.push(div); } } } Ok(XmlEvent::EndElement { name, .. }) => { let e = XMLElement::from_str(&name.local_name).unwrap(); if let XMLElement::WebSettings = e { break; } } Ok(XmlEvent::EndDocument { .. }) => break, Err(_) => return Err(ReaderError::XMLReadError), _ => {} } } Ok(settings) } }
Impact of Community-Based DOT on Tuberculosis Treatment Outcomes: A Systematic Review and Meta-Analysis Background Poor adherence to tuberculosis (TB) treatment can lead to prolonged infectivity and poor treatment outcomes. Directly observed treatment (DOT) seeks to improve adherence to TB treatment by observing patients while they take their anti-TB medication. Although community-based DOT (CB-DOT) programs have been widely studied and promoted, their effectiveness has been inconsistent. The aim of this study was to critical appraise and summarize evidence of the effects of CB-DOT on TB treatment outcomes. Methods Studies published up to the end of February 2015 were identified from three major international literature databases: Medline/PubMed, EBSCO, and EMBASE. Unpublished data from the grey literature were identified through Google and Google Scholar searches. Results Seventeen studies involving 12,839 pulmonary TB patients (PTB) in eight randomized controlled trials (RCTs) and nine cohort studies from 12 countries met the criteria for inclusion in this review and 14 studies were included in meta-analysis. Compared with clinic-based DOT, pooled results of RCTs for all PTB cases (including smear-negative or -positive, new or retreated TB cases) and smear-positive PTB cases indicated that CB-DOT promoted successful treatment , and completed treatment , reduced death , and transfer out . Pooled results of all studies (RCTs and cohort studies) with all PTB cases demonstrated that CB-DOT promoted successful treatment and curative treatment compared with self-administered treatment. Conclusions CB-DOT did improved TB treatment outcomes according to the pooled results of included studies in this review. Studies on strategies for implementation of patient-centered and community-centered CB-DOT deserve further attention. 
Methods Studies published up to the end of February 2015 were identified from three major international literature databases: Medline/PubMed, EBSCO, and EMBASE. Unpublished data from the grey literature were identified through Google and Google Scholar searches. Results Seventeen studies involving 12,839 pulmonary TB patients (PTB) in eight randomized controlled trials (RCTs) and nine cohort studies from 12 countries met the criteria for inclusion in this review and 14 studies were included in meta-analysis. Compared with clinic-based DOT, pooled results of RCTs for all PTB cases (including smear-negative or -positive, new or retreated TB cases) and smear-positive PTB cases indicated that CB-DOT promoted successful treatment . Global tuberculosis report in 2014 estimated that there were 9.0 million TB cases and 1.5 million TB mortalities in 2013 . The emergence of multidrug-resistant (MDR) TB and extensively drug-resistant TB has further heightened the public health importance of TB control efforts. There is concern that the estimated 2012 global incident cases of MDR-TB of 450,000 and mortality of 170,000 may only represent about one-third of actual cases of MDR-TB . Drug resistance primarily arises from poor treatment adherence or incorrect drug usage. Early diagnosis of patients and rapid initiation of effective therapy are essential in the prevention of MDR-TB . The effective TB control policy recommended by the World Health Organization (WHO) is Directly Observed Therapy, Short-course (DOTS) . Directly observed treatment (DOT) is the key component of DOTS . DOTS supervisors are required to encourage and observe patients swallow their anti-TB drugs during the course of their treatment . A DOT supervisor can be a clinical staff member, an employer, a teacher, a family member, or a lay volunteer who may be professional or amateur . 
DOT can be beneficial for prevention, diagnosis, support, and care , with its primary aim being an improvement in medication adherence . Decentralization of treatment expands access to healthcare services for all stakeholders, increases access to effective TB treatment , and results in more equitable provision of available treatment . The purpose of decentralizing TB services, including community TB care is to increase access to DOTS and to improve program performance, especially in relation to detection of new smear-positive cases and treatment success rates . Evidence also shows that decentralization of TB services does not compromise treatment outcomes . Community-based TB care refers to a decentralized program of TB services that is implemented in settings where patients live, work, and receive education . As a component of communitybased TB care, Community-based DOT (CB-DOT) is designed to relieve the pressure of patient care on over-stretched health facilities in countries with a high TB burden . Patients can remain in their homes, at their workplaces, or schools during therapy rather than traveling long distances and waiting for long hours in healthcare facilities . This is particularly important in areas with poor access to health services. For such settings, care in the community can have a significant impact on improving treatment outcomes . Many studies have investigated the effects of CB-DOT. However, the results of these studies have been inconsistent. For example, some studies reported a higher treatment success rate in TB patients treated under CB-DOT compared with those treated under clinic-based DOT, while other studies reported no significant difference. A randomized controlled trial by Lwilla et al reported that fewer patients died during treatment under CB-DOT than under institution-based DOT, while another study found no differences in mortality rates between CB-DOT and clinic/self-administered TB treatment groups. 
There is a need to critically review and appraise current evidence in order to elucidate the effect of CB-DOT on TB treatment. One meta-analysis investigated the impact of lay health workers (LHWs). However, the study did not specifically focus on DOT by community members who did not have a healthcare background, and did not explore the impact of CB-DOT on improving negative treatment outcomes (treatment default, death, treatment failure, and transfer out) . A review by Volmink and Garner analyzed the differences in cure rate and successful treatment rate (cure or completion) by comparing different approaches to treatment: self-administered treatment and home-based DOT compared with clinic-based DOT; clinic-based DOT compared with DOT by a family member or community health worker; and DOT by a family member compared with DOT by a community health worker. The review did not compare DOT by community volunteers with clinic-based DOT, DOT by family member or workplace DOT, and did not elucidate the effect of CB-DOT on negative outcomes. A systematic review and meta-analysis published in 2015 by Karumbi and Garner compared the differences of treatment outcome between DOT and self-administered treatment, DOT at home and DOT at health facility, DOT by family member and DOT by a community health worker. But this review also only focused on outcomes related to cure or treatment completion and did not explore the impact of CB-DOT on improving negative treatment outcomes (treatment default, death, treatment failure, and transfer out). Another by Munro et al. compared the effect of DOT and Self-Administered Therapy (SAT) in preventing microbiologic failure, relapse, or Adverse Drug Reaction (ADR), but did not compare DOT by different volunteer community members. 
To address these gaps in knowledge, we examined the effect of CB-DOT on both positive treatment outcomes (cured treatment, completed treatment and successful treatment) and negative treatment outcomes (default, death, failure treatment, transfer out and interrupted treatment rate) compared with clinic-based DOT, family-based DOT, workplace-based DOT, and self-administered treatment by reviewing all available randomized controlled trials (RCTs) and cohort studies. Search strategy This review was performed according to the standard procedures of the Cochrane Collaboration and the Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) checklist (S1 PRISMA Checklist) . To identify eligible prospective studies published in English up to February 2015, we searched Medline/PubMed, EMBASE, and EBSCO (PsycINFO and CINAHL). A mix of free text and index terms were used to maximize the retrieval of potentially relevant studies (S1 Text). We sought unpublished data from the grey literature through Google and Google Scholar. We hand-searched reference lists of identified articles. Inclusion/exclusion criteria The following criteria were employed: 1. Types of studies: Follow-up studies including randomized controlled trials (RCTs) and prospective cohort studies were included. 2. Participants: Patients with pulmonary TB (PTB) (including smear-positive or -negative PTB, newly diagnosed cases, and those undergoing retreatment were included. For studies that reported treatment outcomes of participants with smear-positive TB and smear-negative TB, and new or retreated TB and ex-pulmonary TB patients separately, , we only included results of TB patients without ex-pulmonary TB. Studies were excluded where we could not separate PTB patients from extra-PTB patients . 3. Type of interventions: Studies in which CB-DOT services were provided by lay/community health workers, volunteers, peers, friends, etc. . 
For the purpose of this review, we defined CB-DOT as DOT that was delivered by lay healthcare personnel (including village health workers)/community health workers or voluntary lay individuals from the community (not including family or workplace individuals). Outcomes measures: We used the following definitions of treatment outcomes for non MDR-TB : (i) Successful treatment: a patient who was cured or who completed treatment; (ii) Cured treatment: a patient who was initially sputum smear-positive and who was sputum smear-negative in the last month of treatment and on at least one previous occasion; (iii) Completed treatment: a patient with sputum smear-positive or sputum smearnegative pulmonary TB who completed treatment; (iv) death: a patient who died from any cause during treatment; (v) Failure: a patient who was initially sputum smear-positive and who remained sputum smear-positive at month 5, or was later found to have a MDR strain at any point during treatment, whether they were smear-negative or smear-positive; (vi) Default: a patient whose treatment was interrupted for two consecutive months or more; and (vii) Transfer out: a patient who was transferred to another reporting unit and whose treatment outcome was unknown. When there was evidence of multiple publications of the same study over time, only the article with a full report was included. Study selection Two reviewers (HZ and YL) used the above inclusion and exclusion criteria to identify relevant studies. Each reviewer independently screened the titles and abstracts of identified studies to assess their eligibility for inclusion in the review, using an eligibility form based on the inclusion criteria. Where there was disagreement, a decision to include a study was reached after discussion and consensus by among all reviewers. Quality assessment Two reviewers (HZ and YL) independently assessed the methodological quality of included studies. 
For RCTs, we assessed generation of the allocation sequence and concealment of allocation as adequate, inadequate, or unclear . Blinding was classified as adequate if steps were taken to ensure that those recording the main outcome of the study were blind to the assigned interventions, and inadequate if this was not the case, or if there was no description of the method for assessing the adequacy of the randomization procedure. Completeness of follow-up was assessed as adequate if it included 90% or more, inadequate if it included less than 90%, or unclear if it was not mentioned. We assessed the quality of cohort studies using the Newcastle-Ottawa Scale . For cohort studies, we assessed the representativeness of the exposed cohort in the study setting, the selection of a non-exposed cohort, the ascertainment of exposure, demonstration that the outcome of interest was not present at initiation of the study, comparability of the cohorts on the basis of study design and analyses, outcome assessment, and the adequacy of follow-up . The assessment is presented using a scoring system, where 1 indicates that the study met the criteria; 0 indicates the study did not meet the criteria; ND indicates that fulfilment of the criteria could not be determined. After assessing the quality of each included study on the basis of these criteria, a composite quality score was assigned, ranging from 0 to 9. Studies that scored 8 were judged to be of high quality. Data abstraction Data from eligible studies were independently abstracted by two reviewers (HZ and YL). Differences were resolved by discussion and consensus among all reviewers. 
Data extracted from each study included: name of first author/year of publication, type of study design, place of study, type of participant (newly diagnosed/retreatment and smear-positive/negative pulmonary TB patients) age, type of intervention (comparison groups and sample size of each group), outcomes (successful treatment, completed treatment, cured treatment, death, default, failure, and transfer out), and the main results of each study. Assessment of heterogeneity Heterogeneity between studies was evaluated using the Q test and the I-squared statistic (I 2 = 100% × (Q-df)/Q) . For the Q test, a p-value 0.10 was considered to indicate significant heterogeneity among the studies. Where the p-value was 0.10, we calculated I 2 , and studies with I 2 50% were deemed acceptable for inclusion in the meta-analysis. Where heterogeneity was significant, subgroup analysis was conducted to explore possible reasons for the heterogeneity. In the subgroup analyses, the heterogeneity within groups was also tested, using the same statistical methods. Description of studies The quality assessment indicated that the generation of the allocation sequence for six trials was adequate, but two trials lacked information on generation of allocation sequence. Concealment of allocation in three studies was inadequate, and two studies did not provide information. Outcome assessment was blind in only five trials ; completeness of follow-up in one trial was assessed as inadequate, two trials did not provide sufficient information to assess this aspect of study quality and the rest of the trials were assessed as adequate (S1A Table). All cohort studies met the criteria for ascertainment of exposure, outcome of interest not present at the start of the study, long enough follow-up for outcomes to occur, and complete accounting of follow-up in the cohorts. 
Only one study did not describe a non-exposed cohort drawn from the same community as the exposed cohort; two studies did not describe control of factors (such as severity of disease, health service) that may be associated with treatment outcome. Four studies did not report taking measures to control for additional factors (such as demographic characteristics or socioeconomic factors) which may be associated with treatment outcome; three studies did not describe the methods for determining the outcome of treatment. One study scored 6; 2 studies scored 7, the rest 6 studies had scores 8 (S1B Table). The period of DOT implementation varied among studies. DOT was implemented in the first 2 months of intensive treatment in four studies . One study did not describe the DOT period, and DOT in one study was conducted 5 days per week for the first 8 weeks for new patients, and 12 weeks for re-treated patients, followed by 3 days per week for the continuation phase . DOT was implemented in the entire standard treatment course in 26,31, . Despite the focus of all CB-DOT in monitoring patients while swallowing their anti-TB drugs, all studies included additional measures (such as recording adherence on a Treatment Supporter Card, encouraging patients to complete treatment, noting the adherence card in each visit to the patients, recalling the patients of default from treatment in 24 hours) to improve treatment adherence except in three studies that did not report additional measures (Table 1). Noticeably, CB-DOT providers, LHWs, also played a mentoring role, visiting the patients, encouraging and monitoring treatment adherence regularly such as in the RCT by Clarke et al. . Meta-analysis of the impact of CB-DOT on treatment outcome CB-DOT vs. clinic-based DOT. 
Twelve studies (six RCT and six cohort studies) were included in the meta-analysis on the impact of CB-DOT on TB treatment outcomes compared with clinic-based DOT: Twelve studies on all PTB cases: The heterogeneity test indicated that all studies on successful treatment, cured treatment, treatment default, and death had significant heterogeneity (I 2 = 87, 88, 42 and 65 respectively) and therefore the random effects model was used for the metaanalysis. However, all studies on completed treatment, treatment failure, and transfer out had no significant heterogeneity (I 2 = 20, 0, and 38 respectively), and the fixed effects model was , but had no effect on curative treatment, treatment completion, treatment default, death, and treatment failure ( Table 2). Six RCT trials on all PTB cases: All studies of all treatment outcome (successful treatment, cured treatment, completed treatment, default, failure, transfer out and death) had no significant heterogeneity. Pooled analysis of six RCTs for all PTB cases demonstrated that CB-DOT promoted successful treatment and treatment , but had no effect on curative treatment, treatment default, and treatment failure ( Table 2). CB-DOT vs. family-based DOT. Three RCTs and one cohort study were included in the meta-analysis of the effect of CB-DOT on treatment outcome compared with family based-DOT. The heterogeneity test indicated that all studies of five treatment outcome (successful treatment, curative treatment, treatment completion, death, and treatment failure) had no significant heterogeneity. The fixed effects model was therefore, used for the meta-analysis. Pooled results of all studies indicated that there were no significant differences in any treatment outcome between CB-DOT and family-based DOT (Table 3). CB-DOT vs. workplace-based DOT. Only two cohort studies were included in the metaanalysis of the impact of CB-DOT on successful treatment compared with workplace-DOT. These studies had no significant heterogeneity. 
Pooled results showed that CB-DOT achieved a lower successful treatment rate than workplace-based DOT in all PTB cases (Table 3). CB-DOT vs. self-administered therapy. Three studies (one RCT and two cohort studies) were included in a meta-analysis of the impact of CB-DOT versus self-administered therapy (Table 3). Subgroup analysis. Subgroup analysis was conducted by type of PTB cases (smear-positive PTB, new/retreatment PTB), DOT period (during the intensive treatment period/continuous treatment period) and quality of studies. We found type of PTB cases and DOT period were possible causes of heterogeneity between studies. But we observed no marked influence of type of PTB cases, DOT periods and quality of studies on pooled results (S2A-S2C Table). Discussion DOT was launched by WHO in 1992 , and has long been accepted as an effective strategy to promote patient adherence to anti-TB treatment, thus helping to cure most TB cases, to prevent the spread of TB in the community, and to prevent drug-resistant TB . Poor implementation of DOT leads to monotherapy and intermittent treatment, which leads to the emergence of TB drug resistance . CB-DOT is accepted by many countries as a major element of community involvement in TB control . CB-DOT has advantages, particularly in low-to-middle income countries, because costs associated with CB-DOT are typically 40-50% lower than health facility-based care, and the cost-effectiveness of CB-DOT is approximately 50% higher . In response to these findings, more national treatment programs in Africa are now beginning to introduce and expand implementation of CB-DOT as part of routine activities . Before further expansion of CB-DOT, it is necessary to clarify its effect. 
Thus, this systematic review and meta-analysis updated available evidence on the beneficial effect of CB-DOT on TB control, and suggested that CB-DOT increased rates of successful treatment and completed treatment, and reduced rates of death and transfer out, compared with clinic-based DOT.
In addition, this meta-analysis identified that CB-DOT promoted cured treatment and successful treatment compared with self-administration (one RCT and two cohort studies, fixed model), but workplace DOT had an advantage in successful treatment compared with CB-DOT, based on two cohort studies with no significant heterogeneity. Regarding negative treatment outcome (default, death, failure, and transfer out), one systematic review and meta-analysis by Toczek et al. demonstrated that the engagement of community health workers as DOT providers and the provision of DOT through outreach treatment were associated with lower default rates for drug-resistant TB. The current metaanalysis identified that CB-DOT significantly reduced death for all PTB patients (six RCTs, fixed model) or for smear-positive PTB patients (five RCTs, fixed model), and the transfer out rate for all cases (nine studies, fixed model) and smear-positive PTB (five RCTs, fixed model) compared with clinic-based DOT. A previous review concluded that within CB-DOT, comparisons between DOT provided by a family member versus a community health worker had similar outcomes. Compared with DOT by family members, our review similarly found that CB-DOT had similar outcomes (successful treatment, cured treatment, competed treatment, death and failure) based on all relevant studies (three RCTs plus one cohort or three RCTs). Similar to our findings, the review by Karumbi and Garner evaluated DOT compared to self-administered treatment in people on treatment for active TB or on prophylaxis to prevent active disease, and demonstrated little or no difference in cure or treatment completion when DOT was implemented by a family member compared with DOT by community health worker . Another systematic review concluded that DOT was not significantly better than self-administered therapy in preventing microbiologic failure, relapse, or acquired drug resistance. 
Our review similarly observed that CB-DOT did not improve death rates or treatment failure compared with self-administered therapy.
For example: (1) Providers and locations of CB-DOT can be decided by the patients because CB-DOT is patient-centered and community-centered in order to provide flexible and convenient CB-DOT to individual TB patients where they live, work or attend school. (2) Once inexpensive, evidencebased interventions are available in some form, it is important to adapt it to local contexts. Implementation research is use of strategies to adopt, adapt, integrate evidence-based health interventions and policies, and change practice patterns within specific settings . Therefore, further implementation research on strategies for implementing CB-DOT in specific community settings would help to provide guidance on how best to integrate evidence-based CB-DOT into the healthcare system . (3) The meta-analysis by Kangovi et al. found that CB-DOT programs where providers received financial reward differed possibly from programs without financial reward for providers in TB treatment outcomes . Offering financial incentive to CB-DOT providers are more likely to increase motivation and their effectiveness, but further studies are needed to confirm this hypothesis. Conclusions This systematic review and meta-analysis demonstrated that, as one component of decentralization of TB care from health facilities into the community, "patient centered" and "community centered" CB-DOT did improve treatment outcomes if it tailored to local community conditions. Possibly, it is a promising strategy to scale up CB-DOT in low-to-middle income countries with high TB burden, because it is cost-effective and acceptable. CB-DOT interventions could benefit from further implementation studies to ensure proper tailoring of interventions in line with constraints and resources of the local settings in which they are implemented.
<reponame>mgiessing/oauth2-proxy package providers import ( "context" "errors" "fmt" "io/ioutil" "net/http" "net/url" "strings" "github.com/coreos/go-oidc/v3/oidc" "github.com/oauth2-proxy/oauth2-proxy/v7/pkg/apis/options" "github.com/oauth2-proxy/oauth2-proxy/v7/pkg/apis/sessions" "github.com/oauth2-proxy/oauth2-proxy/v7/pkg/logger" internaloidc "github.com/oauth2-proxy/oauth2-proxy/v7/pkg/oidc" "github.com/oauth2-proxy/oauth2-proxy/v7/pkg/providers/util" "golang.org/x/oauth2" ) const ( // This is not exported as it's not currently user configurable oidcUserClaim = "sub" ) // ProviderData contains information required to configure all implementations // of OAuth2 providers type ProviderData struct { ProviderName string LoginURL *url.URL RedeemURL *url.URL ProfileURL *url.URL ProtectedResource *url.URL ValidateURL *url.URL // Auth request params & related, see //https://openid.net/specs/openid-connect-basic-1_0.html#rfc.section.2.1.1.1 AcrValues string ApprovalPrompt string // NOTE: Renamed to "prompt" in OAuth2 ClientID string ClientSecret string ClientSecretFile string Scope string Prompt string // Common OIDC options for any OIDC-based providers to consume AllowUnverifiedEmail bool UserClaim string EmailClaim string GroupsClaim string Verifier *internaloidc.IDTokenVerifier // Universal Group authorization data structure // any provider can set to consume AllowedGroups map[string]struct{} getAuthorizationHeaderFunc func(string) http.Header } // Data returns the ProviderData func (p *ProviderData) Data() *ProviderData { return p } func (p *ProviderData) GetClientSecret() (clientSecret string, err error) { if p.ClientSecret != "" || p.ClientSecretFile == "" { return p.ClientSecret, nil } // Getting ClientSecret can fail in runtime so we need to report it without returning the file name to the user fileClientSecret, err := ioutil.ReadFile(p.ClientSecretFile) if err != nil { logger.Errorf("error reading client secret file %s: %s", p.ClientSecretFile, err) return "", 
errors.New("could not read client secret file") } return string(fileClientSecret), nil } // setAllowedGroups organizes a group list into the AllowedGroups map // to be consumed by Authorize implementations func (p *ProviderData) setAllowedGroups(groups []string) { p.AllowedGroups = make(map[string]struct{}, len(groups)) for _, group := range groups { p.AllowedGroups[group] = struct{}{} } } type providerDefaults struct { name string loginURL *url.URL redeemURL *url.URL profileURL *url.URL validateURL *url.URL scope string } func (p *ProviderData) setProviderDefaults(defaults providerDefaults) { p.ProviderName = defaults.name p.LoginURL = defaultURL(p.LoginURL, defaults.loginURL) p.RedeemURL = defaultURL(p.RedeemURL, defaults.redeemURL) p.ProfileURL = defaultURL(p.ProfileURL, defaults.profileURL) p.ValidateURL = defaultURL(p.ValidateURL, defaults.validateURL) if p.Scope == "" { p.Scope = defaults.scope } if p.UserClaim == "" { p.UserClaim = oidcUserClaim } } // defaultURL will set return a default value if the given value is not set. func defaultURL(u *url.URL, d *url.URL) *url.URL { if u != nil && u.String() != "" { // The value is already set return u } // If the default is given, return that if d != nil { return d } return &url.URL{} } // **************************************************************************** // These private OIDC helper methods are available to any providers that are // OIDC compliant // **************************************************************************** func (p *ProviderData) verifyIDToken(ctx context.Context, token *oauth2.Token) (*oidc.IDToken, error) { rawIDToken := getIDToken(token) if strings.TrimSpace(rawIDToken) == "" { return nil, ErrMissingIDToken } if p.Verifier == nil { return nil, ErrMissingOIDCVerifier } return p.Verifier.Verify(ctx, rawIDToken) } // buildSessionFromClaims uses IDToken claims to populate a fresh SessionState // with non-Token related fields. 
func (p *ProviderData) buildSessionFromClaims(rawIDToken, accessToken string) (*sessions.SessionState, error) { ss := &sessions.SessionState{} if rawIDToken == "" { return ss, nil } extractor, err := p.getClaimExtractor(rawIDToken, accessToken) if err != nil { return nil, err } // Use a slice of a struct (vs map) here in case the same claim is used twice for _, c := range []struct { claim string dst interface{} }{ {p.UserClaim, &ss.User}, {p.EmailClaim, &ss.Email}, {p.GroupsClaim, &ss.Groups}, // TODO (@NickMeves) Deprecate for dynamic claim to session mapping {"preferred_username", &ss.PreferredUsername}, } { if _, err := extractor.GetClaimInto(c.claim, c.dst); err != nil { return nil, err } } // `email_verified` must be present and explicitly set to `false` to be // considered unverified. verifyEmail := (p.EmailClaim == options.OIDCEmailClaim) && !p.AllowUnverifiedEmail var verified bool exists, err := extractor.GetClaimInto("email_verified", &verified) if err != nil { return nil, err } if verifyEmail && exists && !verified { return nil, fmt.Errorf("email in id_token (%s) isn't verified", ss.Email) } return ss, nil } func (p *ProviderData) getClaimExtractor(rawIDToken, accessToken string) (util.ClaimExtractor, error) { extractor, err := util.NewClaimExtractor(context.TODO(), rawIDToken, p.ProfileURL, p.getAuthorizationHeader(accessToken)) if err != nil { return nil, fmt.Errorf("could not initialise claim extractor: %v", err) } return extractor, nil } // checkNonce compares the session's nonce with the IDToken's nonce claim func (p *ProviderData) checkNonce(s *sessions.SessionState) error { extractor, err := p.getClaimExtractor(s.IDToken, "") if err != nil { return fmt.Errorf("id_token claims extraction failed: %v", err) } var nonce string if _, err := extractor.GetClaimInto("nonce", &nonce); err != nil { return fmt.Errorf("could not extract nonce from ID Token: %v", err) } if !s.CheckNonce(nonce) { return errors.New("id_token nonce claim does not match the 
session nonce") } return nil } func (p *ProviderData) getAuthorizationHeader(accessToken string) http.Header { if p.getAuthorizationHeaderFunc != nil && accessToken != "" { return p.getAuthorizationHeaderFunc(accessToken) } return nil }
/** * Release the group semaphore. Every P operation must be * followed by a V operation. This may cause another thread to * wake up and return from its P operation. */ private synchronized void Vstartgroup() { groupSemaphore++; notifyAll(); }
import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.LinkedList; public class Main{ public static void main(String[] args) throws NumberFormatException, IOException { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); LinkedList<folder> inps = new LinkedList<folder>(); disk [] disks = new disk[5]; String s = in.readLine(); while(s!=null && s.length()!=0){ int a = s.charAt(0)-'C'; if(disks[a]==null){ disks[a] = new disk(s.charAt(0)); } s = s.substring(3); int m = 0; for (int i = 0; i < s.length(); i++) { if(s.charAt(i)=='\\') m++; } String [] p = new String [m+1]; m = 0; String curr = ""; for (int i = 0; i < s.length(); i++) { if(s.charAt(i)=='\\'){ p[m++] = curr; curr = ""; }else curr+=s.charAt(i); } p[m++] = curr; boolean flag = false; folder k = null; for (int i = 0; i < disks[a].fs.size(); i++) { if(disks[a].fs.get(i).a.equals(p[0])){ flag = true; k = disks[a].fs.get(i); } } if(!flag){ k = new folder(p[0]); inps.add(k); disks[a].fs.add(k); } for (int i = 1; i < p.length-1; i++) { boolean flag2 = false; //System.out.println(p[i]); for (int j = 0; j < k.ins.size(); j++) { if(k.ins.get(j).a.equals(p[i])){ flag2 = true; k = k.ins.get(j); } } if(!flag2){ folder e = new folder(p[i]); k.ins.add(e); k = e; } } k.files.add(p[p.length-1]); s = in.readLine(); } int maxfolder = 0; int maxfile = 0; for (int i = 0; i < inps.size(); i++) { folder curr = inps.get(i); maxfolder = Math.max(maxfolder,call(curr)); maxfile = Math.max(maxfile,call2(curr)); } System.out.println(maxfolder+" "+maxfile); } public static int call(folder j){ int a = j.ins.size(); for (int i = 0; i < j.ins.size(); i++) { a+=call(j.ins.get(i)); } return a; } public static int call2(folder j){ int a = j.files.size(); for (int i = 0; i < j.ins.size(); i++) { a+=call2(j.ins.get(i)); } return a; } } class disk{ char c; LinkedList<folder>fs; public disk(char cc){ c = cc; fs = new LinkedList<folder>(); } } class folder{ String a; 
LinkedList<folder> ins; LinkedList<String> files; public folder(String aa){ a=aa; ins = new LinkedList<folder>(); files = new LinkedList<String>();; } }
#include<bits/stdc++.h> using namespace std; int main() { int n,m; cin>>n>>m; int k=n/m; int i; int ar[m]; for(i=0;i<m;i++) ar[i]=k; k=n%m; i=0; while(k>0) { ar[i]+=1; i++; k--; if(i==m) i=0; } for(i=0;i<m;i++) cout<<ar[i]<<" "; }
#include <bits/stdc++.h> using namespace std; typedef long long ll; bool cmp(const string &a, const string &b){ return a+b<b+a; } int main(){ // freopen("input.txt", "r", stdin);freopen("output.txt", "w", stdout); string s; cin>>s>>s; sort(s.begin(), s.end()); cout<<s; return 0; }
What's in the Box Setup Eyefi Mac app How it Works Eyefi Settings menu, accessible via the Mac app Features Eyefi Apps Customer Service Who's it For? Uploads photos automatically at home Uploads photos via direct connect when no WiFi is available Convenient Supports RAW and JPEG files Eyefi Cloud makes photos available everywhere Setup is a bit confusing Expensive Some battery drain Selective transfer is time consuming How to Buy Popular WiFi-connected SD card manufacturer Eyefi recently released a brand new product, the Eyefi Mobi Pro . For those unfamiliar with Eyefi, the company makes WiFi-connected SD cards to give people a way to quickly transfer photos from their cameras to their Macs, iPhones, and iPads, even when a WiFi network is unavailable.The company's newest card, the Eyefi Mobi Pro offers 32GB of storage, support for RAW file transfers, and a wireless transfer feature that lets users selectively choose which photos to upload. When used on a home WiFi network, the Mobi Pro lets users transfer images at high speeds, but when away from home, it creates its own WiFi hotspot, so it's always possible to get pictures from the SD card to an iPad, iPhone, or Mac.MacRumors went hands-on with the new Eyefi Mobi Pro SD card to check out all of the new features and to figure out whether or not it's worth the $99 price tag.The Eyefi box contains one 32GB Class 10 SDHC WiFi card, a USB card adapter that's used to configure the Mobi card, and an activation card that lets the Mobi Pro pair with desktop and iOS apps. The card also allows users to sign up for a free year of access to Eyefi's cloud service, which allows unlimited photo uploads and storage.The Eyefi box directs users straight to a setup website , which makes finding the setup steps easy. The website has instructions for setting the Mobi Pro up to connect to a mobile device or a computer.Connecting the Mobi Pro to a mobile device first requires a download of the accompanying Eyefi app. 
On iOS, the app is called Eyefi Mobi and is available for free in the App Store. Once installed, the app asks for an activation code, which is included in the box, and it will ask you to install a provisioning profile to allow the iPhone to recognize the Mobi Pro card.From there, you need to put the SD card in the camera, snap a few pictures, and leave the camera on while you go to the Settings app on your iPhone. Navigate to WiFi and choose the Mobi Pro card network. You'll need to enter the activation code as a password, but the app doesn't specify that, which makes setup a bit more difficult than it needs to be.Connecting the Mobi Pro card to a Mac is a similar process, and involves downloading the Eyefi Mobi Desktop app then following many of the same activation steps. On the Mac, you don't get a full app -- just a mini app accessible from the menu bar. Plugging the Eyefi card into a Mac with the included USB adapter will bring up some advanced setup settings, letting you add a home network to make uploading photos at home easier.Both the iOS and Mac apps will prompt you to connect the Mobi Pro card to an Eyefi Cloud account. You get a free year of Eyefi Cloud service with the purchase of a Mobi Pro card, and it's $49.99 per year afterwards. It's not a bad deal because you get unlimited storage and it supports both RAW and JPEG files.Eyefi Cloud is not required to use the Mobi Pro card, but it's useful because it syncs photos across all devices and makes them available through the Eyefi Cloud website.Setting up the Mobi Pro isn't exactly hard, but it's not entirely straightforward. Documentation is not as clear as it could be, and there were some quirks we found confusing. For example, connecting to the Mobi Pro's WiFi required a password that turned out to be the activation code, but that information wasn't listed anywhere. 
As another example, when we downloaded the Mac app from the Eyefi site, it downloaded an older version of the software that did not auto update.Once set up, the Mobi Pro card is simple to use. When away from home, the card will create its own WiFi network, so photos (or videos) can be uploaded to an iPhone, iPad, or Mac even when WiFi is not available. Connecting to the card's WiFi is done through the Mac's WiFi bar or the iPhone's settings menu, just like any other WiFi network. When connected, all photos you've taken will be transferred to the device you're connected to (one connection is supported at a time).If you're away from WiFi and upload your photos to your iPhone using a direct connection, you can go on to upload those photos to the Eyefi Cloud over cellular if your data plan allows for it, making them accessible anywhere right away.In the advanced settings menu, accessible by plugging the Mobi Pro into a computer, you can also set it up to work with a home WiFi network by adding your network's name and password. With your home WiFi network added to the card, it will use your home WiFi to automatically upload photos to the Eyefi Mac app without the need to connect your Mac directly to the card.Once you get home from a photo shooting session, just open the Mac app and turn your camera on to get all of the photos that you shot from the Mobi Pro to the Mac. If you've also signed into Eyefi Cloud, photos will upload to the cloud, which also makes them accessible through the iPhone app. If you're shooting at home, photos will transfer to the computer automatically for easy editing as long as the Mac app is open.So basically, you've got multiple ways to get your photos to every device quickly depending on how you want to use the Mobi Pro card. If you do a direct connection to the iPhone app and enable cloud syncing over cellular, photos will be transferred to the iPhone, uploaded to the Eyefi Cloud and accessible on a Mac via a browser. 
If you wait until you get home, you can transfer the photos to your Mac, where they'll also be uploaded to Eyefi Cloud and available on the iPhone.Whenever you're uploading photos from your camera to an iOS device or Mac, the SD card slot on the camera needs to be activated. On most cameras, power is set to shut off after approximately 30 seconds to preserve battery, so this will need to be extended in the camera settings menu to make sure the card stays on. Leaving the camera on to transfer photos can drain the battery faster.This isn't much of a problem with JPEGs because the file size is relatively small and it takes just seconds to transfer them, but it can be an issue when uploading larger RAW files because those take a bit longer. It's wise to keep an extra battery on hand if you're planning to use the Mobi Pro to upload a lot of files when away from home.Speed wise, the Mobi Pro is a class 10 SDHC card, so it supports read speeds of 13MB/s and write speeds up to 23/MB/s. That means it can record full 1080p video or consecutive high-quality still photos. When shooting RAW, we had no issues taking multiple burst photos, and there were no issues recording 1080p video.Physically, the card is similar to a standard SD card, and it should be noted that it is more durable than previous-generation Eyefi cards that were prone to breakage. As with most SD cards, there's a physical write protect switch on the side.Eyefi's Mobi Pro card supports several image and video formats, including RAW. RAW files will be uploaded to the Mac automatically, but when you try to transfer RAW files to the iPhone app, they'll be converted into JPEGs. The full RAW files will then need to be transferred to the Mac later, but they can also be uploaded to the cloud via the iPhone if you're signed into the Eyefi Cloud service.One of the new features unique to the Mobi Pro is selective transfer, which lets you choose the photos that you want to sync to your devices. 
With previous Eyefi cards, uploading photos was an all or nothing deal, but that's not the case with the new card. To use selective transfer, you need to enable it using your Mac and then choose photos to keep using the "protect" feature in your camera's settings. You need to individually flag each photo with the protect option, which isn't the most elegant solution, but it's nice to have the option if you only want to sync a handful of photos.The Mobi Pro, like all of the Eyefi SD cards, is compatible with a wide range of cameras. Companies like Olympus, Nikon, and Canon even have Eyefi support built-in to their cameras. You can check whether your camera is compatible using Eyefi's site The Eyefi Mac app is basically just used to facilitate photo transfers and to adjust the Mobi Pro's settings, but the Eyefi Mobi iOS app has a few more features. Unlike the Mac app, it will display all of the photos that you've uploaded from your Mobi Pro card, and if you're signed into the Eyefi Cloud, it'll display all of your cloud photos too.There are some basic editing tools for cropping and straightening photos built in, plus it organizes all of your photos into albums by date. It also supports tags, includes EXIF info, and lets you delete info. Settings within the app give you the option to send photos you've uploaded to the camera roll, and there's also a setting to import photos you've taken on your iPhone, giving you a way to get all of your photos, taken on camera or iOS device, into the Eyefi Cloud if you're using that.During our testing of the Mobi Pro, we ran into a major bug that caused the card to be nearly unusable and unable to connect to a home WiFi network. It wasn't being recognized by the Mac app because as it turns out, our card was never activated properly. 
This was an error that came up during the activation process, and it turned out to be an error the Eyefi team said it hadn't seen before.We had a conversation with an Eyefi product manager who, with the help of engineers, talked us through reactivating the Mobi Pro card and then pushed a fix so it wouldn't happen again to other users. As we had a review unit, we obviously got straight to the top of the customer service ladder, but we were impressed with the company's support database and its willingness to spend several hours on a Friday night fixing an obscure bug.The Eyefi Mobi Pro is a card that's suitable for DSLR users who want a way to automatically transfer RAW and JPEG files to their computers or mobile devices. On average, a 32GB class 10 SD card can be purchased for under $20, so at $99 for the Eyefi Mobi Pro, you're paying quite a premium for convenience.Is that extra money worth it? It depends on your workflow. If you want a way to quickly get photos from your camera to your iPhone, the Mobi Pro is a good solution. If you need a way to get photos to your Mac or iOS device even when you don't have Internet, the Mobi Pro does that. If you don't want to hassle with removing the SD card from your camera and plugging it into your Mac after a long day of shooting, the Mobi Pro will upload them automatically, and for some, the time saved will make the Mobi Pro worth the money.For computers that don't have SD card slots, like Apple's upcoming MacBook, the Mobi Pro will be especially useful, especially during the early months when no SD card adapters exist.Many DSLRs and mirrorless cameras these days come with built-in WiFi. If you already have a WiFi-enabled camera, the Mobi Pro may make less sense, but many WiFi cameras are somewhat more limited. 
For example, with Olympus' line of mirrorless cameras, you can upload to iOS via an app, but there are no options for automatically syncing all photos to the desktop.If you're going to get a Mobi Pro SD card, keep in mind that it works best with the Eyefi Cloud service. With Eyefi Cloud, all of your photos are available on all of your devices almost instantaneously for quick sharing and editing, and that's a benefit that can't be overlooked. You get a free year with purchase, but you're probably going to want to keep using it after storing a year's worth of photos, so take into account the $49.99/year charge.Eyefi's new Mobi Pro 32GB WiFi SD card can be purchased from the Eyefi website for $99.99. That price includes a complimentary year of the company's Eyefi Cloud service, with unlimited syncing and storage.
// basic flow control statements: for, if, else, switch, defer package main import ( "fmt" "math" "runtime" "time" ) // basic for loop understanding func forLoops() { // basic for loop (go only has for loops) sum := 0 for i := 0; i < 10; i++ { sum += i } fmt.Println(sum) // for can be like a while loop => optional init and post statements sum = 1 for sum < 1000 { sum += sum } fmt.Println(sum) // infinite loop if exit condition not specified // for { // } // for loops and functions Sqrt(1) } // Newton's method: compute sqrt using loop through guesses func Sqrt(x float64) float64 { z := x / 2 z_prev := -z for i := 0; i < 10; i++ { z -= (z*z - x) / (2 * z) // break if value not changing if z == z_prev { fmt.Println("Exiting loop") break } z_prev = z fmt.Println(z) } fmt.Printf("The sqrt of %g is ~%g\n", x, z) return z } // basic if/else understanding func ifElse() { fmt.Println(sqrt(2), sqrt(-4)) // Both calls to pow return their results before the call to fmt.Println begins fmt.Println( pow(3, 2, 10), pow(3, 3, 20), ) } // get sqrt of float as string func sqrt(x float64) string { // irrational number if x < 0 { return sqrt(-x) + "i" } // Sprint converts number to string return fmt.Sprint(math.Sqrt(x)) } // get biggest number between x^n or lim and return it func pow(x, n, lim float64) float64 { // if statement can have short statement to execute before start of condition if v := math.Pow(x, n); v < lim { return v } else { // will print out before the numbers, since pow calls complete before it is printed out in if_else() fmt.Printf("%g >= %g\n", v, lim) } return lim } // basic switch understanding func switchStatements() { fmt.Println("Go runs on ") // switch cases do not need to be integers switch os := runtime.GOOS; os { case "darwin": fmt.Println("OS X.") // do not need break statement (automatically added) case "linux": fmt.Println("Linux.") default: // freebsd, openbsd, // plan9, windows... 
fmt.Printf("%s.\n", os) } // switch cases do not need to be constants fmt.Println("When's Saturday?") today := time.Now().Weekday() switch time.Saturday { case today: fmt.Println("Today.") case today + 1: fmt.Println("Tomorrow.") case today + 2: fmt.Println("In two days.") default: fmt.Println("Too far away.") } // switch with no conditions is the same as switch true (ideal for long if-then-else chains) t := time.Now() switch { case t.Hour() < 12: fmt.Println("Good morning.") case t.Hour() < 17: fmt.Println("Good afternoon.") default: fmt.Println("Good evening.") } } // basic understanding of defer statements func deferStatements() { // A defer statement defers the execution of a function until the surrounding function returns. // The deferred call's arguments are evaluated immediately, but the function call is not executed // until the surrounding function returns. // will only execute after surrounding function (hello and stack_defer) is done defer fmt.Println("world") fmt.Println("hello") // stacking defers stackDefer() } // Deferred function calls are pushed onto a stack. // When a function returns, its deferred calls are executed in last-in-first-out order. func stackDefer() { fmt.Println("counting") for i := 0; i < 10; i++ { // will print in revers order defer fmt.Println(i) } fmt.Println("done") } func main() { fmt.Println("Flow control statements: for, if, else, switch, defer") forLoops() ifElse() switchStatements() deferStatements() }
def base_int(string: str) -> int: if len(string) > 1 and string[0:2] == ("0x" or "0X"): return int(string[2:], 16) if len(string) > 0 and string[0] == "0": return int(string[1:], 8) if len(string) > 1 and string[0:2] == ("0b" or "0B"): return int(string[2:], 16) return int(string)
// Register listeners for the given situation private static void registerListeners(Permazen jdb, Transaction tx, boolean automaticValidation, boolean isSnapshot) { if (jdb.hasOnCreateMethods || (automaticValidation && jdb.anyJClassRequiresDefaultValidation)) tx.addCreateListener(new InternalCreateListener()); if (jdb.hasOnDeleteMethods) tx.addDeleteListener(new InternalDeleteListener()); for (JClass<?> jclass : jdb.jclasses.values()) { for (OnChangeScanner<?>.MethodInfo info : jclass.onChangeMethods) { if (isSnapshot && !info.getAnnotation().snapshotTransactions()) continue; final OnChangeScanner<?>.ChangeMethodInfo changeInfo = (OnChangeScanner<?>.ChangeMethodInfo)info; changeInfo.registerChangeListener(tx); } } if (automaticValidation) { final DefaultValidationListener defaultValidationListener = new DefaultValidationListener(); jdb.fieldsRequiringDefaultValidation .forEach(storageId -> tx.addFieldChangeListener(storageId, new int[0], null, defaultValidationListener)); } if (jdb.hasOnVersionChangeMethods || jdb.hasUpgradeConversions || (automaticValidation && jdb.anyJClassRequiresDefaultValidation)) tx.addVersionChangeListener(new InternalVersionChangeListener()); }
/* * Internal helper function that generates a store to a local. * * stloc [home] = newValue * -- track local in newValue */ void TraceBuilder::genStLocAux(uint32 id, SSATmp* newValue, bool storeType) { Opcode opc = storeType ? StLoc : StLocNT; genInstruction(opc, newValue->getType(), genLdHome(id), newValue); setLocalValue(id, newValue); }
// helper.cc // Some useful functions // <NAME> <<EMAIL>> 2014 #include <cmath> #include "helper.h" int stringToInt(const std::string &str) { int result = 0; for (unsigned int i = str.length() - 1; i >= 0; --i) { result += (str[i] * pow(10, str.length() - i - 1)); } return result; }
Integrating Economics and Ecology: A Case of Intellectual Imperialism? An issue at the heart of American forestry is the attempt to integrate ecological and economic approaches to environmental management. The modern debate harks back to the ideological struggle between Gifford Pinchot's wise-use approach to conservation and Aldo Leopold's or Harold Ickes's nonutilitarian, ethical approach. The four books reviewed here suggest that the issues are still in ferment. Interdisciplinary attempts to integrate ecology and economics date back to the nineteenth century. As Martinez-Alier's at times incoherent and always polemical Ecological Economics points out, nineteenthcentury physicists, biologists, and chemists generated an extensive literature linking an energy theory of value, based on ecology, to the Marxist labor theory of value. According to Martinez-Alier, the scientists' energy-flow accounts (which measured values in terms of energy input/output ratios) demonstrated the "indisputable superiority" of traditional peasant agriculture in underdeveloped countries over capitalistic practices: it proved that peasant agriculture could feed "the population with minimum use of nonrenewable forms of energy" (p. 241). But these nineteenth-century ideas fell on infertile ground, and the world turned instead to capitalist systems, which rely on market prices to measure value. This choice, says Martinez-Alier, led to the waste of energy resources in overdeveloped countries, the unequal distribution of energy resources between the developed and dependent countries, and a continuing class struggle. According to Martinez-Alier, the new interdisciplinary ecological economics being developed now will vindicate and extend a Marxist-based ecological revolution throughout the Third World. Very open-minded members of the Forest History Society who want better to understand the ideological aspect of international debates may find this book of interest. 
They may also want to compare his view with that found in Donald Worster's widely read history of ecology (Nature's Economy), which accuses economics of foisting energy-flow accounting off on ecology, rather than vice versa. Worster differs with Martinez-Alier in focusing on the twentieth rather than the nineteenth century, and in seeing the energyflow accounting approach as essentially capitalist. Martinez-Alier, of course, sees the approach as conflicting with capitalist market values. Except for the inherent interest of such comparisons, however, I find Martinez-Alier to be a waste of intellectual energy. Charles Perrings's Economy and Environment presents a more formidable and formal case against the "market solution" to environmental problems. Perrings attacks the market solution's emphasis on the sovereignty of the individual, the sanctity of private property, and the domination of the present. In its place, Perrings suggests a positive role for the collective good, which goes beyond the mere adding up of individual satisfactions. Perrings extends the Marxist view of ecological economics by applying the formal logic of matrix algebra to the problem of interactive economic and ecological systems. His analysis suggests that the market solution to environmental-resource allocation flounders on three grounds. First, because it is strictly atomistic and individualistic, it has no way of assessing the collective social good. Second, because it lacks adequate feedback loops to capture interactions between the environment and the economy, it generates increasing uncertainty and increases environmental and economic instability. Third, it lacks an effective way to value the externalities that are pervasive in dynamic and evolutionary (read dialectical) systems. Perrings's book is an excruciatingly painful way to learn several elementary lessons: (1) Economic systems interact with environmental systems (defined as those beyond human control). 
(2) Market prices, and therefore the market solution, do not take into account all interactions between economic and environmental systems. (3) The trajectory of unstable and evolutionary economic and environmental systems is uncertain. (4) Discounting the interests of future generations at (presumably high) private rates of time preference is less egalitarian than discounting those future interests at (presumably low) social rates. (5) If "society" wants to protect the environment from unforeseen catastrophe, it should sell environmental "usufruct" bonds, the price of which will depend upon the potential environmental losses to society (including all future members) in a worst-case scenario. "The presumption that the appropriate time horizon . . . is one that encompasses intergenerational issues leads to the conviction that . . . the collectivity and not individuals within it . . . should determine the environmental strategy adopted by society" (p. 151, emphasis added). Not only Marxists hold that the market solution to environmental management is flawed by its acceptance of atomistic individualism. Mark Sagoff, an environmental philosopher, argues in The Economy of the
/** * Context for when an editor is created from the data browser * */ public class DBContext { SqlClientApp app; JPanel mainPanel; // panel with NORTH=panelWithButtons CENTER=results and SOUTH=tabPanel int queryResultIndex; List<List<Object>> queryResults; JScrollPane queryResultsPanel; String sql; DataBrowserDefinedTable t; JTabbedPane tabPane; DataBrowserTableModel jtableModel; JTable queryResultsTable; DataBrowserDefinedRelationship rel; // can be null String sqlPredicate; // the sql string used to find this row JTextField sqlTextBox; JTextField columnFilterField; RelationshipTracker relshipTracker; DBCellRender cellRender; String dbGroupname; }
/** * {@link CommunicationTest} checks how RMI client and server are interacting between each other. */ public class CommunicationTest { private static final int PORT = 0; private static final String LOCALHOST = "localhost"; private static final String STRUCTURE_ID = "structureId"; public static final long ONE_MINUTE = 60_000L; private static final RmiBuilder RMI_BUILDER = new RmiBuilder(). withTimeoutMs(ONE_MINUTE); /** * Rule that should be used to check existing */ @Rule public ExpectedException expectedException = ExpectedException.none(); private ExecutorService threadPool; private RmiServer server; private RmiClient client; private SimpleService service; /** * Initializes thread pool. * * @throws RmiException in case RMI server cannot be created and started. * @throws InterruptedException in case server starting process was interrupted * before it started. */ @Before public void before() throws RmiException, InterruptedException { threadPool = Executors.newSingleThreadExecutor(); server = createServer(threadPool); client = RMI_BUILDER.client(LOCALHOST, server.getPort()); service = client.getService(SimpleService.class); } /** * Releases all resources occupied by tests. * * @throws IOException in case server cannot be closed correctly. * @throws InterruptedException in case awaiting of thread pool termination has * been interrupted. */ @After public void after() throws IOException, InterruptedException { server.close(); threadPool.shutdown(); threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS); threadPool.shutdownNow(); } /** * Checks that complex structures successfully serialized and received on the client side. * * @throws RmiException in case something goes wrong during client/server * interaction. 
*/ @Test public void checkSuccessfulRequestsWithComplexStructures() throws RmiException { MatcherAssert.assertThat(service.sayHello("name"), CoreMatchers.is("Hello name")); MatcherAssert.assertThat(service.getStructures(Collections.singleton(STRUCTURE_ID)), CoreMatchers.<Collection<ComplexStructure>>is(Collections .singleton(createComplexStructure(STRUCTURE_ID)))); } /** * Checks that checked exception failed on the server side will be correctly passed and provided * to the client. * * @throws RmiException in case something goes wrong during client/server * interaction. * @throws CheckedException in case remote service method will throw checked * exception. */ @Test public void checkCheckedExceptionFailure() throws RmiException, CheckedException { expectedException.expect(new ExceptionMatcher<>(RmiException.class, null, CheckedException.class, "Test exception id")); service.helloThrowingCheckedException("id"); } /** * Checks that unchecked exception failed on the server side will be correctly passed and * provided to the client. * * @throws RmiException in case something goes wrong during client/server * interaction. */ @Test public void checkUncheckedExceptionFailure() throws RmiException { expectedException.expect(new ExceptionMatcher<>(RmiException.class, null, RuntimeException.class, "RuntimeException id")); service.helloThrowingRuntimeException("id"); } /** * Checks that exception will be thrown in case client will request an instance of unregistered * service. * * @throws RmiException in case something goes wrong during client/server * interaction. * @throws InterruptedException in case service registration process has been * interrupted. 
*/ @Test public void checkServiceNotRegistered() throws RmiException, InterruptedException { expectedException.expect(new ExceptionMatcher<>(RmiException.class, "There is no service implementation registered for 'UnregisteredService' interface", null, null)); final UnregisteredService unregisteredService = client.getService(UnregisteredService.class); unregisteredService.testMethod(); } /** * Checks that invocation of the method which return type is {@code void} completed gracefully * without any issues. * * @throws RmiException in case something goes wrong during client/server * interaction. */ @Test public void checkMethodCallWithVoidResult() throws RmiException { service.methodWithoutResult(); } /** * Checks that invocation of the method which return type is a primitive completed gracefully * without any issues. * * @throws RmiException in case something goes wrong during client/server * interaction. */ @Test public void checkMethodCallWithPrimitiveResult() throws RmiException { service.methodPrimitiveResult(); } /** * Checks that in case multiple clients will decide to call the same method with different * parameters will return expected results. * * @throws IOException in case of error while client connections will close * @throws InterruptedException in case client thread pool shutdown process * would be interrupted. * @throws ExecutionException in case one of the clients could not receive a * response. 
*/ @Test public void checkSeveralClients() throws IOException, InterruptedException, ExecutionException { final int clientsAmount = 40; final CountDownLatch clientReadyLatch = new CountDownLatch(clientsAmount); final ExecutorService multipleClientsPool = Executors.newFixedThreadPool(clientsAmount); final Collection<RmiClient> clients = new HashSet<>(); final Collection<Future<?>> futures = new HashSet<>(); multipleClientsPool.submit(createServiceTask(clientReadyLatch, service, "Name")); for (int i = 0; i < clientsAmount - 1; i++) { final RmiClient clientN = RMI_BUILDER.client(LOCALHOST, server.getPort()); clients.add(clientN); final SimpleService serviceN = clientN.getService(SimpleService.class); futures.add(multipleClientsPool .submit(createServiceTask(clientReadyLatch, serviceN, "Name" + i))); } for (Future<?> future : futures) { future.get(); } for (RmiClient clientN : clients) { clientN.close(); } multipleClientsPool.shutdownNow(); } private static Runnable createServiceTask(final CountDownLatch clientReadyLatch, final SimpleService service, final String name) { return new Runnable() { @Override public void run() { clientReadyLatch.countDown(); try { MatcherAssert.assertThat(service.sayHello(name), CoreMatchers.is(String .format(SimpleServiceImpl.MESSAGE_FORMAT, name))); } catch (RmiException e) { throw new RuntimeException("Unexpected failure", e); } } }; } private static ComplexStructure createComplexStructure(String id) { return new ComplexStructure(Collections.singleton( new NestedStructure(Collections.singleton(String.format("Nested %s", id)), 0)), String.format("Name for %s", id)); } private static RmiServer createServer(ExecutorService threadPool) throws RmiException { final RmiServer server = RMI_BUILDER.server(PORT); server.register(SimpleService.class, new SimpleServiceImpl()); threadPool.submit(server); return server; } }
def observe(self, vm): return vm.VM.RAM[self.__addr]
import Data.List convertToInt :: String -> Int convertToInt = read readOneInt :: IO Int readOneInt = do line_ <- getLine return $ convertToInt line_ readLineInts :: IO [Int] readLineInts = do line_ <- getLine return $ map convertToInt $ words line_ convertIntsToLine :: [Int] -> String convertIntsToLine [] = "" convertIntsToLine (x:xs) = (show x) ++ " " ++ (convertIntsToLine xs) main :: IO () main = do -- read t t <- readOneInt for t where for 0 = return () for t' = do _ <- readOneInt ints <- readLineInts let nums = zip ints [1,2..] let sorted = sortBy (\(a,_) (b,_) -> compare a b) nums let (alpha:beta:last_) = sorted let theta = last last_ putStrLn $ if (fst alpha) + (fst beta) <= (fst theta) then convertIntsToLine $ sort (snd (unzip [alpha, beta, theta])) else "-1" for (t'-1)
Identification and characterization of three novel RHCE*ce variant alleles affecting Rhc (RH4) reactivity T he Rh antigens, encoded by the homologous RHD and RHCE genes, are highly polymorphic. Numerous variant alleles have been described leading to quantitative and/or qualitative modification of antigen expression. As for RhD, a change in the RhCE protein can express weak or partial antigen. This report describes three new RHCE*ce alleles associated with a decrease of Rhc antigen expression. Molecular characterization of the alleles was supported by serologic investigations of Rhc antigen and the impact of amino acid changes on the RhCE protein.
Positive impacts of a dedicated General Paediatrics “home” ward in a tertiary paediatric Australian hospital Whilst a centralised model of care intuitively makes sense and is advocated in other subspecialty areas of medicine, there is a paucity of supportive evidence for General Paediatrics. Following ward restructuring at our tertiary paediatric centre in preparation for the COVID‐19 pandemic, a new dedicated General Paediatrics ward was established. We evaluated medical and nursing staff well‐being, morale and perceived impacts on care after the ward's establishment.
<filename>omoide/migration_engine/operations/freeze/helpers.py<gh_stars>0 # -*- coding: utf-8 -*- """Fast lookup values, some statistic, etc. """ import json from sqlalchemy.orm import Session from omoide import infra, constants from omoide import search_engine from omoide.database import models def build_helpers(session: Session, stdout: infra.STDOut) -> int: """Create fast lookup tables.""" new_values = 0 new_values += calculate_statistics(session, stdout) new_values += construct_navigation_info(session, stdout) return new_values def calculate_statistics(session: Session, stdout: infra.STDOut) -> int: """Calculate statistics for all realms/themes.""" stdout.print('\tCalculating statistics') new_values = 0 all_stats = search_engine.Statistics() for theme in session.query(models.Theme).all(): theme_stats = search_engine.Statistics() for group in theme.groups: for meta in group.metas: theme_stats.add( item_date=meta.registered_on or group.registered_on, item_size=meta.size, item_tags=[x.value for x in meta.tags] ) all_stats += theme_stats new_helper = models.Helper( key=f'stats__{theme.uuid}', value=json.dumps(theme_stats.as_dict(), ensure_ascii=False) ) session.add(new_helper) new_values += 1 new_helper = models.Helper( key=f'stats__{constants.ALL_THEMES}', value=json.dumps(all_stats.as_dict(), ensure_ascii=False) ) session.add(new_helper) new_values += 1 session.commit() return new_values def construct_navigation_info(session: Session, stdout: infra.STDOut) -> int: """Build graph of available realms/themes.""" stdout.print('\tConstructing graph') graph = {} for theme in session.query(models.Theme).order_by('label').all(): graph[theme.uuid] = { 'label': theme.label, 'groups': {}, } for group in theme.groups: graph[theme.uuid]['groups'][group.uuid] = { 'label': group.label } new_helper = models.Helper( key='graph', value=json.dumps(graph, ensure_ascii=False) ) session.add(new_helper) session.commit() return 1
<reponame>fc277073030/gitlabsource /* Copyright 2019 The TriggerMesh Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ra import ( "fmt" "io/ioutil" "log" "net/http" "time" "github.com/google/uuid" "github.com/knative/pkg/cloudevents" webhooks "gopkg.in/go-playground/webhooks.v3" gitlab "gopkg.in/go-playground/webhooks.v3/gitlab" ) const ( GLHeaderEvent = "Gitlab-Event" ) // GitLabReceiveAdapter converts incoming GitLab webhook events to // CloudEvents and then sends them to the specified Sink type GitLabReceiveAdapter struct { Sink string Client *http.Client } // HandleEvent is invoked whenever an event comes in from GitHub func (ra *GitLabReceiveAdapter) HandleEvent(payload interface{}, header webhooks.Header) { hdr := http.Header(header) err := ra.handleEvent(payload, hdr) if err != nil { log.Printf("unexpected error handling GitLab event: %s", err) } } func (ra *GitLabReceiveAdapter) handleEvent(payload interface{}, hdr http.Header) error { gitLabEventType := hdr.Get("X-" + GLHeaderEvent) extensions := map[string]interface{}{ cloudevents.HeaderExtensionsPrefix + GLHeaderEvent: hdr.Get("X-" + GLHeaderEvent), } log.Printf("Handling %s", gitLabEventType) var eventID string if uuid, err := uuid.NewRandom(); err == nil { eventID = uuid.String() } cloudEventType := fmt.Sprintf("%s.%s", "dev.triggermesh.source.gitlab", gitLabEventType) source := sourceFromGitLabEvent(gitlab.Event(gitLabEventType), payload) return ra.postMessage(payload, source, cloudEventType, eventID, extensions) } func 
(ra *GitLabReceiveAdapter) postMessage(payload interface{}, source, eventType, eventID string, extensions map[string]interface{}) error { ctx := cloudevents.EventContext{ CloudEventsVersion: cloudevents.CloudEventsVersion, EventType: eventType, EventID: eventID, EventTime: time.Now(), Source: source, Extensions: extensions, } req, err := cloudevents.Binary.NewRequest(ra.Sink, payload, ctx) if err != nil { log.Printf("Failed to marshal the message: %+v : %s", payload, err) return err } log.Printf("Posting to %q", ra.Sink) client := ra.Client if client == nil { client = &http.Client{} } resp, err := client.Do(req) if err != nil { return err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { // TODO: in general, receive adapters may have to be able to retry for error cases. log.Printf("response Status: %s", resp.Status) body, _ := ioutil.ReadAll(resp.Body) log.Printf("response Body: %s", string(body)) } return nil } func sourceFromGitLabEvent(gitLabEvent gitlab.Event, payload interface{}) string { switch gitLabEvent { case gitlab.PushEvents: pe := payload.(gitlab.PushEventPayload) return pe.Repository.URL case gitlab.TagEvents: te := payload.(gitlab.TagEventPayload) return te.Repository.URL case gitlab.IssuesEvents: ie := payload.(gitlab.IssueEventPayload) return ie.ObjectAttributes.URL case gitlab.ConfidentialIssuesEvents: cie := payload.(gitlab.ConfidentialIssueEventPayload) return cie.ObjectAttributes.URL case gitlab.CommentEvents: ce := payload.(gitlab.CommentEventPayload) return ce.ObjectAttributes.URL case gitlab.MergeRequestEvents: mre := payload.(gitlab.MergeRequestEventPayload) return mre.ObjectAttributes.URL case gitlab.WikiPageEvents: wpe := payload.(gitlab.WikiPageEventPayload) return wpe.ObjectAttributes.URL case gitlab.PipelineEvents: pe := payload.(gitlab.PipelineEventPayload) return pe.ObjectAttributes.URL case gitlab.BuildEvents: be := payload.(gitlab.BuildEventPayload) return be.Repository.URL } return "" }
// makeHttpServer creates and configures the HTTP server to be used to serve incoming requests func (app *apiServer) makeHttpServer() { srvMux := new(http.ServeMux) app.srv = &http.Server{ Addr: app.cfg.Server.BindAddress, ReadTimeout: time.Second * time.Duration(app.cfg.Server.ReadTimeout), WriteTimeout: time.Second * time.Duration(app.cfg.Server.WriteTimeout), IdleTimeout: time.Second * time.Duration(app.cfg.Server.IdleTimeout), ReadHeaderTimeout: time.Second * time.Duration(app.cfg.Server.HeaderTimeout), Handler: srvMux, } app.setupHandlers(srvMux) }
<filename>src/bin_parse/archetypes.rs<gh_stars>1-10 use super::*; use crate::structs::{ Archetype, CharacterAttributes, CharacterAttributesTable, Keyed, NameKey, NamedTable, }; use std::rc::Rc; /// Reads all of the archetypes in the current .bin file. /// /// # Arguments: /// /// * `reader` - An open `Read` + `Seek` /// * `strings` - The `StringPool` for archetypes /// * `messages` - The global `MessageStore` containing client messages /// /// # Returns: /// /// If successful, a map containing zero or more `Archetype` structs. /// Otherwise, a `ParseError` with the error information. pub fn serialized_read_archetypes<T>( reader: &mut T, strings: &StringPool, messages: &MessageStore, ) -> ParseResult<Keyed<Archetype>> where T: Read + Seek, { // data length let (expected_bytes, begin_pos) = read_struct_length(reader)?; let mut archetypes = Keyed::<_>::new(); let at_size: usize = bin_read(reader)?; for _ in 0..at_size { let archetype = read_archetype(reader, strings, messages)?; if let Some(class_key) = &archetype.class_key { archetypes.insert(class_key.clone(), Rc::new(archetype)); } } verify_struct_length(archetypes, expected_bytes, begin_pos, reader) } /// Reads an `Archetype` struct from a .bin file. /// Refer to Common/entity/classesh TokenizerParseInfo structs. /// /// # Arguments: /// /// * `reader` - An open `Read` + `Seek` /// * `strings` - The `StringPool` for archetypes /// * `messages` - The global `MessageStore` containing client messages /// /// # Returns: /// /// If successful, an `Archetype`. /// Otherwise, a `ParseError` with the error information. fn read_archetype<T>( reader: &mut T, strings: &StringPool, messages: &MessageStore, ) -> ParseResult<Archetype> where T: Read + Seek, { let mut archetype = Archetype::new(); macro_rules! at_string { ($($field:ident),+) => { $( archetype.$field = read_pool_string(reader, strings, messages)?; )+ } } macro_rules! 
at_string_arr { ($field:ident) => { read_pool_string_arr(&mut archetype.$field, reader, strings, messages)?; }; } macro_rules! at_attrib_arr { ($field:ident) => { bin_read_arr_fn( &mut archetype.$field, |re| read_character_attributes(re), reader, )?; }; } macro_rules! at_table_arr { ($field:ident) => { bin_read_arr_fn( &mut archetype.$field, |re| read_character_attributes_table(re), reader, )?; }; } let (expected_bytes, begin_pos) = read_struct_length(reader)?; at_string!(pch_name, pch_display_name, pch_display_help); if let Some(name) = &archetype.pch_name { // This is used later to speed up matching against "requires" fields. let mut class_key = String::new(); class_key.push_str(&Archetype::CLASS_PREFIX[0..1]); let lcase_name = name.to_ascii_lowercase().replace(' ', "_"); if !lcase_name.starts_with(&Archetype::CLASS_PREFIX[1..]) { class_key.push_str(&Archetype::CLASS_PREFIX[1..]); } class_key.push_str(&lcase_name); archetype.class_key = Some(NameKey::new(class_key)); } at_string_arr!(ppch_allowed_origin_names); at_string_arr!(ppch_special_restrictions); at_string!( pch_store_restrictions, pch_locked_tooltip, pch_product_code, pch_reduction_class ); archetype.b_reduce_as_av = bin_read(reader)?; bin_read_arr(&mut archetype.pi_level_up_respecs, reader)?; at_string!(pch_display_short_help, pch_icon); archetype.pch_primary_category = read_name_key(reader, strings)?; archetype.pch_secondary_category = read_name_key(reader, strings)?; archetype.pch_power_pool_category = read_name_key(reader, strings)?; archetype.pch_epic_pool_category = read_name_key(reader, strings)?; at_attrib_arr!(pp_attrib_min); at_attrib_arr!(pp_attrib_base); at_attrib_arr!(pp_attrib_strength_min); at_attrib_arr!(pp_attrib_resistance_min); // For each of Strength, Current, Resistances: Read the inner and out diminishing returns tables. // (Will be 6 reads.) 
for i in &[ Archetype::kClassesDiminish_Inner, Archetype::kClassesDiminish_Outer, ] { bin_read_arr_fn( &mut archetype.pp_attrib_diminishing_str[*i], |re| read_character_attributes(re), reader, )?; } for i in &[ Archetype::kClassesDiminish_Inner, Archetype::kClassesDiminish_Outer, ] { bin_read_arr_fn( &mut archetype.pp_attrib_diminishing_cur[*i], |re| read_character_attributes(re), reader, )?; } for i in &[ Archetype::kClassesDiminish_Inner, Archetype::kClassesDiminish_Outer, ] { bin_read_arr_fn( &mut archetype.pp_attrib_diminishing_res[*i], |re| read_character_attributes(re), reader, )?; } at_table_arr!(pp_attrib_temp_max); at_table_arr!(pp_attrib_temp_max_max); at_table_arr!(pp_attrib_temp_strength_max); at_table_arr!(pp_attrib_temp_resistance_max); let size: u32 = bin_read(reader)?; for _ in 0..size { let table = read_named_table(reader, strings, messages)?; if let Some(table_name) = &table.pch_name { archetype .pp_named_tables .insert(table_name.to_lowercase(), table); } } archetype.b_connect_hp_and_status = bin_read(reader)?; // connect hp and integrity TOK_REDUNDANTNAME archetype.off_defiant_hit_points_attrib = bin_read(reader)?; archetype.f_defiant_scale = bin_read(reader)?; verify_struct_length(archetype, expected_bytes, begin_pos, reader) } /// Reads a `CharacterAttributes` struct from a .bin file. /// Refer to Common/entity/character_attribs.h TokenizerParseInfo structs. /// /// # Arguments: /// /// * `reader` - An open `Read` + `Seek` /// * `strings` - The `StringPool` for archetypes /// * `messages` - The global `MessageStore` containing client messages /// /// # Returns: /// /// If successful, a `CharacterAttributes`. /// Otherwise, a `ParseError` with the error information. fn read_character_attributes<T>(reader: &mut T) -> ParseResult<CharacterAttributes> where T: Read + Seek, { let mut attrib = CharacterAttributes::new(); macro_rules! 
attr { ($($field:ident),+) => { $( attrib.$field = bin_read(reader)?; )+ } } let (expected_bytes, begin_pos) = read_struct_length(reader)?; for i in 0..attrib.f_damage_type.len() { attrib.f_damage_type[i] = bin_read(reader)?; } attr!(f_hit_points, f_absorb, f_endurance, f_insight); // idea TOK_REDUNDANTNAME attr!(f_rage, f_to_hit); for i in 0..attrib.f_defense_type.len() { attrib.f_defense_type[i] = bin_read(reader)?; } attr!(f_defense, f_speed_running); // run speed TOK_REDUNDANTNAME attr!(f_speed_flying); // fly speed TOK_REDUNDANTNAME attr!( f_speed_swimming, f_speed_jumping, f_jump_height, f_movement_control, f_movement_friction, f_stealth, f_stealth_radius, f_stealth_radius_player, f_perception_radius, f_regeneration, f_recovery, f_insight_recovery, f_threat_level, f_taunt, f_placate ); attr!(f_confused); // confuse TOK_REDUNDANTNAME attr!(f_afraid, f_terrorized); // terrorize TOK_REDUNDANTNAME attr!(f_held, f_immobilized); // immobilize TOK_REDUNDANTNAME attr!(f_stunned); // stun TOK_REDUNDANTNAME attr!( f_sleep, f_fly, f_jump_pack, f_teleport, f_untouchable, f_intangible, f_only_affects_self, f_experience_gain, f_influence_gain, f_prestige_gain, f_null_bool ); // evade TOK_REDUNDANTNAME attr!( f_knock_up, f_knock_back, f_repel, f_accuracy, f_radius, f_arc, f_range, f_time_to_activate, f_recharge_time, f_interrupt_time, f_endurance_discount, f_insight_discount, f_meter ); for i in 0..attrib.f_elusivity.len() { attrib.f_elusivity[i] = bin_read(reader)?; } attr!(f_elusivity_base); verify_struct_length(attrib, expected_bytes, begin_pos, reader) } /// Reads a `CharacterAttributesTable` struct from a .bin file. /// Refer to Common/entity/character_attribs.h TokenizerParseInfo structs. /// /// # Arguments: /// /// * `reader` - An open `Read` + `Seek` /// * `strings` - The `StringPool` for archetypes /// * `messages` - The global `MessageStore` containing client messages /// /// # Returns: /// /// If successful, a `CharacterAttributesTable`. 
/// Otherwise, a `ParseError` with the error information. fn read_character_attributes_table<T>(reader: &mut T) -> ParseResult<CharacterAttributesTable> where T: Read + Seek, { let mut table = CharacterAttributesTable::new(); macro_rules! tbl_arr { ($($field:ident),+) => { $( bin_read_arr(&mut table.$field, reader)?; )+ } } let (expected_bytes, begin_pos) = read_struct_length(reader)?; for i in 0..table.pf_damage_type.len() { bin_read_arr(&mut table.pf_damage_type[i], reader)?; } tbl_arr!(pf_hit_points, pf_endurance, pf_insight); // idea TOK_REDUNDANTNAME tbl_arr!(pf_rage, pf_to_hit); for i in 0..table.pf_defense_type.len() { bin_read_arr(&mut table.pf_defense_type[i], reader)?; } tbl_arr!(pf_defense, pf_speed_running); // run speed TOK_REDUNDANTNAME tbl_arr!(pf_speed_flying); // fly speed TOK_REDUNDANTNAME tbl_arr!( pf_speed_swimming, pf_speed_jumping, pf_jump_height, pf_movement_control, pf_movement_friction, pf_stealth, pf_stealth_radius, pf_stealth_radius_player, pf_perception_radius, pf_regeneration, pf_recovery, pf_insight_recovery, pf_threat_level, pf_taunt, pf_placate, pf_confused ); // confuse TOK_REDUNDANTNAME tbl_arr!(pf_afraid, pf_terrorized); // terrorize TOK_REDUNDANTNAME tbl_arr!(pf_held, pf_immobilized); // immobilize TOK_REDUNDANTNAME tbl_arr!(pf_stunned); // stun TOK_REDUNDANTNAME tbl_arr!( pf_sleep, pf_fly, pf_jump_pack, pf_teleport, pf_untouchable, pf_intangible, pf_only_affects_self, pf_experience_gain, pf_influence_gain, pf_prestige_gain, pf_null_bool ); // evade TOK_REDUNDANTNAME tbl_arr!( pf_knock_up, pf_knock_back, pf_repel, pf_accuracy, pf_radius, pf_arc, pf_range, pf_time_to_activate, pf_recharge_time, pf_interrupt_time, pf_endurance_discount, pf_insight_discount, pf_meter ); for i in 0..table.pf_elusivity.len() { bin_read_arr(&mut table.pf_elusivity[i], reader)?; } tbl_arr!(pf_defense, pf_absorb); verify_struct_length(table, expected_bytes, begin_pos, reader) } /// Reads a `NamedTable` struct from a .bin file. 
/// Refer to Common/entity/classes.h TokenizerParseInfo structs. /// /// # Arguments: /// /// * `reader` - An open `Read` + `Seek` /// * `strings` - The `StringPool` for archetypes /// * `messages` - The global `MessageStore` containing client messages /// /// # Returns: /// /// If successful, a `NamedTable`. /// Otherwise, a `ParseError` with the error information. fn read_named_table<T>( reader: &mut T, strings: &StringPool, messages: &MessageStore, ) -> ParseResult<NamedTable> where T: Read + Seek, { let mut table = NamedTable::new(); let (expected_bytes, begin_pos) = read_struct_length(reader)?; table.pch_name = read_pool_string(reader, strings, messages)?; bin_read_arr(&mut table.pf_values, reader)?; verify_struct_length(table, expected_bytes, begin_pos, reader) }
def callbackFunc(self, event): feedback = ("New Element Selected: {}".format(event.widget.get())).split(':') print(feedback) print(' '.join(feedback[:]))
/** * LRU map implementation. * <p> * Created by davide-maestroni on 06/16/2016. * * @param <K> the key type. * @param <V> the value type. */ @SuppressWarnings("WeakerAccess") public class LruHashMap<K, V> extends LinkedHashMap<K, V> { private static final int DEFAULT_INITIAL_CAPACITY = 16; private static final float DEFAULT_LOAD_FACTOR = 0.75f; private static final long serialVersionUID = 3190208293198477083L; private final int mMaxCapacity; /** * Constructor. * * @param maxCapacity the maximum capacity. * @see HashMap#HashMap() */ public LruHashMap(final int maxCapacity) { this(maxCapacity, DEFAULT_INITIAL_CAPACITY); } /** * Constructor. * * @param maxCapacity the maximum capacity. * @param initialCapacity the initial capacity. * @see HashMap#HashMap(int) */ public LruHashMap(final int maxCapacity, final int initialCapacity) { this(maxCapacity, initialCapacity, DEFAULT_LOAD_FACTOR); } /** * Constructor. * * @param maxCapacity the maximum capacity. * @param initialCapacity the initial capacity. * @param loadFactor the load factor. * @see HashMap#HashMap(int, float) */ public LruHashMap(final int maxCapacity, final int initialCapacity, final float loadFactor) { super(initialCapacity, loadFactor, true); mMaxCapacity = ConstantConditions.positive("maximum capacity", maxCapacity); } /** * Constructor. * * @param maxCapacity the maximum capacity. * @param map the initial content. * @see HashMap#HashMap(Map) */ public LruHashMap(final int maxCapacity, @NotNull final Map<? extends K, ? extends V> map) { this(maxCapacity, map.size()); putAll(map); } @Override protected boolean removeEldestEntry(final Entry<K, V> eldest) { return (size() > mMaxCapacity); } }
<filename>src/test/java/petclinic/NewPetFormValidationSeleniumTest.java<gh_stars>0 package petclinic; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import mlech.petclinic.AbstractSelenium; import mlech.petclinic.enums.PetType; import mlech.petclinic.pages.NewPetPage; import org.junit.Before; import org.junit.Test; public class NewPetFormValidationSeleniumTest extends AbstractSelenium { private NewPetPage newPetPage; @Before public void setUp() { super.setUp(System.getProperty("browser")); newPetPage = openPetClinic().clickFindOwner().setLastName("Davis").clickFindOwnersButton(). clickDaniDavis().clickAddNewPet(); } @Test public void shouldCheckIfNameIsRequired() { newPetPage.setBirthDate("2015/05/16").setType(PetType.SNAKE).clickAddPetButton(); assertTrue(newPetPage.hasError()); } @Test public void shouldCheckIfBirthDateIsRequired() { newPetPage.setName("pet").setType(PetType.SNAKE).clickAddPetButton(); assertTrue(newPetPage.hasError()); } @Test(expected=IllegalArgumentException.class) public void shouldFail() { // newPetPage.setType(PetType.SNAKE); assertEquals(PetType.SNAKE, newPetPage.getPetType()); } }
class Reader:
    """
    Base abstract reader class.

    Concrete subclasses are expected to override :meth:`read`,
    :meth:`readline` and :meth:`forward`; the base implementations here are
    no-ops.

    Attributes
    ----------
    # timeout : int — read timeout passed to the constructor
    # need_stop : bool — set to True by cleanup() to request that any
    #     in-progress read stop
    """

    def __init__(self, tmo):
        """
        Constructor

        Parameters
        ----------
        tmo : int
            read timeout
        """
        self.timeout = tmo
        self.need_stop = False

    def read(self, sz):
        """
        Reads a number of bytes

        Parameters
        ----------
        sz : int
            number of bytes to read

        Returns
        -------
        bytes object
            read bytes

        Raises
        ------
        ReaderTimeoutError
            if timeout expires
        ReaderShutdownRequest
            if SIGINT was received during reading
        """
        # Abstract: subclasses implement the actual reading.
        pass

    def readline(self):
        """
        Reads a line

        Returns
        -------
        string
            read line
        """
        # Abstract: subclasses implement the actual reading.
        pass

    def forward(self, sz):
        """
        Moves the read pointer forward by a number of bytes

        Parameters
        ----------
        sz : int
            number of bytes to skip
        """
        # Abstract: subclasses implement the actual seek.
        pass

    def cleanup(self):
        """
        Cleans up the reader by flagging that reading should stop.
        """
        self.need_stop = True
<filename>src/components/MouseMonitor.tsx import React, { Component } from "react"; interface Props { onMoveAway: () => void; paddingX: number; paddingY: number; children: JSX.Element; } class MouseMonitor extends Component<Props> { container: HTMLDivElement | null = null; unsubscribe = () => {}; onMouseMove = (event: MouseEvent) => { if (!this.container) { return; } const { onMoveAway, paddingX, paddingY } = this.props; const { clientX, clientY } = event; // TODO: see if possible to optimize const { left, top, width, height } = this.container.getBoundingClientRect(); const inBoundsX = clientX > left - paddingX && clientX < left + width + paddingX; const inBoundsY = clientY > top - paddingY && clientY < top + height + paddingY; const isNear = inBoundsX && inBoundsY; if (!isNear) { onMoveAway(); } }; attachRef = (ref: HTMLDivElement | null) => { this.container = ref; this.unsubscribe(); if (ref) { const { ownerDocument: doc } = ref; doc.addEventListener("mousemove", this.onMouseMove); this.unsubscribe = () => { doc.removeEventListener("mousemove", this.onMouseMove); }; } }; render() { // eslint-disable-next-line const { onMoveAway, paddingX, paddingY, children, ...restProps } = this.props; return ( <div ref={this.attachRef}>{React.cloneElement(children, restProps)}</div> ); } } export default MouseMonitor;
<reponame>Jackzmc/l4d2-workshop-manager #![cfg_attr( all(not(debug_assertions), target_os = "windows"), windows_subsystem = "windows" )] mod config; mod logger; mod util; use steam_workshop_api::{Workshop, WorkshopItem}; use regex::Regex; use serde::{Deserialize, Serialize}; use tauri::{Manager, State, Window}; use futures::{StreamExt}; use std::{io::Write, time::{UNIX_EPOCH}}; use std::sync::{Arc, Mutex}; struct Data { settings: config::Settings, downloads: Arc<Mutex<config::Downloads>>, logger: logger::Logger } struct SplashscreenWindow(Arc<Mutex<Window>>); struct MainWindow(Arc<Mutex<Window>>); #[derive(Serialize, Deserialize)] enum ItemType { Updateable, Managed, Unmanaged, Unknown, Workshop } #[derive(Serialize, Deserialize)] #[serde(untagged)] enum File { Managed { item: WorkshopItem, item_type: ItemType, enabled: bool }, Item { item: WorkshopItem, item_type: ItemType, }, Unknown { item: UnknownFile, item_type: ItemType } } #[derive(Serialize, Deserialize)] struct UnknownFile { publishedfileid: String, file_size: Option<u64>, time_updated: Option<u64>, } /*TODO: Refactor the check of: 1. Valid File name 2. 
Has ID or Unknown timestamp into a method to be reused for disable check */ #[tauri::command] fn get_items(state: tauri::State<'_, Data>) -> Result<Vec<File>, String> { let regex = Regex::new(r"([0-9]{7,})").unwrap(); let mut unknown_ids = Vec::new(); let fileids = match Workshop::get_vpks_in_folder(&state.settings.gamedir.as_ref().unwrap()) { Ok(results) => { //Tries to find an ID to parse let mut fileids: Vec<String> = Vec::with_capacity(results.len()); for filename in results.iter() { if let Some(mat) = regex.find(&filename) { fileids.push(filename[mat.start()..mat.end()].to_string()); } else { //ItemType::Unknown let full_file = format!("{}.vpk", filename); if let Ok(metadata) = std::fs::metadata(&state.settings.gamedir.as_ref().unwrap().join(full_file)) { unknown_ids.push(UnknownFile { publishedfileid: filename.clone(), file_size: Some(metadata.len()), time_updated: metadata.modified().ok() .map(|metadata| metadata.duration_since(UNIX_EPOCH).expect("time went backwards").as_millis() as u64) }); } else { unknown_ids.push(UnknownFile { publishedfileid: filename.clone(), file_size: None, time_updated: None }); } } } fileids }, Err(err) => { state.logger.error("get_items", &format!("get_vpks_in_folder returnd error: {}\nDirectory: {:?}", err, state.settings.gamedir.as_ref().unwrap())); return Err(err) } }; if fileids.is_empty() { return Ok(Vec::new()); } let mut files: Vec<File> = Vec::with_capacity(fileids.len()); let details: Vec<WorkshopItem> = match Workshop::new(None).get_published_file_details(&fileids) { Ok(details) => details, Err(err) => { state.logger.error("get_items", &format!("Failed to get normal item details: {}\nIDS: {:?}", err, fileids)); return Err(err.to_string()) } }; let downloads = config::Downloads::load()?; for detail in details { //TODO: 1. Check if file is in downloads list //2. 
Check if file has an update match downloads.get_download(&detail.publishedfileid) { Some(download) => { let item_type = if detail.time_updated > download.time_updated { ItemType::Updateable } else { ItemType::Managed }; files.push(File::Item { item: detail, item_type }); }, None => { files.push(File::Item { item: detail, item_type: ItemType::Unmanaged, }); } } } if let Ok(workshop_items) = get_workshop_items(&state) { for item in workshop_items { files.push(File::Item { item, item_type: ItemType::Workshop }) } } for unknown in unknown_ids { files.push(File::Unknown { item: unknown, item_type: ItemType::Unknown, }); } Ok(files) } #[derive(Serialize, Deserialize)] struct UpdatePayload { publishedfileid: String, bytes_downloaded: usize, complete: bool } #[derive(Serialize, Deserialize)] struct ErrorPayload { publishedfileid: Option<String>, error: String } #[tauri::command] fn get_settings(state: tauri::State<Data>) -> config::Settings { state.settings.clone() } #[tauri::command] fn save_settings(state: tauri::State<Data>, changed: config::Settings) -> Result<(), String> { match config::Settings::load() { Ok(mut settings) => { settings.telemetry = changed.telemetry; Ok(()) }, Err(err) => { state.logger.error("save_settings", &format!("Could not load settings: {}", err.to_string())); return Err(err.to_string()); } } } #[tauri::command] fn close_splashscreen( splashscreen: State<SplashscreenWindow>, main: State<MainWindow>, ) { // Close splashscreen splashscreen.0.lock().expect("splashscreen lock fail").close().expect("splash close fail"); // Show main window main.0.lock().expect("main lock fail").show().expect("main close fail"); } #[tauri::command] fn get_install_info( state: tauri::State<'_, Data>, id: String ) -> Option<config::DownloadEntry> { match state.downloads.lock().expect("get_install_info: Could not get downloads lock").get_download(&id) { Some(download) => Some(download.clone()), None => None } } #[tauri::command] fn import_addon( state: tauri::State<'_, 
Data>, item: steam_workshop_api::WorkshopItem, is_workshop: bool ) -> Result<(), String> { let dest_folder = state.settings.gamedir.as_ref().unwrap(); let src_folder = if is_workshop { dest_folder.join("workshop") } else { dest_folder.clone() }; let filename = format!("{}.vpk", &item.publishedfileid); let download = config::DownloadEntry::from_item(&item); state.logger.debug("import_addon", &format!("Moving {} from {} to {}", filename, src_folder.to_string_lossy(), dest_folder.to_string_lossy() )); if is_workshop { if let Err(err) = std::fs::rename(src_folder.join(&filename), dest_folder.join(&filename)) { state.logger.error("import_addon", &format!("Moving import for {} error: {}", item.publishedfileid, err)); return Err(err.to_string()); } } let mut downloads = state.downloads.lock().expect("import_addon: Could not get downloads lock"); downloads.add_download(download); if let Err(err) = downloads.save() { state.logger.error("import_addon", &format!("Saving import for {} error: {}", item.publishedfileid, err)); return Err(err.to_string()); } state.logger.logp(logger::LogLevel::NORMAL, "import_addon", &format!("Imported item \"{}\" (id {}). 
IsWorkshop: {}", &item.title, item.publishedfileid, is_workshop)); Ok(()) } #[tauri::command] fn mark_addons_updated( state: tauri::State<'_, Data>, items: Vec<steam_workshop_api::WorkshopItem> ) -> Result<u32, String> { let mut downloads = state.downloads.lock().expect("import_addon: Could not get downloads lock"); let fileids: Vec<String> = items.into_iter().map(|item| item.publishedfileid).collect(); let details: Vec<WorkshopItem> = match Workshop::new(None).get_published_file_details(&fileids) { Ok(details) => details, Err(err) => { state.logger.error("mark_addons_updated", &format!("Failed to get item details: {}\nIDS: {:?}", err, fileids)); return Err(err.to_string()) } }; let mut updated: u32 = 0; for item in details { if let Some(index) = downloads.get_id_index(&item.publishedfileid) { let old = downloads.get(index).unwrap(); state.logger.logp(logger::LogLevel::NORMAL, "mark_addons_updated", &format!("Marked {} as updated ({} -> {})", &item.title, old.time_updated, item.time_updated)); downloads.set_download(index, config::DownloadEntry::from_item(&item)); updated += 1; } else { state.logger.error("mark_addons_updated", &format!("File is not managed: {}", item)); } } if updated > 0 { if let Err(_) = downloads.save() { return Err("Could not save downloads".to_owned()) } else { return Ok(updated) } } Ok(0) } #[tauri::command] async fn download_addon(window: Window, state: tauri::State<'_, Data>, item: steam_workshop_api::WorkshopItem) -> Result<(), String> { let config = &state.settings; let mut dest = { let fname = config.gamedir.as_ref().unwrap().join(format!("{}.vpk", item.publishedfileid)); std::fs::File::create(fname).expect("Could not create file") }; let mut downloaded: usize = 0; state.logger.logp(logger::LogLevel::NORMAL, "download_addons", &format!("Starting download of file \"{}\" (id {}) ({} bytes)", &item.title, item.publishedfileid, item.file_size)); match reqwest::Client::new() .get(&item.file_url) .header("User-Agent", 
"L4D2-Workshop-Downloader") .send() .await { Ok(response) => { let mut stream = response.bytes_stream(); let mut chunk_index: u8 = 0; while let Some(result) = stream.next().await { match result { Ok(chunk) => { if let Err(err) = dest.write(&chunk) { state.logger.error("download_addon", &format!("Write error for ID {}: {}", item.publishedfileid, err)); println!("[{}] Write Error: {}", &item.publishedfileid, err); break; } downloaded += chunk.len(); chunk_index += 1; if chunk_index > 100 { chunk_index = 0; window.emit("progress", UpdatePayload { publishedfileid: item.publishedfileid.clone(), bytes_downloaded: downloaded, complete: false }).ok(); } }, Err(err) => { window.emit("progress", ErrorPayload { publishedfileid: Some(item.publishedfileid.clone()), error: err.to_string() }).ok(); state.logger.error("download_addon", &format!("Chunk failure for ID {}: {}", item.publishedfileid, err)); println!("Download for {} failed:\n{}", item.title, &err); return Err(err.to_string()) } } } dest.flush().ok(); window.emit("progress", UpdatePayload { publishedfileid: item.publishedfileid.clone(), bytes_downloaded: downloaded, complete: true }).ok(); let entry = config::DownloadEntry::from_item(&item); let mut downloads = state.downloads.lock().expect("download_addon: Could not get downloads lock"); match downloads.get_id_index(&item.publishedfileid) { Some(index) => downloads.set_download(index, entry), None => downloads.add_download(entry) } state.logger.logp(logger::LogLevel::NORMAL, "download_addon", &format!("Downloaded file \"{}\" (id {}) ({} bytes)", &item.title, item.publishedfileid, item.file_size)); return Ok(()) }, Err(err) => { println!("Download failure for {}: {}", &item, err); return Err(err.to_string()) } } } fn main() { if let Err(_) = config::Settings::load() { let gamedir = util::prompt_game_dir(); let mut settings = config::Settings::new(Some(gamedir)); if let Err(err) = settings.save() { panic!("Could not save settings: {}", err); } }; 
tauri::Builder::default() .setup(|app| { // set the splashscreen and main windows to be globally available with the tauri state API app.manage(SplashscreenWindow(Arc::new(Mutex::new( app.get_window("splashscreen").expect("splash window fail") )))); let main = app.get_window("main").expect("main window fail"); main.hide().ok(); app.manage(MainWindow(Arc::new(Mutex::new( main )))); //TODO: Check if settings exists, if not, create new. exit on error (or send err) let logger = logger::Logger::new(config::get_appdir().join("downloader.log")); let settings = match config::Settings::load() { Ok(config) => config, Err(err) => { panic!("Settings failure: {}", err); } }; if !settings.gamedir.as_ref().unwrap().exists() { logger.error("setup", &format!("Specified game directory folder \"{}\" does not exist", settings.gamedir.as_ref().unwrap().to_string_lossy())); std::process::exit(1); } let downloads = match config::Downloads::load() { Ok(downloads) => downloads, Err(_e) => { config::Downloads::new() } }; if settings.telemetry { util::send_telemetry(&logger, downloads.size()); } app.manage(Data { settings, downloads: Arc::new(Mutex::new(downloads)), logger }); Ok(()) }) .invoke_handler(tauri::generate_handler![ get_items, download_addon, get_settings, save_settings, close_splashscreen, import_addon, get_install_info, mark_addons_updated ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); } fn get_workshop_items(state: &tauri::State<Data>) -> Result<Vec<WorkshopItem>, String>{ let wsfolder = &state.settings.gamedir.as_ref().unwrap().join("workshop"); if !wsfolder.exists() { std::fs::create_dir(wsfolder).ok(); return Ok(vec![]); } let fileids = match Workshop::get_vpks_in_folder(wsfolder.as_path()) { Ok(fileids) => fileids, Err(err) => { state.logger.error("get_workshop_items", &format!("Failed to get workshop items: {}", err)); return Err(err) } }; if fileids.is_empty() { return Ok(Vec::new()); } match 
Workshop::new(None).get_published_file_details(&fileids) { Ok(details) => return Ok(details), Err(err) => { state.logger.error("get_workshop_items", &format!("Failed to get workshop item details: {}", err)); return Err(err.to_string()) } }; }
/******************************************************************************* * @file ll.c * @brief Linked List module. * @author llHoYall <<EMAIL>> * @version v1.0 * @note * - 2018.03.09 Created. ******************************************************************************/ /* Include Headers -----------------------------------------------------------*/ // Standard #include <stdlib.h> // System #include "ll.h" /* APIs ----------------------------------------------------------------------*/ eLL_STATUS LL_Init(ptLL head) { if (head == NULL) return eLL_STATUS_INVALID_ARGS; head->next = head; head->prev = head; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_InsertHead(const ptLL head, const ptLL node) { if ((head == NULL) || (node == NULL)) return eLL_STATUS_INVALID_ARGS; node->prev = head; node->next = head->next; head->next = node; (node->next)->prev = node; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_AppendTail(const ptLL head, const ptLL node) { if ((head == NULL) || (node == NULL)) return eLL_STATUS_INVALID_ARGS; node->prev = head->prev; node->next = head; head->prev = node; (node->prev)->next = node; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_RemoveNode(const ptLL node) { if (node == NULL) return eLL_STATUS_INVALID_ARGS; (node->prev)->next = node->next; (node->next)->prev = node->prev; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_RemoveHead(const ptLL head, const pptLL node) { if (head == NULL) return eLL_STATUS_INVALID_ARGS; LL_RemoveNode(head->next); *node = head->next; (*node)->prev = NULL; (*node)->next = NULL; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_RemoveTail(const ptLL head, const pptLL node) { if (head == NULL) return eLL_STATUS_INVALID_ARGS; LL_RemoveNode(head->prev); *node = head->prev; (*node)->prev = NULL; (*node)->next = NULL; return eLL_STATUS_SUCCESS; } eLL_STATUS LL_RemoveAll(const ptLL head) { if (head == NULL) return eLL_STATUS_INVALID_ARGS; while (head->next != head) { ptLL node; LL_RemoveHead(head, (pptLL)&node); free(node); } return 
eLL_STATUS_SUCCESS; } eLL_STATUS LL_GetNext(const ptLL node, pptLL next_node) { if (node == NULL) return eLL_STATUS_INVALID_ARGS; *next_node = node->next; return eLL_STATUS_SUCCESS; }
def join_images_horizontally(images):
    """Join a sequence of images side by side into a single PIL image.

    Parameters
    ----------
    images : sequence
        Array-like images (e.g. ndarrays or PIL images) sharing the same
        height and channel count.
        NOTE(review): the original only joined the first two entries and
        silently ignored the rest; this joins every entry, which is
        identical behavior for the two-image case.

    Returns
    -------
    PIL.Image.Image
        The horizontally concatenated image (values cast to uint8).
    """
    # axis=1 stacks along the width dimension (columns).
    array = np.concatenate(images, axis=1)
    return Image.fromarray(np.uint8(array))
from django.apps import AppConfig
from django.conf import settings
from django.db.models.signals import post_migrate


def init_reader_study_permissions(*_, **__):
    """Grant reader-study model permissions to the creator and registered
    user groups after migrations run."""
    from django.contrib.auth.models import Group
    from guardian.shortcuts import assign_perm

    from grandchallenge.reader_studies.models import DisplaySet, ReaderStudy

    app_label = ReaderStudy._meta.app_label

    # Only the creators group may add new reader studies.
    creators, _ = Group.objects.get_or_create(
        name=settings.READER_STUDY_CREATORS_GROUP_NAME
    )
    assign_perm(f"{app_label}.add_{ReaderStudy._meta.model_name}", creators)

    # Registered users may modify reader studies and fully manage display
    # sets (object-level checks still apply elsewhere).
    registered, _ = Group.objects.get_or_create(
        name=settings.REGISTERED_USERS_GROUP_NAME
    )
    assign_perm(
        f"{app_label}.change_{ReaderStudy._meta.model_name}", registered
    )
    for action in ("add", "change", "view", "delete"):
        assign_perm(
            f"{app_label}.{action}_{DisplaySet._meta.model_name}", registered
        )


def init_answer_permissions(*_, **__):
    """Grant answer add/change permissions to registered users after
    migrations run."""
    from django.contrib.auth.models import Group
    from guardian.shortcuts import assign_perm

    from grandchallenge.reader_studies.models import Answer

    registered, _ = Group.objects.get_or_create(
        name=settings.REGISTERED_USERS_GROUP_NAME
    )
    for action in ("add", "change"):
        assign_perm(
            f"{Answer._meta.app_label}.{action}_{Answer._meta.model_name}",
            registered,
        )


class ReaderStudiesConfig(AppConfig):
    name = "grandchallenge.reader_studies"

    def ready(self):
        # Permission setup must run after every migrate, not at import time.
        post_migrate.connect(init_reader_study_permissions, sender=self)
        post_migrate.connect(init_answer_permissions, sender=self)

        # Importing registers the app's signal handlers as a side effect.
        # noinspection PyUnresolvedReferences
        import grandchallenge.reader_studies.signals  # noqa: F401
NUM = 2050


def solve(n):
    """Return the minimum count of 2050-numbers (2050 * 10**k) summing to n.

    Parameters
    ----------
    n : int
        The target value.

    Returns
    -------
    int
        The minimum number of terms, or -1 if n is not a multiple of 2050.

    Notes
    -----
    If n = 2050 * q, the greedy choice uses each decimal digit d of q at
    position k as d copies of 2050 * 10**k, so the answer is the digit sum
    of q.  This replaces the original float-based loop (`start /= 10`),
    which could lose precision for large n; pure integer arithmetic is
    exact for arbitrarily large inputs.
    """
    if n % NUM != 0:
        return -1
    return sum(int(digit) for digit in str(n // NUM))


def main():
    """Read t test cases from stdin and print one answer per line."""
    t = int(input())
    for _ in range(t):
        n = int(input())
        print(solve(n))


# Guarding the I/O keeps the module importable (and testable) without
# blocking on stdin, while `python file.py` behaves exactly as before.
if __name__ == "__main__":
    main()
Sinn Féin Finance Spokesperson Deputy Pearse Doherty has today praised the hundreds of people who attended today’s Commemoration ceremony at Drumboe and used his speech to pay tribute to the role which Republicans in Donegal and West Tyrone have played down through the years during the various periods of struggle. Today’s proceedings were chaired by Sinn Féin West Tyrone Assembly candidate Grace McDermott, while Sinn Féin TD Pearse Doherty was today’s main speaker. The following is the full text of Deputy Pearse Doherty’s speech: “A chairde, Ar dtús, ba maith liomsa fáilte mhor a chur romhaibh uilig go dtí an suíomh cuimhneacháin Droim Bó ar an lá mór speisialta seo agus céad bliain slán ó Éirí Amach na Cásca. Ba chóir liom fáilte chroíúil faoi leith a chur roimh na daoine atá inár gcuideachta anseo inniú don chéad uair mar chuid de chomóradh an chéid. For 100 years now Republicans the length and breadth of this island have been coming together to remember our patriot dead. Those brave men and women who were committed and dedicated to the Republic, today we remember and pay tribute. Indeed, Easter Sunday is not a day on which to mourn, but to remember and give reverence. We do not therefore come to grieve, but rather to honour and pay homage to all those Republicans who’ve gone before us. We remember their sacrifice, the heroism and the inspirational acts of courage and bravery which have set them apart as the true sons and daughters of Ireland. Amongst them were great leaders, comrades, family members and friends. They were visionaries who, through their sheer belief in the Republic proclaimed on the steps of the GPO, gave of themselves and – in doing so – have earned their rightful place amongst the great makers of nations. And while they may be gone, their sacrifice has not been for nothing: their deaths have most certainly not been in vain. We do not mourn them because they live on today. 
From our rural villages and towns, to our busy cities’ streets; wherever injustice and inequality rears its ugly head, and wherever there exists a desire to defeat it, it is in that desire that our fallen comrades live on. A chairde, we as Republicans continue to actively pursue and strive toward these very ideals for which our brothers and sisters both lived and died. It’s important therefore that we use this occasion to take a moment to recognise the very real and worthy nature of our struggle, and all those from across Ireland who’ve played their part in advancing that very cause in the decades gone by. Since the earliest recordings of Irish resistance to British rule, over hundreds of years, County Donegal and West Tyrone have made their mark at every juncture. This region has seen our own periods of struggle, defining moments of exceptional bravery and remarkable resistance. Some are well known, others not so much. One such moment is that of the Drumboe Martyrs, whose recently refurbished commemoration site we gather at here this afternoon. The heroism displayed by the martyrs is testament to the infamy by which they have become renowned and admired by Republicans both near and far. Charlie Daly, Seán Larkin, Dan Enright and Timothy O’Sullivan, each young twenty-somethings whose idealism and unfaltering belief in the Republican cause was such that they were prepared to make the ultimate sacrifice in the pursuit of Irish liberty and the true betterment of their fellow countrymen and women. Their execution, on the morning of 14th March 1923 in the grounds of Drumboe Castle and Woods by a Free State firing squad, coupled with the cold and callous nature with which their bodies were thrown into a ready-made grave afterwards, while both gruesome and cowardly, has only served to ignite the passionate fires of resistance which burn brightly in each of us who reject outright the very forces who conspire to oppress us and that which we believe in and hold true. 
For you see they are the inspiration, and their comrades that came before them, and those that followed in their footsteps, that drives this generation of Republicans forward to unite our nation and build a true Republic. A century ago when the British executed the leaders of the Rising they were cheered on by the establishment and media in Dublin. They believed that they could end the Republic before it was born and dismiss the ideals of the proclamation: they were wrong. When the Freestaters executed Republicans, as happened here at Drumboe, they believed they would end the Republican struggle and cow a generation: they were wrong. When Fianna Fáil ordered the execution of Charlie Kearns and his comrades they believed that the demand for Irish unity and a true Republic would follow them into the grave. They were wrong. When the British, watched on by an Irish government, believed that the demand for justice and equality would end with the hunger strike of 1981: they were wrong. They were wrong because you cannot kill an ideal, and you cannot defeat a people bound by principle and acting in solidarity. Today, as we do each Easter, tens of thousands of Republicans will gather across Ireland. But this weekend will see thousands of events marking the centenary all over the country and indeed in many parts of the world. But, let’s not pretend to ourselves that everyone will mark it with the same honest or genuine intentions that we would like. There will also be many people attending, participating and in fact, organising some commemoration events this weekend who will try to convince, who may even actually themselves believe, that we are living in the Republic declared on Easter Monday 1916. Let me be clear – this state is not the Republic envisaged by the leaders of 1916. And, how do we know that? Because the leaders of 1916 gave us the Proclamation of the Republic which was read here today. 
That proclamation set out in clear terms what the authors were fighting for and were willing to give their lives for. They fought and died for ‘the whole nation and all its parts’ – not 26 counties. The very notions that the leaders of 1916 would have been satisfied with a partitioned island – That Thomas Clarke, the first signatory, would have accepted his native Tyrone being annexed into a sectarian state – That the large numbers of northern volunteers involved in the rising were fighting for other people’s freedom – That the provisional government would choose the strongest symbol of Irish Unity, the tricolour of Green, White and Orange, as the National flag yet have settled for a divided Ireland – are notions as ludicrous as they are laughable. Today as we gather on this centenary year of the 1916 rising there is now more support for Sinn Féin and Irish Republicanism than at any other time in our history. The promise of the Republic is in sight: it is in our hands. At his court martial in Richmond Barracks, Thomas MacDonagh speaking to the British Military Tribunal said of the proclamation, “You think that it is already a dead and buried letter, but it lives, it lives. From minds alight with Ireland’s vivid intellect it sprang, in hearts aflame with Ireland’s mighty love it was conceived. Such documents do not die.” He was right: such documents do not die. The proclamation, a revolutionary document of its time, remains the mission statement for modern day Irish Republicanism. It is our determination to deliver upon that proclamation that sets us apart from the rest. To the political leaders who will go this weekend and wave their flags yet turn a blind eye to the partition of our country, to the crises in health and housing and to the growing inequality and poverty in our state my message to them is clear. It is the same message delivered by Connolly years before the Rising to the Irish people when he said. 
“Ireland as distinct from her people, is nothing to me: and the man who is bubbling over with love and enthusiasm for Ireland, and can yet pass unmoved through our streets and witness all the wrong and the suffering, the shame and the degradation brought upon the people of Ireland – aye, brought by Irishmen upon Irishmen and women, without burning to end it, is in my opinion, a fraud and a liar in his heart, no matter how he loves that combination of chemical elements he is pleased to call Ireland.” The British in 1916 removed the revolutionary leadership, when they executed our leaders. The revolutionary period was followed by a counter revolution. English colonial rule was replaced by a conservative free state version. This new Irish establishment set aside the proclamation. For over 90 years they have presided over partition and inequality. The rights of citizens secondary to the needs of elites. In the north a state formed out of discrimination, repression and exclusion could not reflect the principles of the proclamation. This is no longer 1916, 1968 or 1981. Much has been hard fought for and much has been hard won. This generation of Republicans must work to deliver the Republic. Through all of this change the proclamation remains a constant. 100 years on from the Rising and 35 years on from the Hunger Strikes we still need to ‘assert the right of the people of Ireland to the ownership of Ireland and the unfettered control of Irish destinies, to be sovereign and indefeasible’. We now have a peaceful and democratic pathway to Irish Unity. The agreements provided for polls north and south to end partition and the union. It is the democratic right of all our people to have a say in the future. We need to build support for unity and highlight the continued failure of partition and assert the right of people in the north to end the link with Britain. 
Across Ireland there are those in the establishment who oppose equality, those who do not ‘cherish all of the children of the nation equally’. Sinn Féin stands for a fair recovery, for free health care and for the right to a home. We believe in an inclusive Ireland. The proclamation makes clear that the republic guarantees the religious and civil liberties, equal rights and equal opportunities of all its citizens. We oppose racism, homophobia, sectarianism and any form of inequality and discrimination. There is an obligation on us all to build relations to overcome these differences, to reconcile with the past and build an inclusive society where we all work together for the common good. Republicans will need to be generous and patient. The proclamation makes clear, there cannot be any place for those who dishonour the Republican cause, with criminality or by engaging in counterproductive militarism that offers nothing but suffering, imprisonment and death. There remain those opposed to Republicanism, those opposed to unity and those opposed to equality. This year we have made significant progress and we have increased our representation in the Dáil. We stood on a platform of a Fair Recovery and for a new Republic. To end water charges and the property tax. To end the housing and health crisis. We stood against the old politics and parties that delivered crisis, austerity and inequality. The current standoff between these parties is nothing to do with the housing and health crisis, nothing to do with mass emigration and growing inequality. It is nothing to do with uniting the country and building a Republic. It is all to do with Ministerial positions, who will get the spoils of the election. During the election we said we would not prop up a Fine Gael or Fianna Fáil-led government. Unlike the Labour party we will stand by our election pledges. We will not support a government of the parties which created and sustained the crisis. Their policies are the problem. 
These parties are not the solution. We are now the main opposition in the Dáil to the conservative and failed establishment parties of Fine Gael/ Fianna Fáil and Labour. We are the leaders of progressive republican politics across Ireland. And we will be in government, North and South. It is not a case of if this happens but when it will happen. After almost a century of acting, interchangeably as government and opposition Fianna Fail and Fine Gael, who have each been equally committed to the status quo and that maintenance of the state ahead of the people who live in it, are terrified of coalescing, not for any reason of policy but because – they say – it will leave Sinn Féin as the largest opposition party. Once again they underestimate our ambition. We want Fianna Fáil and Fine Gael together – but not in government – Sinn Féin wants those two regressive, conservative, partitionist parties together in opposition so that we can lead a government intent on delivering the Republic that lives in our hearts. We want the Republic to live in the lives of every single family on this island. And, our challenge this weekend and throughout the period of commemoration in this centenary year is to rededicate ourselves to its realisation. We must build the demand for real change. For unity and for a Republic that honours the men and women of 1916 and all our patriots. In the north, we face into an assembly election, ongoing Tory cuts and an EU referendum. Martin McGuinness and the executive team has had a real and practical impact on the lives of our citizens there. Sinn Féin is the driving force behind the progressive measures that the Executive has taken –blocking water charges, protecting lower student fees and free prescriptions and pensioners’ travel. When others said no more could be achieved we continued to negotiate. In the Fresh Start Agreement we secured over half a billion pounds to protect the most vulnerable in society. 
Sinn Féin achieved it in spite of governments in London and Dublin, which are wedded to the politics of cuts and austerity. Sinn Féin delivered a deal that allows us to protect core public services, particularly in health and education. Sinn Féin remains committed to finding a resolution to the issue of the legacy of the past. The British government are continuing to block progress on this issue. They are more interested in covering up their actions than progressing reconciliation and healing. But we will persevere and the truth will come out. The process of change continues. 100 years on from when the British believed that they had defeated Republicanism, we now stand stronger than ever. 35 years on from when Haughey and Thatcher believed that they had defeated the hunger strikers we are now the largest party in Ireland. We have travelled far and achieved much, but the only fitting tribute to those who came before us is to build the Republic proclaimed in 1916. We have further to travel to build that Republic. It will take commitment, hard work and patience. But that is the way of struggle. As I look around here I see the people that will deliver that change. Together we will write Robert Emmet’s epitaph. Together we will build the only fitting tribute to our patriots – a true Republic. Before he was executed Patrick Pearse told the British: “You cannot conquer Ireland and you cannot extinguish the Irish passion for freedom; if our deed has not been sufficient to win freedom then our children will win it with a better deed”. 65 years later Bobby Sands, on the 17th day of his hunger strike, wrote in the last entry in his diary: 
“They won’t break me because the desire for freedom and the freedom of the Irish people is in my heart.” Today in this place, in this year – the 100th anniversary of the Easter Rising and the 35th anniversary of the hunger strike – we cry out to Pearse and Sands and to all our patriot dead to let them know, and to let our own children know, that our passion for freedom has never been extinguished and we intend to win that freedom. Beirígí Bua! Up the Republic!” PEARSE DOHERTY TELLS DRUMBOE: ‘WE CRY OUT TO PEARSE AND SANDS FOR FREEDOM’ was last modified: by Tags:
/**
 * Test method for {@link ZOAuth2Servlet#doPost}<br>
 * Validates that the event handler is called.
 *
 * @throws Exception If there are issues testing
 */
@Test
public void testDoPostEvent() throws Exception {
    // Arrange: build the servlet path "<server-path>/event/test-client/"
    // that routes the POST to the event handler for this client.
    final String action = "event";
    final String client = "test-client";
    final String path = String.format("%s/%s/%s/",
        OAuth2Constants.DEFAULT_SERVER_PATH.getValue(), action, client);
    final String authHeader = "verification-token";
    // Body the servlet is expected to parse from the request stream.
    final Map<String, Object> bodyParams = new HashMap<String, Object>();
    bodyParams.put("event", "test");
    bodyParams.put("payload", Collections.emptyMap());
    // Request expectations: path, (null) input stream, auth header present,
    // external-requests-disable header absent.
    expect(mockRequest.getPathInfo()).andReturn(path);
    expect(mockRequest.getInputStream()).andReturn(null);
    expect(mockRequest.getHeader(OAuth2HttpConstants.HEADER_AUTHORIZATION.getValue()))
        .andReturn(authHeader);
    expect(mockRequest.getHeader(OAuth2HttpConstants.HEADER_DISABLE_EXTERNAL_REQUESTS.getValue()))
        .andReturn(null);
    // The (statically mocked) JSON utility returns our prepared body map.
    expect(OAuth2JsonUtilities.streamToMap(anyObject())).andReturn(bodyParams);
    // Core expectation of this test: the event handler must be invoked
    // exactly once with the client name and the parsed body.
    OAuth2ResourceUtilities.event(matches(client), anyObject(), eq(bodyParams));
    PowerMock.expectLastCall().once();
    // Response expectations: 202 Accepted status and a flushed buffer.
    mockResponse.setStatus(Status.ACCEPTED.getStatusCode());
    PowerMock.expectLastCall().once();
    mockResponse.flushBuffer();
    PowerMock.expectLastCall().once();
    // Switch mocks to replay mode before exercising the servlet.
    replay(mockRequest);
    PowerMock.replay(OAuth2JsonUtilities.class);
    PowerMock.replay(OAuth2ResourceUtilities.class);
    PowerMock.replay(mockResponse);
    // Act.
    servlet.doPost(mockRequest, mockResponse);
    // Assert: all recorded expectations were satisfied.
    verify(mockRequest);
    PowerMock.verify(OAuth2JsonUtilities.class);
    PowerMock.verify(OAuth2ResourceUtilities.class);
    PowerMock.verify(mockResponse);
}
def testCheckTrue(self):
    """_call with check=True raises on a failing command and passes through on success."""
    # A failing command must raise CalledProcessError when check=True.
    # (The exception object itself is not inspected, so it is not captured.)
    with self.assertRaises(subprocess.CalledProcessError):
        bootstrap._call(0, FAIL, check=True)
    # A passing command must complete without raising.
    bootstrap._call(0, PASS, check=True)
/**
 * Deserializer used to convert the JSON-formatted representation of a custom_operation
 * into its java object version.
 *
 * The following is an example of the serialized form of this operation:
 *
 * [
 *     35,
 *     {
 *         "fee": {
 *             "amount": 100000,
 *             "asset_id": "1.3.0"
 *         },
 *         "payer": "1.2.20",
 *         "required_auths": [
 *             "1.2.20"
 *         ],
 *         "id": 61166,
 *         "data": "736f6d652064617461"
 *     }
 * ]
 */
public static class CustomOperationDeserializer implements JsonDeserializer<CustomOperation> {

    /**
     * Two-phase deserialization: the first call receives the [type, payload] array,
     * validates the operation type, and recursively deserializes the payload object;
     * the second (recursive) call builds the {@link CustomOperation} from the payload.
     */
    @Override
    public CustomOperation deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        if (json.isJsonArray()){
            // This block is used just to check if we are in the first step of the deserialization
            // when we are dealing with an array.
            JsonArray serializedCustomOperation = json.getAsJsonArray();
            if (serializedCustomOperation.get(0).getAsInt() != OperationType.CUSTOM_OPERATION.ordinal()){
                // If the operation type does not correspond to a custom operation, we return null
                return null;
            } else {
                // Calling itself recursively, this is only done once, so there will be no problems.
                return context.deserialize(serializedCustomOperation.get(1), CustomOperation.class);
            }
        }else{
            // This block is called in the second recursion and takes care of deserializing the
            // custom operation data itself.
            JsonObject jsonObject = json.getAsJsonObject();
            // Fee is a nested {amount, asset_id} object.
            AssetAmount fee = context.deserialize(jsonObject.get(KEY_FEE), AssetAmount.class);
            String payerId = jsonObject.get(KEY_PAYER)
                    .getAsString();
            UserAccount payer = new UserAccount(payerId);
            // required_auths is optional; an absent or non-array value yields an empty list.
            List<UserAccount> requiredAuths = new LinkedList<>();
            JsonElement requiredAuthsElement = jsonObject.get(KEY_REQUIRED_AUTHS);
            if ((requiredAuthsElement != null) && (requiredAuthsElement.isJsonArray())) {
                JsonArray requiredAuthsArray = requiredAuthsElement.getAsJsonArray();
                for (JsonElement jsonElement : requiredAuthsArray) {
                    String userAccountId = jsonElement.getAsString();
                    requiredAuths.add(new UserAccount(userAccountId));
                }
            }
            int operationId = jsonObject.get(KEY_ID).getAsInt();
            // "data" is hex-encoded in the JSON form; decode it back to a plain string.
            String data = new String(Util.hexToBytes(jsonObject.get(KEY_DATA).getAsString()));
            return new CustomOperation(fee, payer, operationId, requiredAuths, data);
        }
    }
}
Automobile inspection and maintenance programs: their role in reducing air pollution. The development of Inspection and Maintenance Programs to control automobile emissions is one component of a comprehensive strategy to reduce automobile-related air pollutants such as CO, NOx, and HC. Since the efficiency with which most motor vehicles are designed to limit pollutant emissions deteriorates with prolonged driving, an Inspection and Maintenance system is needed to restore the ability of the automobile to achieve its designed emission standards. Several types of approaches toward developing Inspection and Maintenance Programs in the U.S., including city, county, and state levels of organization, and their effectiveness have been described. However, information on the efficiency of these approaches in achieving reductions in pollutant levels remains to be documented, as does the cost effectiveness of such programs and their acceptance by the public. In light of the important role that Inspection and Maintenance Programs have been projected to have in the U.S. in reducing automobile-related air pollution, it is recommended that interdisciplinary research projects evaluating the multiple dimensions of Inspection and Maintenance Programs be initiated.
Imaging findings in patients with clinical anophthalmos. PURPOSE To review the intracranial and facial imaging features in children with congenital anophthalmos. METHODS We retrospectively studied eight children with anophthalmos with respect to intraorbital, intracranial, and craniofacial anomalies (six had CT examinations, including the face, orbits, and brain, and four had MR imaging, including the orbits and brain). RESULTS Three patients had primary bilateral anophthalmos on CT (n = 1) and MR (n = 3) studies. In these patients, MR images showed hypoplasia of the optic chiasm and posterior visual pathways (n = 3), agenesis (n = 1) or dysgenesis of the corpus callosum (n = 2), and a mass in the tuber cinereum region (n = 1). One patient had incontinentia pigmenti. Five patients had unilateral anophthalmos on CT (n = 5) and MR (n = 1) studies. One of these patients had a contralateral congenital cystic eye and one had contralateral severe microphthalmia and absent optic chiasm. All had craniofacial anomalies that consisted of midline facial clefts (n = 2) and concomitant hemifacial hypoplasia (n = 2). One had a craniosynostosis. All five had normal-appearing brains. CONCLUSION Patients with bilateral anophthalmos represent a distinct group from those with unilateral anophthalmos. In our patients, bilateral anophthalmos was associated with absence of the optic chiasm, diminished size of the posterior optic pathways, and agenesis or dysgenesis of the corpus callosum. Patients with unilateral anophthalmos had severe craniofacial anomalies. Imaging of the face is helpful in patients with unilateral anophthalmos.
"""Python-side aliases for the tuProlog JVM standard-library classes.

Importing ``jpype.imports`` enables direct ``import`` of JVM packages;
the module then re-exports the stdlib singletons under Python names.
"""
from tuprolog import logger

# noinspection PyUnresolvedReferences
import jpype.imports
# noinspection PyUnresolvedReferences
import it.unibo.tuprolog.solve.stdlib as _stdlib

# Re-export the JVM stdlib entry points at module level.
CommonBuiltins = _stdlib.CommonBuiltins
CommonFunctions = _stdlib.CommonFunctions
CommonPrimitives = _stdlib.CommonPrimitives
CommonRules = _stdlib.CommonRules

logger.debug("Loaded JVM classes from it.unibo.tuprolog.solve.stdlib.*")
import math


def win_probability(n: int, k: int) -> float:
    """Probability of reaching a score of at least ``k``.

    A fair ``n``-sided die is rolled once (each face 1..n with probability
    1/n); while the score is below ``k`` a fair coin is flipped, halving the
    survival probability each time the score is doubled.  A face ``x >= k``
    wins outright; a face ``x < k`` needs ``ceil(log2(k / x))`` successful
    coin flips.

    :param n: number of die faces (n >= 1)
    :param k: target score (k >= 1)
    :return: the total winning probability
    """
    total = 0.0
    for face in range(1, n + 1):
        if face >= k:
            # Already at or above the target: this face always wins.
            total += 1 / n
        else:
            # Number of doublings (coin flips) needed to reach k from `face`.
            flips = math.ceil(math.log2(k / face))
            total += (1 / n) * 0.5 ** flips
    return total


if __name__ == "__main__":
    # Script entry point: read "N K" from stdin and print the probability.
    N, K = map(int, input().split())
    print(win_probability(N, K))
// that all notification types will notify all listeners. @Test public void multipleListeners() { WaveformPresentationModel.Listener listener2 = mock(WaveformPresentationModel.Listener.class); model.addListener(listener2); model.setHorizontalScale(1); verify(listener).scaleChanged(1); verify(listener2).scaleChanged(1); verifyNoMoreInteractions(listener); }
# CGI script: echo back the "username" form field as an HTML page.
# Bug fix: the original header line was `print(Content-type: text/html\n\n")`
# — a syntax error (missing opening quote).
print("Content-type: text/html\n\n")

import cgi
import html

form = cgi.FieldStorage()
username = form.getvalue("username")
# `username` is untrusted client input; escape it before emitting HTML
# to prevent reflected XSS. Preserves the original "None" output when
# the field is absent.
print(html.escape(username) if username is not None else username)
import { Element, MultiCurveParametrization, PathMaker } from ".";
import { Point } from "../common";
import { RigidTransform, Style } from "./elements";

/**
 * Abstract mapping from a local 2D coordinate system to the final Cartesian
 * drawing plane. Concrete systems (Cartesian, polar, Bezier-curve) implement
 * the point/direction transforms; the `...WithBase` variants additionally
 * compose the system's base transform.
 */
export declare abstract class CoordinateSystem {
    /** Get the transform of the whole coordinate system (in the final Cartesian system) */
    abstract getBaseTransform(): RigidTransform;
    /** Transform the point (x, y) to Cartesian system */
    abstract transformPoint(x: number, y: number): Point;
    /** Transform the direction (dx, dy) located at local point (x, y). */
    abstract transformDirectionAtPoint(x: number, y: number, dx: number, dy: number): Point;
    /** Get the local affine transform at point (x, y) */
    abstract getLocalTransform(x: number, y: number): RigidTransform;
    /** Like transformPoint, but composed with the base transform. */
    abstract transformPointWithBase(x: number, y: number): Point;
    /** Like transformDirectionAtPoint, but composed with the base transform. */
    abstract transformDirectionAtPointWithBase(x: number, y: number, dx: number, dy: number): Point;
}

/** Normal cartesian coordinate system */
export declare class CartesianCoordinates extends CoordinateSystem {
    origin: Point;
    constructor(origin?: Point);
    getBaseTransform(): RigidTransform;
    transformPoint(x: number, y: number): Point;
    transformDirectionAtPoint(x: number, y: number, dx: number, dy: number): Point;
    transformPointWithBase(x: number, y: number): Point;
    transformDirectionAtPointWithBase(x: number, y: number, dx: number, dy: number): Point;
    getLocalTransform(x: number, y: number): RigidTransform;
}

/** Polar coordinates. Angle is in degrees, clockwise, top is 0 */
export declare class PolarCoordinates extends CoordinateSystem {
    origin: Point;
    radial1: number;
    radial2: number;
    distortY: boolean;
    constructor(origin?: Point, radial1?: number, radial2?: number, distortY?: boolean);
    getBaseTransform(): RigidTransform;
    /** Map a logical radial value to a drawing-plane distance. */
    transformRadial(radial: number): number;
    /** Inverse of transformRadial. */
    inverseTransformRadial(distance: number): number;
    transformPoint(angle: number, radial: number): Point;
    transformDirectionAtPoint(angle: number, radial: number, dx: number, dy: number): Point;
    getLocalTransform(angle: number, radial: number): RigidTransform;
    transformPointWithBase(angle: number, radial: number): Point;
    transformDirectionAtPointWithBase(angle: number, radial: number, dx: number, dy: number): Point;
}

/** Bezier curve coordinate system. */
export declare class BezierCurveCoordinates extends CoordinateSystem {
    origin: Point;
    private curve;
    constructor(origin: Point, curve: MultiCurveParametrization);
    getBaseTransform(): RigidTransform;
    transformPoint(x: number, y: number): Point;
    transformDirectionAtPoint(x: number, y: number, dx: number, dy: number): Point;
    getLocalTransform(x: number, y: number): RigidTransform;
    transformPointWithBase(x: number, y: number): Point;
    transformDirectionAtPointWithBase(x: number, y: number, dx: number, dy: number): Point;
    /** Total arc length of the underlying curve. */
    getLength(): number;
    getCurve(): MultiCurveParametrization;
}

/**
 * Convenience wrapper that draws primitive elements (rect, ellipse, line)
 * expressed in a given coordinate system.
 */
export declare class CoordinateSystemHelper {
    coordinateSystem: CoordinateSystem;
    constructor(coordinateSystem: CoordinateSystem);
    rect(x1: number, y1: number, x2: number, y2: number, style?: Style): Element;
    ellipse(x1: number, y1: number, x2: number, y2: number, style?: Style): Element;
    line(x1: number, y1: number, x2: number, y2: number, style?: Style): Element;
    /** Append a (possibly curved) segment from (x1,y1) to (x2,y2) to `path`. */
    lineTo(path: PathMaker, x1: number, y1: number, x2: number, y2: number, newPath: boolean): void;
}
<filename>app/src/main/java/net/pvtbox/android/service/PvtboxService.java package net.pvtbox.android.service; import android.app.ActivityManager; import android.app.Notification; import android.app.NotificationChannel; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Build; import android.os.Handler; import android.os.HandlerThread; import android.os.IBinder; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.core.app.NotificationCompat; import androidx.localbroadcastmanager.content.LocalBroadcastManager; import android.util.Log; import com.bugfender.sdk.Bugfender; import net.pvtbox.android.BuildConfig; import net.pvtbox.android.R; import net.pvtbox.android.api.AuthHttpClient; import net.pvtbox.android.api.EventsHttpClient; import net.pvtbox.android.api.ShareHttpClient; import net.pvtbox.android.application.App; import net.pvtbox.android.application.Const; import net.pvtbox.android.db.DataBaseService; import net.pvtbox.android.db.model.DeviceRealm; import net.pvtbox.android.service.monitor.Monitor; import net.pvtbox.android.tools.JSON; import net.pvtbox.android.tools.SpeedTool; import net.pvtbox.android.service.signalserver.HttpLoader; import net.pvtbox.android.service.signalserver.ShareSignalServerService; import net.pvtbox.android.service.signalserver.SignalServerService; import net.pvtbox.android.service.signalserver.WipeTool; import net.pvtbox.android.service.sync.SyncService; import net.pvtbox.android.tools.FileTool; import net.pvtbox.android.tools.PatchTool; import net.pvtbox.android.service.transport.Connectivity.ConnectivityService; import net.pvtbox.android.service.transport.Downloads.DownloadManager; import net.pvtbox.android.ui.start.EmulatorDetector; import net.pvtbox.android.ui.start.StartActivity; import 
org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.Objects; /** * * Pvtbox. Fast and secure file transfer & sync directly across your devices. * Copyright © 2020 Pb Private Cloud Solutions Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **/ public class PvtboxService extends Service { private static final String TAG = PvtboxService.class.getSimpleName(); private boolean inited = false; @Nullable private HandlerThread handlerThread; private Handler handler; private SignalServerService signalServerService; private ConnectivityService connectivityService; private DownloadManager downloadManager; private ShareSignalServerService shareSignalServerService; @Nullable private ConnectivityService shareConnectivityService; private DownloadManager shareDownloadManager; private DeviceStatusBroadcaster deviceStatusBroadcaster; @Nullable private BroadcastReceiver receiverStop; private PreferenceService preferenceService; private EventsHttpClient eventsHttpClient; private ShareHttpClient shareHttpClient; private OperationService operationService; private Monitor monitor; private WipeTool wipeTool; private DataBaseService dataBaseService; private SpeedTool speedTool; private AuthHttpClient authHttpClient; @Nullable private JSONObject loginData = null; private final BroadcastReceiver exitReceived = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { if (preferenceService != null) 
preferenceService.setExited(true); PvtboxService.stopServiceAsync(getBaseContext()); } }; @Nullable private Intent intent; private NotificationCompat.Builder notificationBuilder; private boolean performingLogin = false; @Nullable private BroadcastReceiver receiverDownloadsResume; @Nullable private BroadcastReceiver receiverDownloadsPause; private boolean paused = false; private int status = R.string.app_connecting; @Override public void onCreate() { Log.i(TAG, "onCreate"); super.onCreate(); Bugfender.init(this, BuildConfig.BUGFENDER_KEY, true); Bugfender.disableReflection(true); registerReceiver(exitReceived, new IntentFilter(Const.ACTION_EXIT)); Intent notificationIntent = new Intent(this, StartActivity.class); notificationIntent.addFlags( Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0); Intent exitIntent = new Intent(Const.ACTION_EXIT); PendingIntent exitPendingIntent = PendingIntent.getBroadcast(this, 1, exitIntent, 0); String channelId = BuildConfig.APPLICATION_ID; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) { NotificationManager manager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); if (manager != null) { NotificationChannel channel = new NotificationChannel( channelId, getString(R.string.app_name), NotificationManager.IMPORTANCE_DEFAULT); manager.createNotificationChannel(channel); } } notificationBuilder = new NotificationCompat.Builder(this, channelId) .setSmallIcon(R.drawable.notification_icon) .setContentIntent(pendingIntent) .setPriority(NotificationCompat.PRIORITY_LOW) .addAction(R.drawable.exit, getString(R.string.exit), exitPendingIntent); status = R.string.app_connecting; Notification notification = notificationBuilder .setContentTitle(getText(R.string.app_connecting)) .setContentText(getText(R.string.tap_to_open)) .setSubText(null) .build(); 
startForeground(1, notification); } @Override public void onDestroy() { Log.i(TAG, "onDestroy"); unregisterReceiver(exitReceived); super.onDestroy(); if (handlerThread == null) return; destroy(); } @Nullable @Override public IBinder onBind(Intent intent) { return null; } @Override public int onStartCommand(Intent intent, int flags, int startId) { if (!inited) { inited = true; handlerThread = new HandlerThread( "PvtboxService", HandlerThread.NORM_PRIORITY); handlerThread.start(); handler = new Handler(handlerThread.getLooper()); handler.post(this::regBroadcastReceiverStop); handler.post(this::regBroadcastReceiverDownloads); handler.post(this::init); } this.intent = intent; Log.i(TAG, String.format( "onStartCommand, intent: %s, flags: %s, startId: %s, handler: %s, ", intent, flags, startId, handler)); handler.post(this::start); return START_STICKY; } private void init() { Log.i(TAG, "init"); Context context = getBaseContext(); preferenceService = new PreferenceService( getSharedPreferences(Const.SETTINGS_NAME, Context.MODE_PRIVATE)); if (preferenceService.isStatisticEnabled()) { Bugfender.enableLogcatLogging(); Bugfender.enableCrashReporting(); } preferenceService.setExited(false); authHttpClient = new AuthHttpClient(context, preferenceService); FileTool fileTool = new FileTool(context); fileTool.createDirectory(Const.DEFAULT_PATH); fileTool.createDirectory(Const.INTERNAL_PATH); fileTool.createDirectory(Const.COPIES_PATH); fileTool.createDirectory(Const.PATCHES_PATH); fileTool.createEmptyFile(FileTool.buildPathForCopyNamedHash(Const.EMPTY_FILE_HASH)); dataBaseService = new DataBaseService( getBaseContext(), preferenceService, fileTool); dataBaseService.setupOwnDevice(); dataBaseService.setOnSyncedCallback(() -> { if (!Const.FREE_LICENSE.equals(preferenceService.getLicenseType()) && status != R.string.app_synced) { Log.d(TAG, "show notification: synced"); status = R.string.app_synced; Notification notification = notificationBuilder 
.setContentTitle(getText(R.string.app_synced)) .setContentText(getText(R.string.tap_to_open)) .setSubText(null) .build(); NotificationManager mNotificationManager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); Objects.requireNonNull(mNotificationManager).notify(1, notification); } handler.postDelayed(this::cleanup, 60 * 1000); }); dataBaseService.setOnSyncingCallback(() -> { if (!Const.FREE_LICENSE.equals(preferenceService.getLicenseType()) && status != R.string.app_syncing) { Log.d(TAG, "show notification: syncing"); status = R.string.app_syncing; Notification notification = notificationBuilder .setContentTitle(getText(R.string.app_syncing)) .setContentText(getText(R.string.tap_to_open)) .setSubText(null) .build(); NotificationManager mNotificationManager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); Objects.requireNonNull(mNotificationManager).notify(1, notification); } }); dataBaseService.setOnPausedCallback(() -> { if (!Const.FREE_LICENSE.equals(preferenceService.getLicenseType()) && status != R.string.app_paused) { Log.d(TAG, "show notification: paused"); status = R.string.app_paused; Notification notification = notificationBuilder .setContentTitle(getText(R.string.app_paused)) .setContentText(getText(R.string.tap_to_open)) .setSubText(null) .build(); NotificationManager mNotificationManager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); Objects.requireNonNull(mNotificationManager).notify(1, notification); } }); speedTool = new SpeedTool(dataBaseService); wipeTool = new WipeTool( context, dataBaseService, preferenceService, authHttpClient, fileTool); eventsHttpClient = new EventsHttpClient(context, preferenceService); shareHttpClient = new ShareHttpClient(context, preferenceService); operationService = new OperationService( context, dataBaseService, fileTool, eventsHttpClient, shareHttpClient); HttpLoader httpLoader = new HttpLoader( fileTool, dataBaseService, shareHttpClient, 
operationService, speedTool, getBaseContext()); signalServerService = new SignalServerService( context, preferenceService, dataBaseService, httpLoader, wipeTool, this); operationService.setSignalServerService(signalServerService); shareSignalServerService = new ShareSignalServerService( context, preferenceService, operationService, dataBaseService, fileTool); PatchTool patchTool = new PatchTool(fileTool); monitor = new Monitor( context, operationService, fileTool, dataBaseService, preferenceService, eventsHttpClient, patchTool); connectivityService = new ConnectivityService( context, "main", signalServerService, speedTool, dataBaseService); signalServerService.setConnectivityService(connectivityService); downloadManager = new DownloadManager( context, connectivityService, fileTool, dataBaseService, patchTool, false, paused); shareConnectivityService = new ConnectivityService( context, "share", shareSignalServerService, speedTool, null); shareSignalServerService.setConnectivityService(shareConnectivityService); shareDownloadManager = new DownloadManager( context, shareConnectivityService, fileTool, dataBaseService, patchTool, true, paused); shareSignalServerService.setDownloadManager(shareDownloadManager); SyncService syncService = new SyncService( dataBaseService, signalServerService, () -> { downloadManager.onInitialSyncDone(); monitor.onInitialSyncDone(); dataBaseService.updateOwnDeviceInitialSyncing(false); }); signalServerService .setSyncService(syncService); deviceStatusBroadcaster = new DeviceStatusBroadcaster( signalServerService, preferenceService, dataBaseService); signalServerService.setOnConnectedCallback(() -> { if (!Const.FREE_LICENSE.equals(preferenceService.getLicenseType()) && status != R.string.app_syncing) { Log.d(TAG, "show notification: syncing (network connected)"); status = R.string.app_syncing; Notification notification = notificationBuilder .setContentTitle(getText(R.string.app_syncing)) .setContentText(getText(R.string.tap_to_open)) 
.setSubText(null) .build(); NotificationManager mNotificationManager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); Objects.requireNonNull(mNotificationManager).notify(1, notification); } dataBaseService.updateOwnDeviceOnline(true); deviceStatusBroadcaster.checkAndBroadcastStatus(true); }); signalServerService.setOnDisconnectedCallback(() -> { if (!Const.FREE_LICENSE.equals(preferenceService.getLicenseType()) && status != R.string.app_connecting) { Log.d(TAG, "show notification: connecting"); status = R.string.app_connecting; Notification notification = notificationBuilder .setContentTitle(getText(R.string.app_connecting)) .setContentText(getText(R.string.tap_to_open)) .setSubText(null) .build(); NotificationManager mNotificationManager = (NotificationManager) getSystemService( Context.NOTIFICATION_SERVICE); Objects.requireNonNull(mNotificationManager).notify(1, notification); } dataBaseService.updateOwnDeviceOnline(false); }); } private void cleanup() { if (handler == null || handlerThread == null || !handlerThread.isAlive()) return; if (!dataBaseService.cleanup()) { handler.postDelayed(this::cleanup, 60 * 1000); } } private void destroy() { Log.i(TAG, "destroy"); inited = false; performingLogin = false; try { if (App.getApplication() != null) { App.getApplication().onServiceStopped(); } Log.d(TAG, "destroy: 1"); if (handlerThread == null) return; handler.removeCallbacksAndMessages(null); handler.post(() -> { if (speedTool != null) speedTool.onDestroy(); if (eventsHttpClient != null) eventsHttpClient.onDestroy(); if (shareHttpClient != null) shareHttpClient.onDestroy(); if (deviceStatusBroadcaster != null) deviceStatusBroadcaster.onDestroy(); if (shareSignalServerService != null) shareSignalServerService.onDestroy(); if (signalServerService != null) signalServerService.onDestroy(); if (monitor != null) monitor.onDestroy(); if (operationService != null) operationService.onDestroy(); if (shareDownloadManager != null) 
shareDownloadManager.onDestroy(); if (shareConnectivityService != null) shareConnectivityService.onDestroy(); if (downloadManager != null) downloadManager.onDestroy(); if (connectivityService != null) connectivityService.onDestroy(); }); handlerThread.quitSafely(); handlerThread = null; if (receiverStop != null) { LocalBroadcastManager.getInstance(this).unregisterReceiver(receiverStop); receiverStop = null; } if (receiverDownloadsPause != null) { LocalBroadcastManager.getInstance(this).unregisterReceiver(receiverDownloadsPause); receiverDownloadsPause = null; } if (receiverDownloadsResume != null) { LocalBroadcastManager.getInstance(this).unregisterReceiver(receiverDownloadsResume); receiverDownloadsResume = null; } stopSelf(); } finally { Log.d(TAG, "destroyed"); } } private void start() { Log.i(TAG, "start"); if (preferenceService.isLoggedIn() && loginData != null) { Log.i(TAG, "start: start signal server service"); startSignalServerService(); } else { Intent i = new Intent(Const.NETWORK_STATUS); i.putExtra(Const.NETWORK_STATUS_SIGNAL_CONNECTING, true); i.putExtra(Const.NETWORK_STATUS_INFO_HEADER, R.string.connecting_to_server); i.putExtra(Const.NETWORK_STATUS_INFO, R.string.wait_while_connecting); LocalBroadcastManager.getInstance(getBaseContext()).sendBroadcast(i); login(); } } private void login() { if (intent == null) { executeLogin(); return; } String loginResponse = intent.getStringExtra("loginResponse"); if (loginResponse == null) { executeLogin(); return; } try { loginData = new JSONObject(loginResponse); } catch (JSONException e) { e.printStackTrace(); executeLogin(); return; } onLoggedIn(loginData); } private void onLoggedIn(@NonNull JSONObject response) { performingLogin = false; loginData = response; JSONArray servers = loginData.optJSONArray("servers"); if (servers == null) { executeLogin(); return; } preferenceService.setLoggedIn(true); preferenceService.setLastEventUuid(JSON.optString(response, "last_event_uuid")); 
connectivityService.init(servers); Objects.requireNonNull(shareConnectivityService).init(servers); signalServerService.setServers(servers); shareSignalServerService.setServers(servers); App app = App.getApplication(); if (app != null) { app.setServers(servers); } String licenseTypeOld = preferenceService.getLicenseType(); String licenseType = JSON.optString(response, "license_type"); boolean needClear = Const.FREE_LICENSE.equals(licenseTypeOld) && !Objects.equals(licenseTypeOld, licenseType); preferenceService.setLicenseType(licenseType); onLicenseTypeChanged(); if (needClear) { dataBaseService.setAllEventsUnchecked(); } startSignalServerService(); JSONArray actions = response.optJSONArray("remote_actions"); if (actions != null) { for (int i = 0; i < actions.length(); ++i) { wipeTool.executeAction(actions.optJSONObject(i)); } } } public void onLicenseTypeChanged() { Notification notification; if (notificationBuilder == null) { return; } if (Const.FREE_LICENSE.equals(preferenceService.getLicenseType())) { notification = notificationBuilder .setContentTitle(getText(R.string.app_is_free)) .setContentText(getText(R.string.app_sync_disabled)) .setSubText(getText(R.string.app_upgrade_license)) .build(); if (App.getApplication() != null) { App.getApplication().shouldShowFreeLicenseMessage = true; } } else if (App.getApplication() != null) { App.getApplication().shouldShowFreeLicenseMessage = false; } LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(Const.LICENSE_CHANGED)); } private void executeLogin() { if (preferenceService.getUserHash() == null) return; if (performingLogin) return; performingLogin = true; authHttpClient.login( preferenceService.getUserHash(), response -> handler.post(() -> onLoggedIn(response)), error -> handler.post(() -> onLoginError(error))); } private void onLoginError(@Nullable JSONObject error) { performingLogin = false; handler.postDelayed(this::executeLogin, 1000); if (error != null) { String errcode = JSON.optString(error, 
"errcode"); JSONArray actions = error.optJSONArray("remote_actions"); if (actions != null && actions.length() > 0) { for (int i = 0; i < actions.length(); ++i) { wipeTool.executeAction(actions.optJSONObject(i)); } } else if ("USER_NOT_FOUND".equals(errcode) || "LICENSE_LIMIT".equals(errcode)) { wipeTool.logoutAndClose(null); } } } private void startSignalServerService() { Log.i(TAG, "startSignalServerService"); if (!signalServerService.isStarted()) { Log.i(TAG, "startSignalServerService: starting"); signalServerService.start(); } else { Log.i(TAG, "startSignalServerService: already started"); } if (intent == null) { Log.i(TAG, "startSignalServerService: intent==null"); return; } String shareHash = intent.getStringExtra(Const.KEY_SHARE_HASH); if (shareHash != null) { shareHash = shareHash.trim() .replace("\n", "").replace("\r", ""); } String pathDownload = intent.getStringExtra(Const.KEY_SHARE_PATH_DOWNLOAD); intent.removeExtra(Const.KEY_SHARE_HASH); intent.removeExtra(Const.KEY_SHARE_PATH_DOWNLOAD); intent = null; if (shareHash != null && !shareHash.isEmpty()) { if (shareSignalServerService.isStarted()) { Log.i(TAG, "startSignalServerService: " + "share signal server service already started"); return; } shareSignalServerService.downloadDirectLink(shareHash, pathDownload); } } private void regBroadcastReceiverStop() { IntentFilter filter = new IntentFilter(Const.STOP_SERVICE_INTENT); receiverStop = new BroadcastReceiver() { @Override public void onReceive(Context context, @NonNull Intent intent) { handler.post(() -> { if (intent.getBooleanExtra(Const.STOP_SERVICE_WIPE, false)) { wipeTool.wipe(); } destroy(); if (intent.getBooleanExtra(Const.STOP_SERVICE_WIPE, false)) { preferenceService.setUserHash(null); preferenceService.setLoggedIn(false); } }); } }; LocalBroadcastManager.getInstance(this).registerReceiver(receiverStop, filter); filter = new IntentFilter(Const.LOGOUT_INTENT); LocalBroadcastManager.getInstance(this).registerReceiver(receiverStop, filter); } 
/**
 * Registers receivers for the pause/resume download local broadcasts. Each
 * updates the in-memory flag, persists the paused state for this device, and
 * forwards the command to the download manager when present.
 */
private void regBroadcastReceiverDownloads() {
    receiverDownloadsPause = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            handler.post(() -> {
                paused = true;
                dataBaseService.updateOwnDevicePaused(true);
                if (downloadManager != null) {
                    downloadManager.pause();
                }
            });
        }
    };
    LocalBroadcastManager.getInstance(this).registerReceiver(
            receiverDownloadsPause, new IntentFilter(Const.DOWNLOADS_PAUSE_OPERATION));
    receiverDownloadsResume = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            handler.post(() -> {
                paused = false;
                dataBaseService.updateOwnDevicePaused(false);
                if (downloadManager != null) {
                    downloadManager.resume();
                }
            });
        }
    };
    LocalBroadcastManager.getInstance(this).registerReceiver(
            receiverDownloadsResume, new IntentFilter(Const.DOWNLOADS_RESUME_OPERATION));
}

/**
 * Synchronously asks the running service instance to stop via a local
 * broadcast.
 *
 * @param context any context, used only to obtain the broadcast manager
 * @param wipe    when true the service wipes user data before stopping
 */
public static void stopService(@NonNull Context context, boolean wipe) {
    Log.d(TAG, "stopService");
    Intent intent = new Intent(Const.STOP_SERVICE_INTENT);
    intent.putExtra(Const.STOP_SERVICE_WIPE, wipe);
    LocalBroadcastManager.getInstance(context).sendBroadcastSync(intent);
}

/** Asynchronous variant of {@link #stopService}; never wipes user data. */
private static void stopServiceAsync(@NonNull Context context) {
    Log.d(TAG, "stopServiceAsync");
    Intent intent = new Intent(Const.STOP_SERVICE_INTENT);
    intent.putExtra(Const.STOP_SERVICE_WIPE, false);
    LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}

/**
 * Returns true only when this service appears in the running-services list
 * AND the signal-server connection reports it is established.
 *
 * NOTE(review): ActivityManager.getRunningServices() is deprecated since
 * API 26 and since then only reports the caller's own services — acceptable
 * for self-inspection, but confirm against the target API level.
 */
public static boolean isServiceRunning(@NonNull Context context) {
    ActivityManager manager =
            (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    if (manager == null) {
        Log.i(TAG, "isServiceRunning: false");
        return false;
    }
    for (ActivityManager.RunningServiceInfo service :
            manager.getRunningServices(Integer.MAX_VALUE)) {
        if (PvtboxService.class.getName().equals(service.service.getClassName())) {
            Log.i(TAG, String.format(
                    "isServiceRunning: true, SignalServerService.IsConnected: %s",
                    SignalServerService.IsConnected()));
            return SignalServerService.IsConnected();
        }
    }
    Log.i(TAG, "isServiceRunning: false");
    return false;
}

/**
 * Starts the service to download shared content.
 *
 * @param hash share hash identifying the content to download
 * @param path local destination path for the download
 */
public static void startPbService(@NonNull Context context, String hash, String path) {
    Log.i(TAG, "startPbService");
    try {
        Intent intent = new Intent(context, PvtboxService.class);
        intent.putExtra(Const.KEY_SHARE_HASH, hash);
        intent.putExtra(Const.KEY_SHARE_PATH_DOWNLOAD, path);
        startService(context, intent);
    } catch (IllegalStateException e) {
        // Thrown when the system forbids a background start; deliberately
        // swallowed after logging the stack trace.
        e.printStackTrace();
    }
}

/**
 * Starts the service with a pre-fetched login response so it can skip the
 * initial login round-trip.
 */
public static void startPbService(@NonNull Context context, String loginResponse) {
    Log.i(TAG, "startPbService");
    try {
        Intent intent = new Intent(context, PvtboxService.class);
        intent.putExtra("loginResponse", loginResponse);
        startService(context, intent);
    } catch (IllegalStateException e) {
        e.printStackTrace();
    }
}

/**
 * Launches the service, refusing to run on emulators and using
 * startForegroundService() on Android O+ as required by the background
 * execution limits.
 */
private static void startService(@NonNull Context context, Intent intent) {
    if (EmulatorDetector.isEmulator(context)) {
        return;
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        context.startForegroundService(intent);
    } else {
        context.startService(intent);
    }
}
}
#include<stdio.h> int a[200020]; main() { int T,n,m,i,j,k,l,t,p; while(scanf("%d %d",&m,&n)!=EOF) { t=p=0; for(i=1;i<=m;i++) { scanf("%d",&a[i]); if(a[i]==1) { t++; } else{ p++; } } for(i=0;i<n;i++) { scanf("%d %d",&j,&k); l=k-j+1; if(l%2==1) { printf("0\n"); continue; } if(l<=(2*t)&&l<=(2*p)) { printf("1\n"); } else { printf("0\n"); } } } }
/* * Evaluate the t values in the first num slots of the vals[] array and * place the evaluated values back into the same array. Only evaluate t * values that are within the range <0, 1>, including the 0 and 1 ends of * the range iff the include0 or include1 booleans are true. If an * "inflection" equation is handed in, then any points which represent a * point of inflection for that cubic equation are also ignored. */ private static int evalCubic(double vals[], int num, boolean include0, boolean include1, double inflect[], double c1, double cp1, double cp2, double c2) { int j = 0; for (int i = 0; i < num; i++) { double t = vals[i]; if ((include0 ? t >= 0 : t > 0) && (include1 ? t <= 1 : t < 1) && (inflect == null || inflect[1] + (2 * inflect[2] + 3 * inflect[3] * t) * t != 0)) { double u = 1 - t; vals[j++] = c1 * u * u * u + 3 * cp1 * t * u * u + 3 * cp2 * t * t * u + c2 * t * t * t; } } return j; }
Mrs. Anna Miesse, Local Doctor’s Wife This chapter relays the limited information available about Ott’s early life, and focuses on her life as the wife of Dr. Jonathan Miesse in Chillicothe, Ohio. The household of this small-town physician and his wife, the household in which their children grew, was complicated, unhappy, and sometimes ugly. Jonathan filed for divorce in 1849, after twelve years of marriage, but withdrew his application within two months, and attempted a separation agreement in 1853. He again filed for divorce in late 1855. This chapter focuses on marriage laws and the contrast between marriage ideals and realities.
50,000 March in Tunisia to Launch World Social Forum TUNIS, Tunisia—Marching down a boulevard ringed in razor wire, and in view of armored vehicles mounted with water cannons, tens of thousands from across the world called for new measures of liberty and dignity as they descended Tuesday afternoon on Tunis to open the weeklong World Social Forum. Organizers estimated some 50,000 people took part in the Forum’s opening march, which set off around 4pm under glaring sunshine and blustery winds from Avenue Habib Bourguiba—the site where weeks of demonstrations forced Tunisian dictator Zine El Abidine Ben Ali from power on January 14, 2011, and kicked off the Arab Spring. The decision to hold this year’s Forum in Tunisia, where the two-year-old popular revolution is now facing mounting threats of a Salafist religious takeover, suggests the importance the country plays as a model for successful democratic transitions. “If the Tunisian experience leads to a kind of democracy that brings a different conception to countries in Africa and the Middle East, it will give people hope to think that democracy is possible,” said Ahmed Ben Messaoud, an audiologist and supporter of the opposition Patriotic Democratic Unified Party, whose leader, Chokri Belaid, was assassinated in February. The killing of Chokri—a revered figure who had begun, in under half a year, to mount a serious challenge to Tunisia’s new establishment through the skilled organizing of opposition groups into a coalition known as the Popular Front—unleashed outrage and street demonstrations that led to the resignation of the country’s president. Meanwhile, Tunisia’s public is quickly losing faith in a government it elected on a temporary basis last October to write and approve the country’s new Constitution—but which, so far, has failed to set new election dates, provide jobs or carry through on other key promises, leading to fears of a longer-term power grab. 
“The country is in a big dilemma: Islamists want a totalitarian government, and civil society wants a more open, more secular government,” said Messaoud, speaking at an outdoor café in downtown Tunis several hours before the World Social Forum march began. In contrast to Egypt, where President Mohammed Morsi and his Muslim Brotherhood rammed through a Constitution widely unfavored by a population that saw it as religious overreach, in Tunisia “they’re taking longer—and paying for it with instability.” “It’s not going to be an easy task to write this Constitution and to lead the country from dictatorship to democracy,” he added. “We’re not just making a Constitution ‘to go’—one that gives the chance of another dictatorship in the future to take over. We want a Constitution for the next generation.” The global next generation will be on full display here throughout the week as activists and civic organizations engage in hundreds upon hundreds of workshops, panels and strategic discussions to redress universal challenges to human freedom and dignity. On Tuesday’s march to inaugurate the 2013 World Social Forum—an event which began in 2001 in Porto Alegre, Brazil, and was held there again last year—people chanted and sang and danced as the mile-long tide of bodies swept across Tunis, a sea of flags and banners flowing from the historic boulevard of the Revolution on to Avenue Mohamed V. From anti-capitalists to Catholics, from white-robed groups representing Western Saharans’ right of independence, to Tunisian parents demanding justice for the government killings of their children in 2011, the marchers continued for five kilometers, weaving in and out of sunshine as they passed under palm trees and finally, in the evening, reached the Menzah Sport City arena for speeches and a rally. One man who attended, Diego de la Mora, works on budget transparency for the Mexican organization Fundar. 
He will lead several workshops at the Forum helping civic groups learn how to better hold their governments accountable with budgetary planning. In that sense, he said, the Forum is important as “a means of uniting people with technical expertise.” “It’s very important for people to know how to fight for their rights,” said de la Mora, “and that means learning to talk in government language.” Another man, a Tunisian named Gharasch, had a long silver beard and wore a heavy black coat and tinted sunglasses as he walked with the surging crowd. He said he and his wife “came to encourage the revolution. The Forum is for all the youth to help us build democracy.” This week, we’ll be seeing how some of that building takes place.
Indices for Assessing Potential Environmental Hazard from Future Ship Scrapping Process, Determinable in Ship Design Stage Abstract This paper shortly presents the issue of utilization of ships after their withdrawal from service. Information on number of floating units liquidated in previous years was presented. Hazards to the environment, health and life of workers employed in the Far East ship scrapping yards operating on the beaches, were indicated. Then, the most important rules which have to make the ship recycling process safe were referred to. This author proposed to supplement the rules by environmental hazard indices which would be determined already in ship design stage. According to the concept the indices should take into account amount of dangerous substances used for building the ship as well as degree of their harmfulness (weighing factors). Two approaches to the issue of determining the weighing factors were proposed: deterministic and fuzzy. INTRODUCTION The issue of liquidation of ships is presently a subject of interest of international community, especially in EU countries. According to the NGO Ship-breaking Platform coalition, over 70% ships end their life on the South Asia beaches: in India, Bangladesh and Pakistan . The scrapping is carried out there with the use of cheap labour, often by hands of under-age workers. As a result of bad working conditions lethal accidents happen. In 2016 in Bangladesh only about 20 persons were killed in such accidents . Moreover such practice produces great danger to the environment because the disassembling is performed on the beach tide areas. Dangerous materials and substances contaminate shores and coastal waters. Moreover, they are spread over farther regions due to action of sea currents, consumed by living organisms, including fish caught for consumption. 
In 2016 as much as 862 ships were liquidated worldwide, including 305 in India, 222 -Bangladesh, 141 -Pakistan, 92 -Turkey, 74 -China, 22 -EU and 6 in other regions of the world. It is essential that as much as 668 ships out of their total number were broken on the beaches. Their total tonnage amounts to 27,4 m. GT, including 23,8 m. GT tonnage of ships scrapped on the beaches. According to the data published by the NGO , on the list of disreputable champions which liquidated their ships on the beaches in 2016 the following EU countries can be found: Germany (with 98 ships scrapped on the beaches, out of 100 altogether), Greece (with 104 ships scrapped on the beaches, out of 113 altogether). LEGAL REGULATIONS To assure safe and environmentally friendly process of ship-breaking there were prepared a number of legal regulations which are presented almost in detail in and . . It contains 18 ship-recycling enterprises, including one located in Poland (Almex firm of Szczecin). ENVIRONMENTAL HAZARD INDICES In order to assure a high level of environmental safety for ship during its scrapping process it is necessary to make appropriate decisions already in design stage. To this end, it should be strived to use as low amount of dangerous materials for building the ship as possible. The materials both hazardous and neutral for the environment should be fit for recycling and using again. Hence they would not fill waste stockpiles (scrap-yards). Therefore it's worth to strive after reaching a high susceptibility to recycling of a ship during its design stage. The next issue is to decide as early as in ship design stage in what way ship disassembling process would be carried out in future. It should be so designed as to obtain a scrapping process characterized by low energy and time consumption, as well as low emission of noxious substances during such operations as paint removal, plate cutting etc. 
It is required to provide new designed ships with a list of dangerous materials. It should be worked out already in ship design and building stage. According to the PRS publication on ship recycling , such list should cover all dangerous materials used for ship construction and outfit, operational waste as well as reserves. The list specifies forbidden materials as well as those of limited allowable content, such as asbestos, polychlorinated biphenyls (PCB), substances reducing ozone layer (CFC), tin-organic compounds in antigrowth systems, heavy metals (e.g. lead, mercury), radioactive compounds. The list comprises also fuel and lubricating oils and oily bilge water. Environmental hazard resulting from ship scrapping should be determined in compliance with the standard format of the list of dangerous materials given in the second appendix to the above mentioned publication . Hence it is proposed to introduce indices which estimate potential environmental hazard caused by a ship during process of its scrapping. This would be a useful supplement to the rules worked out in this area. The index which takes into account dangerous materials used for building a ship would have the following form: where: I -environmental hazard index, W i -harmfulness weighing factor of i-th dangerous material, (taking values in the range between 0 and 1), M i -mass of i-th dangerous material, expressed in kg. Of course, the larger value of the index the greater environmental hazard from a ship under scrapping work. It's worth to introduce one index more in order to take into account that a part of elements which contain dangerous materials may be recycled and used again. The other index would cover possible degree of the materials recycling. Its numerical value would be the smaller the greater amount of the materials could be useful again, i.e. not subjected to storage. 
The index would have the form as follows: where: I R -environmental hazard index which covers possible degree of recycling, W i -harmfulness weighing factor of i-th dangerous material (taking values in the range between 0 and 1), M i -mass of i-th dangerous material, expressed in kg. R i -recycling degree, i.e. recyclability of elements containing i-th dangerous material, (expressed by percentage number in the range between 0 and 100). The above given values of the indices are of absolute form. They represent real mass of dangerous substances comprised in a ship. As a result, they will be advantageous for small ships but non-advantageous for large ones. In a sense it seems to be correct because under similar conditions scrapping the ships of small tonnage will be less hazardous to the environment than that of the ships of large tonnage. In order to make it possible to compare potential environmental hazard from ships of different size the above mentioned indices should be related to ship mass. This way they will become relative ones expressed by the dimensionless ratio of mass of dangerous substances and mass of ship itself. Designer should strive to assure possibly low values of the indices. However, on the other hand he/she must take into account economic aspects and proceed in compliance with the ALARP approach (As Low As Reasonably Practicable). The ALARP approach (principle) was described, a. o., in . It says that impermissible risk is to be lowered regardless of cost. Ship designer would have to deal with such situation if he/she used unpermitted materials or exceeded their allowable contents. In consequence, he/she would be forced to resign from them. ALARP area is another component of the risk. It requires from the designer to perform an analysis of possible reduction of risk and cost associated with this approach. The risk should be reduced to as low level as rationally justified for economic reasons. 
The third area deals with negligible risk when it is as much low that there is not necessary to attempt to its lowering. In practice, an impermissible risk does not ought to occur as it is associated with violating the rules. In ship scrapping a negligible risk would rather not happen. Therefore it should be taken into account that such process will be in ALARP area, which is connected with necessity to conduct an analysis of possible reduction of the risk and cost associated with this. WEIGHING FACTORS The above discussed potential environmental hazard indices require to establish weighing factors. They have to represent harmfulness degree of used dangerous material. Determination of the weighing factors will require forming the group of experts and conducting the tests of their opinions. Following the Norwegian method for the environmental indexing of ships , one assumed that the experts will assign values in the range from 0 to 10, where zero stands for a non-dangerous material and 10 -for an extremely dangerous material. Then, the values obtained as a result of elaboration of experts' opinions will be standardized, i.e. scaled down to the numerical interval from 0 to 1, where 1 will stand for weighing factor for an extremely dangerous material. The below presented calculation example follows the document which contains supplements to recommendations of the formal ship safety assessment method (FSA) worked out under auspices of International Maritime Organization (IMO). In fact, one should expect several dozen dangerous materials for which it will be necessary to determine weighing factors. In the presented example only five materials are assumed to be considered and that only five experts will be at one's disposal. The below presented Tab. 1 shows hypothetical results of experts' activity which consisted in assigning rank values from the interval (0, 10) to the five selected materials. 
The next step in determining values of weighing factors is their standardization. It should be expected that the experts would not be fully unanimous in their opinions. Some divergence in the opinions may be observed in the example data of Tab. 1. In such situation a conformity level of the achieved opinions should be estimated. For differences in weighing factors assigned to given dangerous materials either standard deviation from mean value or range may be used. For material No.1 the standard deviation from the mean equal to 5 amounts to 0, the range -0, and the relative range -also 0%, because full conformity of experts' opinions was reached in this case. For material No.2 the standard deviation from the mean equal to 8,4 amounts to 0,48, the range -1, and the relative range -10%. It may be said that we have to do with low discrepancy of experts' opinions, i.e. high conformity level. An experts' conformity coefficient which simultaneously takes into account all estimates made for all considered cases is described in . It is called Kendall -Smith coefficient which can be determined from the following formula: where: W K-S -conformity coefficient, I -number of considered dangerous materials, J -number of experts. It is assumed that W K-S > 0,7 stands for a high conformity of experts' opinions. In the considered example W K-S = 0,904; it means that the conformity level of the opinions given in Tab. 1 is high. In case when opinion conformity level obtained during analysis seems too low it should be checked whether this concerns weighing factors assigned by experts to all dangerous materials or only to some of them. In case when the discrepancies are large it would be useful to engage additional experts or another group of them. It is advised to strive after achieving a high conformity of experts' opinions as only in such case obtained values of weighing factors can be deemed appropriate. 
FUZZY WEIGHING FACTORS If to achieve a high conformity of opinions of experts asked on numerical values appears impossible even after limitation of scale range down to values between 0 and 3, then it will be at one's disposal to try another approach, namely to apply fuzzy logic. It was developed for investigating uncertain or unclear issues . This author already used it for analyzing reliability and risk of technical systems. A reference to fuzzy logic can be found in . The idea to use fuzzy numbers for finding values of weighing factors consists in putting questions to experts in another way. Firstly, amount of numbers possible for selection should be reduced from 10 to 3. Secondly, instead of the numbers, to use linguistic variables such as: -extremely dangerous material, -very dangerous material, -dangerous material. To ask experts to assign, by means of brainstorming or voting, particular materials to the sets defined by the above given linguistic variables. Thirdly, it is necessary to replace the linguistic variables by fuzzy numbers with the use of the so called fuzzy values of Baldwin truth, described in . Membership functions of fuzzy numbers corresponding to linguistic variables, hence also materials harmfulness weighing factors, take the following form: This way we obtain weighing factors in the form of three fuzzy numbers with membership functions given in Fig. 1,2 and 3, respectively. Having them in this form we can to substitute them into the formulae (1) and (2) as it is allowed to multiply fuzzy number by real numbers. As a result, we obtain the indices in the form of fuzzy numbers. It makes it possible to achieve index values in the form of "about x "instead of the number"x", and the form of membership functions provides us with information on uncertainty level of the performed assessment. Designers who do not intend to make use of fuzzy numbers, may use values of fuzzified weighing factors "hardened" to the form of real numbers. 
By applying the calculation method of abscissa of centre of gravity: (4) where: Wo -weighing factor; f(W) -membership function ( Fig. 1 through 3), W -a value on abscissa axis ( Fig.1 through 3) the factors in question take the following values: 1 -for extremely dangerous materials, 0,75 -for very dangerous materials and 0,67 -for dangerous materials. SUMMARY The issue of scrapping the ship after its withdrawal from service should be taken into account already during ship design stage. It should be strived after use of possibly small amount of dangerous materials, ensure as large as possible application of materials which would be suitable for recycling, as well as apply modular system to ship power plant, which would facilitate disassembling the ship. Environmental hazard connected with ship scrapping should be estimated already in ship design stage by using the proposed indices based on the compulsory list of dangerous materials and which constitute a proposal for supplementing the rules.
<gh_stars>10-100
package petablox.android.analyses;

import petablox.analyses.alias.Ctxt;
import petablox.project.ClassicProject;
import petablox.project.analyses.JavaAnalysis;
import petablox.project.analyses.ProgramRel;
import soot.SootMethod;
import petablox.bddbddb.Rel.RelView;
import petablox.project.Petablox;
import petablox.util.tuple.object.Pair;
import petablox.util.tuple.object.Trio;

/**
 * Petablox analysis that pairs each in/out label (on an argument or a return
 * value) with every calling context of the method it is attached to.
 * Produces: the CLbl domain of (label, context) pairs, and the CCL
 * (context -> pair) and LCL (label -> pair) relations.
 */
@Petablox(name = "context-label-java",
    consumes = { "InLabelArg", "InLabelRet", "OutLabelArg", "OutLabelRet", "CM" },
    produces = { "CLbl", "CCL", "LCL" },
    namesOfTypes = { "CLbl" },
    types = { DomCLbl.class },
    namesOfSigns = { "CCL", "LCL" },
    signs = { "C0,CLbl0:C0_CLbl0", "Lbl0,CLbl0:Lbl0_CLbl0" }
)
public class ContextLabelAnalysis extends JavaAnalysis {
    // Context-method relation; loaded once in run() and shared by getContexts().
    private ProgramRel relCM;
    // Domain of (label, context) pairs, filled by CL() and then enumerated.
    private DomCLbl domCL;

    /** Builds the CLbl domain, then materializes the CCL and LCL relations from it. */
    public void run() {
        domCL = (DomCLbl) ClassicProject.g().getTrgt("CLbl");
        relCM = (ProgramRel) ClassicProject.g().getTrgt("CM");
        relCM.load();
        CL();
        relCM.close();
        ProgramRel relCCL = (ProgramRel) ClassicProject.g().getTrgt("CCL");
        relCCL.zero();
        ProgramRel relLCL = (ProgramRel) ClassicProject.g().getTrgt("LCL");
        relLCL.zero();
        int numCL = domCL.size();
        // Project the (label, context) domain into the two output relations.
        for(int clIdx = 0; clIdx < numCL; clIdx++){
            Pair<String,Ctxt> pair = (Pair<String,Ctxt>) domCL.get(clIdx);
            String label = pair.val0;
            Ctxt ctxt = pair.val1;
            relCCL.add(ctxt, pair);
            relLCL.add(label, pair);
        }
        relCCL.save();
        relLCL.save();
    }

    /**
     * For each (label, method, argIndex) tuple in the named relation, adds a
     * (label, context) pair for every calling context of the method.
     * NOTE(review): trio.val2 (the argument index) is unused — presumably
     * only the label/method pairing matters for the domain; confirm.
     */
    private void processLabelArg(String relName) {
        ProgramRel relLabelArg = (ProgramRel) ClassicProject.g().getTrgt(relName);
        relLabelArg.load();
        Iterable<Trio<String,SootMethod,Integer>> it1 = relLabelArg.getAry3ValTuples();
        for(Trio<String,SootMethod,Integer> trio : it1) {
            String label = trio.val0;
            SootMethod meth = trio.val1;
            for(Ctxt ctxt : getContexts(meth)){
                domCL.getOrAdd(new Pair(label,ctxt));
            }
        }
        relLabelArg.close();
    }

    /**
     * Same as {@link #processLabelArg} but for (label, method) return-value
     * tuples.
     */
    private void processLabelRet(String relName) {
        ProgramRel relLabelRet = (ProgramRel) ClassicProject.g().getTrgt(relName);
        relLabelRet.load();
        Iterable<Pair<String,SootMethod>> it2 = relLabelRet.getAry2ValTuples();
        for(Pair<String,SootMethod> pair : it2) {
            String label = pair.val0;
            SootMethod meth = pair.val1;
            for(Ctxt ctxt : getContexts(meth)){
                domCL.getOrAdd(new Pair(label,ctxt));
            }
        }
        relLabelRet.close();
    }

    /** Populates domCL from all four label relations and persists it. */
    private void CL() {
        processLabelArg("InLabelArg");
        processLabelArg("OutLabelArg");
        processLabelRet("InLabelRet");
        processLabelRet("OutLabelRet");
        domCL.save();
        System.out.println("PRT print domCL: "+domCL.toString());
    }

    /**
     * Returns the calling contexts of {@code meth} by selecting the matching
     * method column (index 1) of the CM relation view.
     */
    private Iterable<Ctxt> getContexts(SootMethod meth) {
        RelView view = relCM.getView();
        view.selectAndDelete(1, meth);
        return view.getAry1ValTuples();
    }
}
#include<bits/stdc++.h> #define f(i,t) for(int i=0;i<t;i++) #define ll long long using namespace std; int main(){ll t,s;cin>>t>>s;ll p[t]; f(i,t){cin>>p[i]; }ll k,sum=0,b1[s],b2[s]; f(g,s){cin>>b1[g]>>b2[g]; k=min(p[b1[g]-1],p[b2[g]-1]);sum=sum+k; }cout<<sum<<endl;}
<reponame>victorkurauchi/react-native-skeleton<filename>src/store/modal/actions.ts import * as types from '@/store/modal/types'; import { noPayload, forwardPayload } from '@/utils/actionHelpers'; export const openModal = forwardPayload<types.ModalConfig>(types.OPEN_MODAL); export const setConfirmButtonEnabled = forwardPayload<boolean>(types.SET_CONFIRM_BUTTON_ENABLED); export const closeModal = noPayload(types.CLOSE_MODAL); export const setLoading = forwardPayload<boolean>(types.SET_LOADING);
Also new: Geofencing, integration with Nest and Honeywell Wi-Fi thermostats Lutron is adding new features and new dimmer and keypad options to its RadioRA 2 and HomeWorks QS series of custom-installed lighting-control systems, including remote access from a smartphone or tablet. The company is also expanding the functionality of its iOS and Android control apps. The new keypad options consist of a designer-oriented Palladiom keypad; a new Grafik T dimmer that is now phase-selectable for use with almost all types of lighting loads; and a new seeTouch C.L. hybrid keypad that adds LED-light-control ability. The Grafik T product series also gets its first hybrid keypad, providing local-zone dimming as well as whole-home scene-based control. New app-control capabilities for both systems include geo-fencing and Apple Watch notification. All new keypads and features will be available in early 2016. Here’s what’s coming: Lutron Connect App, Connect Bridge: The company’s new $299-suggested Connect Bridge, which now connects to the cloud, and the new Lutron Connect App will add such new features as free remote access, geo-fencing, support for Apple and Android widgets, and the like. With AppleWatch connectivity, users will be able to make changes to, and receive alerts from, RadioRA 2 or HomeWorks QS system from a remote location. With geo-fencing, lighting and shade scenes and thermostat changes will be triggered from a smartphone when the systems detect that a user is within a select distance from home. With support for Apple and Android widgets, users will get quick access to the Lutron Connect app without unlocking their phone. Also new is integration with Nest and Honeywell Wi-Fi thermostat support, enabling control of the thermostats through Lutron’s systems. The app will also display the battery status of Lutron’s Radio Powr Savr occupancy sensors, Serena battery-powered shades, and the Pico wireless remote control. 
Keypads: The Palladiom keypad, starting at $300 depending on finish, connects to a HomeWorks QS system and offers a new aesthetic with minimalist design said to complement any décor. It also features buttons and faceplates made of the same material, whether plastic, glass or metal. The keypads feature large, tactile buttons with backlit, engraved text and backlighting that changes intensity automatically with ambient light conditions. The prices are $300 for plastic finishes, $400 for glass finishes, and $500 for metal finishes. The new $299 Grafik T phase-selectable dimmer, intended for RadioRA 2 and HomeWorks QS, works with incandescent halogen lights like the previous version but adds ability to work with almost any load type, including LED lights. It maintains a minimalist design and a faceplate that appears to float off the wall. It lacks knobs, buttons, or sliders. Users touch anywhere along the dimmer’s LED light bar to set lights to the right level. Also new: the seeTouch C.L. hybrid dimmer/keypad, priced at $399 for RadioRA 2 and $499 for HomeWorks QS. The keypad replaces a light switch and offers local-zone dimming while also functioning as a keypad that enables whole-home scene-based control from a single device. The new model adds ability to control LED loads while maintaining custom-engraved buttons with backlighting. The $549 Grafik T hybrid keypad, compatible with RadioRA2 and HomeWorks QS, is the first hybrid keypad in the Grafik line, providing local-zone dimming and whole-home scene-based control. Key features include compatibility with many LED loads, engraved back-lit text, and dynamic back-light management to ensure the correct backlight intensity based on ambient light conditions. It will be available with two, four, five or six fully programmable buttons in several finishes, including metal and glass. New accessory devices let installers gang Palladiom and Grafik T dimmers. 
The devices include 15-amp and 20-amp duplex receptacles and USB and GFCI receptacles.
/* { dg-do compile } */ /* { dg-options "-fdump-tree-phiopt-details -ffat-lto-objects isa>=4" } */ /* { dg-skip-if "code quality test" { *-*-* } { "-O0" "-O1" } { "" } } */ /* This is testing for errors which can only happen in assembly generation. dg-error does not guarantee assembly generation, so we need to do it manually by using -ffat-lto-objects. */ typedef struct s { int v; int b; struct s *l; struct s *r; } S; /* Test requires conditional moves. */ NOMIPS16 int foo(S *s) { S *this; S *next; this = s; if (this->b) next = this->l; else next = this->r; return next->v; } /* { dg-final { scan-tree-dump "Hoisting adjacent loads" "phiopt1" } } */
import { cloneDeep } from 'lodash';
import { reducerTester } from '../../../../test/core/redux/reducerTester';
import { getVariableTestContext } from '../state/helpers';
import { VariablesState } from '../state/types';
import { IntervalVariableModel } from '../types';
import { toVariablePayload } from '../utils';
import { createIntervalVariableAdapter } from './adapter';
import { createIntervalOptions, intervalVariableReducer } from './reducer';

// Tests for the interval-variable reducer: createIntervalOptions should turn
// the variable's comma-separated `query` string into selectable options,
// prepending an "auto" option when `auto` is enabled.
describe('intervalVariableReducer', () => {
  const adapter = createIntervalVariableAdapter();

  describe('when createIntervalOptions is dispatched', () => {
    describe('and auto is false', () => {
      it('then state should be correct', () => {
        const id = '0';
        const query = '1s,1m,1h,1d';
        const auto = false;
        const { initialState } = getVariableTestContext<IntervalVariableModel>(adapter, { id, query, auto });
        const payload = toVariablePayload({ id: '0', type: 'interval' });

        // Each comma-separated token becomes one unselected option.
        reducerTester<VariablesState>()
          .givenReducer(intervalVariableReducer, cloneDeep(initialState))
          .whenActionIsDispatched(createIntervalOptions(payload))
          .thenStateShouldEqual({
            '0': {
              ...initialState['0'],
              id: '0',
              query: '1s,1m,1h,1d',
              auto: false,
              options: [
                { text: '1s', value: '1s', selected: false },
                { text: '1m', value: '1m', selected: false },
                { text: '1h', value: '1h', selected: false },
                { text: '1d', value: '1d', selected: false },
              ],
            } as IntervalVariableModel,
          });
      });
    });

    describe('and auto is true', () => {
      it('then state should be correct', () => {
        const id = '0';
        const query = '1s,1m,1h,1d';
        const auto = true;
        const { initialState } = getVariableTestContext<IntervalVariableModel>(adapter, { id, query, auto });
        const payload = toVariablePayload({ id: '0', type: 'interval' });

        // With auto enabled an extra "auto" option, whose value references the
        // variable id ($__auto_interval_0), is inserted before the parsed ones.
        reducerTester<VariablesState>()
          .givenReducer(intervalVariableReducer, cloneDeep(initialState))
          .whenActionIsDispatched(createIntervalOptions(payload))
          .thenStateShouldEqual({
            '0': {
              ...initialState['0'],
              id: '0',
              query: '1s,1m,1h,1d',
              auto: true,
              options: [
                { text: 'auto', value: '$__auto_interval_0', selected: false },
                { text: '1s', value: '1s', selected: false },
                { text: '1m', value: '1m', selected: false },
                { text: '1h', value: '1h', selected: false },
                { text: '1d', value: '1d', selected: false },
              ],
            } as IntervalVariableModel,
          });
      });
    });

    // Quoted entries may themselves contain commas; the reducer must split on
    // quote-aware boundaries and strip the surrounding double quotes.
    describe('and query contains "', () => {
      it('then state should be correct', () => {
        const id = '0';
        const query = '"kalle, anka","donald, duck"';
        const auto = false;
        const { initialState } = getVariableTestContext<IntervalVariableModel>(adapter, { id, query, auto });
        const payload = toVariablePayload({ id: '0', type: 'interval' });

        reducerTester<VariablesState>()
          .givenReducer(intervalVariableReducer, cloneDeep(initialState))
          .whenActionIsDispatched(createIntervalOptions(payload))
          .thenStateShouldEqual({
            '0': {
              ...initialState['0'],
              id: '0',
              query: '"kalle, anka","donald, duck"',
              auto: false,
              options: [
                { text: 'kalle, anka', value: 'kalle, anka', selected: false },
                { text: 'donald, duck', value: 'donald, duck', selected: false },
              ],
            } as IntervalVariableModel,
          });
      });
    });

    // Same quote-aware splitting, but with single quotes.
    describe("and query contains '", () => {
      it('then state should be correct', () => {
        const id = '0';
        const query = "'kalle, anka','donald, duck'";
        const auto = false;
        const { initialState } = getVariableTestContext<IntervalVariableModel>(adapter, { id, query, auto });
        const payload = toVariablePayload({ id: '0', type: 'interval' });

        reducerTester<VariablesState>()
          .givenReducer(intervalVariableReducer, cloneDeep(initialState))
          .whenActionIsDispatched(createIntervalOptions(payload))
          .thenStateShouldEqual({
            '0': {
              ...initialState['0'],
              id: '0',
              query: "'kalle, anka','donald, duck'",
              auto: false,
              options: [
                { text: 'kalle, anka', value: 'kalle, anka', selected: false },
                { text: 'donald, duck', value: 'donald, duck', selected: false },
              ],
            } as IntervalVariableModel,
          });
      });
    });
  });
});
package oauth import ( "encoding/json" "errors" "fmt" "io/ioutil" "os" "github.com/dotenx/dotenx/ao-api/models" "github.com/dotenx/dotenx/ao-api/oauth/provider" "github.com/dotenx/dotenx/ao-api/pkg/utils" "github.com/dotenx/goth" ) var providers []models.OauthProvider var gothProviders map[string]*goth.Provider var gothNotSupported []string func init() { gothNotSupported = append(gothNotSupported, "typeform") jsonFile, err := os.Open("providers.json") if err != nil { fmt.Println(err) } defer jsonFile.Close() byteValue, _ := ioutil.ReadAll(jsonFile) err = json.Unmarshal(byteValue, &providers) if err != nil { fmt.Println(err) } fmt.Println("############") fmt.Println(providers) fmt.Println("############") } // GetProviders returns a slice of providers formed from the corresponding config section func GetProviders(cbURIBase string) (map[string]*goth.Provider, error) { gothProviders = make(map[string]*goth.Provider) if providers == nil { return gothProviders, nil } for _, v := range providers { if utils.ContainsString(gothNotSupported, v.Name) { continue } uri := cbURIBase + v.Name p, err := provider.New(v.Name, &v.Secret, &v.Key, uri, v.Scopes...) if err != nil { return gothProviders, err } gothProviders[v.Name] = p } return gothProviders, nil } func GetProviderByName(name string) (*goth.Provider, error) { p, ok := gothProviders[name] if !ok { return nil, errors.New("Provider not found") } return p, nil } func GetProviderModelByName(name string) (*models.OauthProvider, error) { for _, v := range providers { if v.Name == name { return &v, nil } } return nil, errors.New("provider not found") } func GetProvidersMap() map[string]models.OauthProvider { res := make(map[string]models.OauthProvider) for _, p := range providers { res[p.Name] = p } return res }
<reponame>LiudasRepkovas/baigiamasisnemokami
import { Meteor } from 'meteor/meteor';
import { Counts } from 'meteor/tmeasday:publish-counts';
import { Items } from '../../../both/collections/items.collection';

// Publishes every Item document under the 'locations' publication name, plus a
// reactive 'numberOfLocations' total via tmeasday:publish-counts ({ noReady }
// keeps the count from blocking the subscription's ready state).
// NOTE(review): the query/options parameters are ignored and all items are
// returned — confirm whether filtering/pagination was intended here.
Meteor.publish('locations', function(query, options) {
    Counts.publish(this, 'numberOfLocations', Items.collection.find({}), { noReady: true });
    return Items.find({});
});
Army civilian police Capt. Andrew Poulos Jr. helped bust a counterfeiter last year who produced templates for federal law enforcement credentials and sold them over the Internet. Then he launched a second investigation, using the fake credentials and fraudulent badges to penetrate security at two federal courthouses, three state buildings and six military installations. The Army gave him a commendation and a cash bonus and published his findings in a terrorism bulletin to other federal agencies. "Keep up the great work to keep our Armed Forces safe and our posts secure," Denis P. McGowan of the Federal Protective Service, which is responsible for security at federal facilities, wrote in an e-mail. But now the investigator is being investigated. The U.S. Marshals Service, which is responsible for security at federal courthouses, complained to the Army that Poulos's investigation was "inappropriate, impermissible, and not taken lightly," according to e-mails obtained by The Washington Post. The Army Criminal Investigation Command, which had previously highlighted Poulos's work, then launched a criminal investigation of the captain. In January, Poulos, 33, was relieved of his command and stripped of his national security clearance. Investigators searched his office, forcing his door open. "This is all because the captain embarrassed the U.S. Marshals Service with his findings, which show a critical weakness in our national security," said a law enforcement source close to the investigation who spoke on the condition of anonymity because he feared losing his job. An official with the Marshals Service said he alerted the Army because the police captain's action could have resulted in "deadly" consequences. "The unauthorized entry of a court facility while carrying a weapon and impersonating law enforcement could be a violation of rules, regulations, policy or law," Michael J. 
Prout, assistant director for judicial security in the Marshals Service, said in a telephone interview. "For that purpose, once this was disclosed, the Marshals Service advised the U.S. Army of this incident and requested they examine it." "The Marshals Service is not embarrassed," Prout said. "The Marshals Service is concerned about the security of its facilities." Sources close to the investigation say Poulos's work was authorized by his superior, John A. Hazel, director of emergency services at Fort Monmouth, N.J. Hazel declined to comment.
Lamon Reccord, right, stares and yells at a Chicago police officer "Shoot me 16 times" as he and others march through Chicago's Loop Wednesday, Nov. 25, 2015, one day after murder charges were brought against police officer Jason Van Dyke in the killing of 17-year-old Laquan McDonald. (AP Photo/Charles Rex Arbogast) The Associated Press By DON BABWIN and SOPHIA TAREEN, Associated Press CHICAGO (AP) — Small groups of demonstrators gathered throughout the day Wednesday to protest the death of a black teen shot 16 times by a white police officer, and they urged supporters to join them in trying to shut down Chicago's famous Michigan Avenue shopping district during the Black Friday shopping bonanza. About two dozen protesters gathered outside Mayor Rahm Emanuel's office a day after authorities released the graphic video, footage that President Barack Obama said "deeply disturbed" him. The video shows officer Jason Van Dyke, who was charged Tuesday with first-degree murder, firing an entire magazine into 17-year-old Laquan McDonald. The protesters held banners showing photos of other black people fatally shot by police in Chicago and elsewhere. Several said they were parents of black men killed by Chicago officers. "You cannot kill our children and expect us to be quiet any longer," protester Quovadis Green said. "It is unacceptable." Activist Mark Carter called on people to "rise up" and shut down the Magnificent Mile shopping area on Friday. Protesters also planned to target the Board of Trade and other landmarks in the coming days, he said. Carter and others want the Department of Justice to investigate the Chicago Police Department and its history of covering up bad behavior. The Urban League of Chicago joined in the call for a federal investigation, alleging a pattern of "discriminatory harassment" against black people. The Rev. Jesse Jackson said other officers involved in McDonald's death should be fired or at least suspended. 
He also wants a special prosecutor appointed to the case, complaining that Cook County State's Attorney Anita Alvarez took too long to bring a murder charge in the shooting, which happened more than a year ago. Obama said in a statement Wednesday night he was "deeply disturbed" by the video footage. In a Facebook post, the president said he is asking Americans to "keep those who've suffered tragic loss in our thoughts and prayers" this Thanksgiving "and to be thankful for the overwhelming majority of men and women in uniform who protect our communities with honor." Obama said he is personally grateful to the people of his hometown — Chicago — for keeping protests peaceful. Democratic presidential candidate Hillary Rodham Clinton also weighed in, saying McDonald's family and Chicago residents "deserve justice and accountability." Clinton, who made the comments Wednesday in an emailed statement, added that police officers across the country are doing their duty honorably "without resorting to unnecessary force." One of Clinton's rivals, Sen. Bernie Sanders, said in his own statement that all Americans "should be sickened" by the video. For months, Chicago leaders had feared that the release of the video could provoke the kind of turmoil that rocked cities such as Baltimore and Ferguson, Missouri, after young black men were slain by police or died in police custody. Van Dyke was the subject of 18 civilian complaints over 14 years, including allegations that he used racial epithets and excessive force, police and court records show. Complaints against police are not uncommon. But the number filed against Van Dyke was high compared with other officers. At least one person he arrested was later awarded $350,000 in damages in a lawsuit. Van Dyke's lawyer, Daniel Herbert, did not return a message left Wednesday by The Associated Press. 
Also Wednesday, a Cook County judge dismissed a charge against a protester accused of hitting a police officer in the hours after the video was made public. Judge Peggy Chiampas said the state's attorney's office recommended dropping the charge against 22-year-old Malcolm London and told London he was free to go. London, who was wearing a T-shirt with the phrase "Unapologetically black" on it, walked outside the courthouse to loud cheers. Prosecutors did not explain why they sought to dismiss the charge. London was among five people arrested on charges that included weapons possession and resisting arrest. At least two people were detained amid protests late Wednesday when demonstrators disrupted traffic in the city's financial district. On the South Side, about 100 people marched about a half mile to Chicago police headquarters after a community meeting. Crowds remained on Michigan Avenue late into the night, marching and chanting. Meanwhile, in Minneapolis on Wednesday afternoon, hundreds of people filled a church to pay their respects to a man whose death in an unrelated confrontation with police sparked more than a week of ongoing protests. A picture of a smiling Jamar Clark adorned the program for his funeral. The relevant portion of the Chicago video runs for less than 40 seconds and has no sound. McDonald swings into view on a four-lane street where police vehicles are stopped in the middle of the roadway. As he jogs down an empty lane, he appears to pull up his pants and then slows to a brisk walk, veering away from two officers who are emerging from a vehicle and drawing their guns. Almost immediately, one of the officers appears to fire from close range. McDonald spins around and collapses on the pavement. The car with the camera continues to roll forward until the officers are out of the frame. Then McDonald can be seen lying on the ground, moving occasionally. 
At least two small puffs of smoke are seen coming off his body as the officer continues firing. In the final moments, an officer kicks something out of McDonald's hands. Police have said the teen had a knife. Alvarez said Tuesday that a 3-inch knife with its blade folded into the handle was recovered from the scene. ___ Associated Press writers Sara Burnett, Jason Keyser and Michael Tarm contributed to this report.
def load_derived_data(filename, train_test_split_date, dropna=True, filter_counter_gradient=False):
    """Load a derived-data CSV indexed by "Time" and split it chronologically.

    Parameters
    ----------
    filename : str
        Path to a CSV file containing a "Time" column (parsed as datetimes).
    train_test_split_date : str or datetime-like
        Rows strictly before this timestamp go to "train"; the rest to "test".
    dropna : bool, optional
        Drop rows containing any NaN values (default True).
    filter_counter_gradient : bool, optional
        Additionally pass the data through ``filter_counter_gradient_data``.

    Returns
    -------
    dict
        Keys "train" and "test" mapping to the two DataFrame partitions.
    """
    frame = pd.read_csv(filename, index_col="Time", parse_dates=["Time"])
    if dropna:
        frame = frame.dropna()
    if filter_counter_gradient:
        frame = filter_counter_gradient_data(frame)
    cutoff = pd.Timestamp(train_test_split_date)
    return {
        "train": frame.loc[frame.index < cutoff],
        "test": frame.loc[frame.index >= cutoff],
    }
<filename>src/Icon/Trademark.tsx
/* tslint:disable */
import * as React from 'react';
import styled from 'styled-components';
import { SVGIcon, IconSpacing } from '../mixins/SVGIcon';
import IconProps from '../interfaces/IconProps';

// Renders a "trademark" (TM) glyph as an inline SVG; the 640x512 viewBox
// matches the glyph's source coordinate system. All incoming props are
// spread onto the <svg> element so size/className/handlers pass through.
const SvgTrademark = (props: IconProps) => (
  <svg viewBox="0 0 640 512" {...props}>
    <path d="M260.6 96H12c-6.6 0-12 5.4-12 12v43.1c0 6.6 5.4 12 12 12h85.1V404c0 6.6 5.4 12 12 12h54.3c6.6 0 12-5.4 12-12V163.1h85.1c6.6 0 12-5.4 12-12V108c.1-6.6-5.3-12-11.9-12zM640 403l-24-296c-.5-6.2-5.7-11-12-11h-65.4c-5.1 0-9.7 3.3-11.3 8.1l-43.8 127.1c-7.2 20.6-16.1 52.8-16.1 52.8h-.9s-8.9-32.2-16.1-52.8l-43.8-127.1c-1.7-4.8-6.2-8.1-11.3-8.1h-65.4c-6.2 0-11.4 4.8-12 11l-24.4 296c-.6 7 4.9 13 12 13H360c6.3 0 11.5-4.9 12-11.2l9.1-132.9c1.8-24.2 0-53.7 0-53.7h.9s10.7 33.6 17.9 53.7l30.7 84.7c1.7 4.7 6.2 7.9 11.3 7.9h50.3c5.1 0 9.6-3.2 11.3-7.9l30.7-84.7c7.2-20.1 17.9-53.7 17.9-53.7h.9s-1.8 29.5 0 53.7l9.1 132.9c.4 6.3 5.7 11.2 12 11.2H628c7 0 12.5-6 12-13z" />
  </svg>
);

// Wrap with styled-components so the shared icon sizing/spacing mixins apply.
export default styled(SvgTrademark)`
  ${SVGIcon};
  ${IconSpacing};
`;
// ReadStream implements FileServiceServer.ReadStream
//
// The object identified by the request (either by key or by explicit
// metadata) is streamed back to the client in chunks of the requested size.
// Two goroutines cooperate through an in-memory pipe: a producer writes the
// object's content into the pipe, a consumer re-slices it into gRPC
// responses. The first error from either side aborts both via the errgroup.
func (service *fileService) ReadStream(req *pb.ReadStreamRequest, stream pb.FileService_ReadStreamServer) error {
	chunkSize := req.GetChunkSize()
	if chunkSize <= 0 {
		return rpctypes.ErrGRPCInvalidChunkSize
	}

	reader, writer := io.Pipe()

	ctx := stream.Context()
	group, ctx := errgroup.WithContext(ctx)

	// Producer: resolve the request input and write the object into the pipe.
	group.Go(func() (err error) {
		defer func() {
			// Always close the writer so the consumer's Read returns io.EOF;
			// keep the close error only if no earlier error occurred.
			e := writer.Close()
			if e != nil {
				if err == nil {
					err = e
				}
				log.Errorf("error while closing (*fileService).ReadStream's PipeWriter: %v", e)
			}
		}()
		switch v := req.GetInput().(type) {
		case *pb.ReadStreamRequest_Key:
			if len(v.Key) == 0 {
				return rpctypes.ErrGRPCNilKey
			}
			// Look up metadata by key first, then read the object it describes.
			var metadata *metatypes.Metadata
			metadata, err = service.metaClient.GetMetadata(v.Key)
			if err != nil {
				return mapZstorError(err)
			}
			return service.client.Read(*metadata, writer)
		case *pb.ReadStreamRequest_Metadata:
			if v.Metadata == nil {
				return rpctypes.ErrGRPCNilMetadata
			}
			metadata := convertProtoToInMemoryMetadata(v.Metadata)
			return service.client.Read(metadata, writer)
		default:
			// No input field was set at all.
			return rpctypes.ErrGRPCNilKey
		}
	})

	// Consumer: forward the piped content to the client, chunkSize bytes at
	// a time (the final chunk may be shorter).
	group.Go(func() error {
		var (
			n   int
			err error
			buf = make([]byte, chunkSize)
		)
		for {
			n, err = reader.Read(buf)
			if err != nil {
				if err == io.EOF {
					// Producer closed the pipe: the stream is complete.
					return nil
				}
				return err
			}
			err = stream.Send(&pb.ReadStreamResponse{DataChunk: buf[:n]})
			if err != nil {
				return err
			}
		}
	})

	err := group.Wait()
	if err != nil {
		return mapZstorError(err)
	}
	return nil
}
import os
import sys
import cv2
import numpy as np
import tensorflow as tf

import unet_utils as utils


class UNET():
    """Thin wrapper around a trained U-Net TensorFlow (v1) checkpoint.

    Typical usage: construct with the checkpoint directory, call
    ``initialize()`` once to restore the graph, then ``detect()`` per image
    and ``close_session()`` when done.
    """

    def __init__(self, model_dir):
        # Resolve the checkpoint path relative to the current working directory.
        model_dir = os.path.join(os.getcwd(), model_dir)
        self.model_path = os.path.join(model_dir, 'trained_model.ckpt')

    def initialize(self):
        """Start a TF session and restore graph + weights from the checkpoint."""
        self.sess = tf.Session()  # tf.Session()
        meta_path = self.model_path + '.meta'
        model_loader = tf.train.import_meta_graph(meta_path)
        model_loader.restore(self.sess, self.model_path)

        # Grab the tensors needed for inference by their graph names.
        graph = tf.get_default_graph()
        self.tf_x = graph.get_tensor_by_name('tf_x:0')
        self.tf_predict = graph.get_tensor_by_name('predict:0')
        self.is_training = graph.get_tensor_by_name('is_training:0')
        '''
        try:
            self.is_training = graph.get_tensor_by_name('is_training_1:0')
        except KeyError as e:
            print("[warn] %s -> is_training_1 not found. Try to Find is_training ..." % e)
        '''

    def detect(self, img):
        """Run inference on an image and return a uint8 mask (0 or 255).

        The image is letterboxed to the network's input size and scaled to
        [-0.5, 0.5] before being fed through the graph.
        """
        height, width = self.tf_x.get_shape().as_list()[1:3]
        image = utils.cv_letterbox(img, height, width)
        image_in = np.expand_dims(np.float32(image) / 255 - 0.5, axis=0)
        predict_mask = self.sess.run(self.tf_predict,
                                     feed_dict={self.tf_x: image_in, self.is_training: False})
        # Threshold at 0.5 (via +0.5 then clip) and scale to an 8-bit mask.
        predict_mask = np.uint8(np.clip(predict_mask[0, ..., 0] + 0.5, 0., 1.) * 255)
        #predict_mask = cv2.cvtColor(predict_mask, cv2.COLOR_GRAY2BGR)
        return predict_mask

    def close_session(self):
        """Release the TensorFlow session."""
        self.sess.close()


if __name__ == '__main__':
    img_path = os.path.join(os.getcwd(), r'test_data\IMG_1222_0_1.jpg')
    img = cv2.imread(img_path)
    # Bug fix: UNET() was previously called without the required model_dir
    # argument, which raised a TypeError before any inference could run.
    un = UNET('model')  # TODO confirm the actual checkpoint directory name
    un.initialize()
    un.detect(img)
    un.close_session()  # release the session the demo previously leaked
Note, these maps are designed to depict a broad overview of the current field, and are considered to be more qualitative than quantitative. In particular, the specific coordinates are not very accurate. Please use the GLOS Point-Query Tool to generate a timeseries of modeled currents at a particular location, http://data.glos.us/glcfs/. Click on map to zoom; click and drag to pan. This animation may not work on all browsers. The flow patterns depicted in these visualizations of lake currents are based on simulations from the Great Lakes Coastal Forecasting System operated by NOAA's Great Lakes Environmental Research Laboratory. The "Latest" and "3hrs Previous" visualizations depict water motion corresponding to a snapshot of lake currents at the present time and three hours previous to the present time. Lake currents can change rapidly with changing wind conditions. Surface currents tend to follow the wind direction more closely than currents at depth. Depth-averaged currents represent the average water motion from surface to bottom and tend to follow shoreline and bottom contours. This map uses copyrighted technology developed by Fernanda Viegas and Martin Wattenberg (hint.fm) for their stunning wind map application. They have graciously allowed us to show visualization of lake currents here until license issues are resolved. Data is acquired from the GLOS THREDDS Data Server via OPeNDAP using python scripts (courtesy of Rich Signell). Please send feedback and inquiries to: Eric Anderson
Lymphocytes stimulated with recombinant human interleukin-2: relationship between motility into protein matrix and in vivo localization in normal and neoplastic tissues of mice. Murine splenic lymphocytes nonspecifically stimulated with recombinant human interleukin-2 in vitro were fractionated according to their ability to migrate through type I collagen gel during a 24-hour period. After labeling with 111In and adoptive transfer into hosts of subcutaneous mammary tumors, motile fractions exhibited approximately twofold greater tumor localization and approximately twofold lower lung localization than nonmotile fractions. The results suggest that lymphocyte properties associated with motility through extracellular matrix also influence patterns of lymphocyte localization in vivo. It should be possible to identify these properties by comparative analysis of motile and nonmotile fractions obtained from various matrices.
// NewIntakeReverseProxy returns the AppSec Intake Proxy handler according to // the agent configuration. func NewIntakeReverseProxy(transport http.RoundTripper) (http.Handler, error) { disabled := func(reason string) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Add("Content-Type", "application/json") w.WriteHeader(http.StatusMethodNotAllowed) if err := json.NewEncoder(w).Encode(reason); err != nil { log.Error(err) } }) } cfg, err := newConfig(coreconfig.Datadog) if err != nil { return disabled(fmt.Sprintf("appsec agent disabled due to a configuration error: %v", err)), errors.Wrap(err, "configuration: ") } if !cfg.Enabled { log.Info("AppSec proxy disabled by configuration") return disabled("appsec agent disabled by configuration"), nil } return newIntakeReverseProxy(cfg.IntakeURL, cfg.APIKey, cfg.MaxPayloadSize, transport), nil }
<filename>seamm_dashboard/routes/api/roles.py<gh_stars>1-10
"""
API endpoint for roles
"""
from flask_jwt_extended import jwt_required

from seamm_dashboard import authorize
from seamm_datastore.database.models import Role
from seamm_datastore.database.schema import RoleSchema


@jwt_required(optional=True)
@authorize.has_role("admin", "group manager")
def get_roles():
    """Return every Role as a JSON-serializable list with HTTP 200.

    A JWT is accepted but optional at the token layer; access is gated by
    the "admin"/"group manager" role check in the decorator above.
    """
    roles = Role.query.all()
    role_schema = RoleSchema(many=True)
    roles = role_schema.dump(roles)
    return roles, 200
def classify(self):
    """Assign each training point to its nearest center.

    For every row of ``self.train_x`` the index of the closest entry in
    ``self.centers`` (Euclidean distance; first match wins on ties) is
    stored in ``self.classification``. When there are no centers the
    sentinel value -1 is recorded.
    """
    for idx, point in enumerate(self.train_x):
        best_center = -1
        best_dist = float('inf')
        for j, center in enumerate(self.centers):
            d = np.linalg.norm(point - center)
            # Strict comparison keeps the first center on equal distances.
            if d < best_dist:
                best_dist = d
                best_center = j
        self.classification[idx] = best_center
/**
 * Inserts a TupleBatch into the SQLite database.
 *
 * @param sqliteInfo SQLite connection information
 * @param relationKey the table to insert into.
 * @param tupleBatch TupleBatch that contains the data to be inserted.
 * @throws DbException if there is an error in the database.
 */
public static synchronized void tupleBatchInsert(
    final SQLiteInfo sqliteInfo, final RelationKey relationKey, final TupleBatch tupleBatch)
    throws DbException {
  // The redundant `catch (DbException e) { throw e; }` was removed: it only
  // rethrew unchanged. try/finally alone guarantees the connection is closed
  // whether or not the insert throws.
  SQLiteAccessMethod sqliteAccessMethod = null;
  try {
    sqliteAccessMethod = new SQLiteAccessMethod(sqliteInfo, false);
    sqliteAccessMethod.tupleBatchInsert(relationKey, tupleBatch);
  } finally {
    if (sqliteAccessMethod != null) {
      sqliteAccessMethod.close();
    }
  }
}
/**
 * Removes the node immediately following the given node.
 * Passing {@code null} removes the head node. If there is no node to
 * remove, the list is left untouched. The tail reference and the element
 * count are kept consistent with the removal.
 *
 * @param before node preceding the one to delete, or {@code null} to
 *               delete the head
 */
public void deleteAfter(T before) {
    T victim;     // node being unlinked
    T successor;  // node that follows the victim (may be null)
    if (before == null) {
        // Deleting the head: advance the head pointer past it.
        victim = head;
        if (victim == null) return;
        successor = getNextLink(victim);
        head = successor;
    } else {
        victim = getNextLink(before);
        if (victim == null) return;
        successor = getNextLink(victim);
        setNextLink(before, successor);
    }
    // Unlinking the last node makes 'before' the new tail
    // (null when the list became empty).
    if (successor == null) tail = before;
    count--;
}
/**
 * A collection of common objects such as namespaces, dictionaries used
 * in the GamessUS system
 * @author pm286
 *
 */
public class GamessUSXCommon extends AbstractCommon {

    @SuppressWarnings("unused")
    private final static Logger LOG = Logger.getLogger(GamessUSXCommon.class);

    // Namespace prefix and URI for GAMESS-US terms in emitted documents.
    public static final String GAMESSUS_PREFIX = "gamessus";
    public static final String GAMESSUS_URI = "http://wwmm.ch.cam.ac.uk/dict/gamessus";

    // Marker strings that appear in GAMESS-US output.
    public static final String NSERCH = "NSERCH";
    public static final String STEP = "STEP";
    public static final String RESULTS = "RESULTS";

    // GAMESS-US input delimits keyword groups as " $NAME ... $END".
    public static final String KEYWORD = " $";
    public static final String END = " $END";

    public static final String NCYC = "ncyc";

    // Identifiers for the converter's input/output document types.
    // NOTE(review): "gamesuss_log" looks like a typo of "gamessus_log" —
    // confirm no other code depends on the misspelled value before changing.
    public static final String GAMESSUS_LOG = "gamesuss_log";
    public static final String GAMESSUS_LOG_XML = "gamess_log_xml";
    public static final String GAMESSUS_LOG_CML = "gamess_log_cml";
    public static final String PUNCH = "gamessus_punch";
    public static final String PUNCH_XML = "gamessus_punch_xml";

    /** Classpath location of the GAMESS-US dictionary resource. */
    protected String getDictionaryResource() {
        return "org/xmlcml/cml/converters/compchem/gamessus/gamessusDict.xml";
    }

    /** Namespace prefix for GAMESS-US elements. */
    public String getPrefix() {
        return GAMESSUS_PREFIX;
    }

    /** Namespace URI for the GAMESS-US dictionary. */
    public String getNamespace() {
        return GAMESSUS_URI;
    }
}
#include <mirrage/renderer/object_router.hpp>

namespace mirrage::renderer {

	namespace {
		// Rescale a plane equation so its normal (the xyz part) has unit length.
		auto norm_plane(glm::vec4 p)
		{
			return p / glm::length(glm::vec3(p.x, p.y, p.z));
		}

		// Derive the six frustum planes from a combined view-projection matrix
		// by summing/differencing rows of the matrix (Gribb/Hartmann approach).
		auto extract_planes(const glm::mat4& cam_view_proj) -> detail::Frustum_planes
		{
			const auto r0 = row(cam_view_proj, 0);
			const auto r1 = row(cam_view_proj, 1);
			const auto r2 = row(cam_view_proj, 2);
			const auto r3 = row(cam_view_proj, 3);

			return {norm_plane(r3 + r0),  // left
			        norm_plane(r3 - r0),  // right
			        norm_plane(r3 - r1),  // top
			        norm_plane(r3 + r1),  // bottom
			        norm_plane(r3 + r2),  // near
			        norm_plane(r3 - r2)}; // far
		}
	} // namespace

	Object_router_base::~Object_router_base() = default;

	// Registers a new viewer frustum and returns the single-bit mask that
	// identifies it in subsequent culling queries (bit index == insertion order).
	auto Object_router_base::add_viewer(const glm::mat4& view_proj, bool is_camera) -> Culling_mask
	{
		_frustums.emplace_back(extract_planes(view_proj), is_camera);
		return Culling_mask(1) << (_frustums.size() - 1);
	}

} // namespace mirrage::renderer
Gender and Gender Mainstreaming in Engineering Education in Africa: In Africa, debates on the issues of the gender gap and gender inequality have raised concerns in engineering education (EE) and the engineering workforce. Thus, addressing gender inequality and promoting equity are significant in realizing the Sustainable Development Goals (SDGs), and in recent years much has been done to address gender gaps; yet women are still excluded, under-represented, segregated and relegated in the engineering profession and academia. Despite much sensitization on gender equality, Africa is still far from addressing gender gaps in EE; hence the crux of this paper. This paper was guided by Liberal Feminism theory, focusing on women's freedom as an autonomy to be free from coercive interference arising from the 'gender system' or patriarchal nature of inherited traditions and institutions. This paper takes a broad look at the concepts of gender and gender mainstreaming in EE in Africa. Specifically, it explores gender and inequality in EE and how gender mainstreaming can be enacted to address gender gaps in EE, as well as its implications in Africa. Thus, to address these gaps, recommendations such as developing a gender-sensitive curriculum for EE, adopting policies that facilitate women's access to training and employment opportunities, and creating gender-sensitive career counselling were advocated. INTRODUCTION The concept of gender mainstreaming is interpreted differently, which results in a considerable level of misperception within various disciplines. The evolution of this concept dates back to the clamour for gender equality and equity during the 1980s United Nations Decade for Women; subsequently, the Fourth World Conference on Women, held in Beijing in 1995, pushed for gender mainstreaming globally (UN, 1995). The Beijing Platform for gender action conveyed the need for mainstreaming gender in all sub-divisions, focusing on areas such as the political, economic and social spheres. 
The vital purpose of the Beijing Platform for gender action was to sensitize and implement gender equality policies and programmes, where men and women should benefit from the programmes equally and inequality will not be continued (UN, 1995). In year 2000, the concept of gender mainstreaming was reinforced in the Economic and Social Council (ECOSOC) agreed conclusions (1997/2 & the 23rd Special Session of the United Nations General Assembly. It got approval in the Gender and Development (GAD) dialogue, as it adopts a transformatory process and practice that seeks to engage and serve as an advantage to both male and female through a systematic combination of obvious attention to sex and gender issues within workforce(UNESCO, 2018). The interests of gender mainstreaming can be incorporated in the evaluation of issues faced by a particular sex, informing policiesthat will address gender gap. Such an approach involves agenda setting characterized by monitoring and evaluation tools with the support of frameworks. Thus, this approach is often adopted in addressing gender issues as it centred on existing development agenda by making out for change amid gender inequality issues (Wang and Degol, 2015). In Africa, the inclusion of gender perspective in engineering education (EE) involves teaching of engineering as important gender initiatives, that addresses gender disparities in engineering fields. This allow engineering students to have a better insight in matters that concerns gender and how gender mainstreaming can be applied to the content of engineering subjects, with the likelihood to positively affect engineering students directly or indirectly (USAID, 2015). Hence, it is unusual for engineering students to be affected differently by technological developments by being gender sensitive. 
Importantly, it becomes imperative for engineering academia to avoid gender biases on the content ofEE curriculum and programmes, as EE modules do not consciously identify gender differences or possible omissions in relation to gender, sex (in biological) or social and cultural aspects (exploring gender)(UNESCO, 2018a). This may produce outputs based on gender stereotypes or masculinity patterns and such interests will tend towards oversimplification as if it was germane to the engineering profession. Mainstreaming gender is very key in engineering education as it improves the quality of instructional materials, revealing the social relevance of the resulting understanding and innovations that will enhance impact of gender equality in Africa engineering institutions (David, 2017;UNESCO, 2017a). This will facilitate an indepth understanding of gender perspective and help in consideringcentral views from engineering academia and industrial professionals. Thus, theoretical conceptualisations and empirical evaluation of gender and gender mainstreaming in EE can not be avoided. But imploring teaching with gender perspective in EE will enhance EE students to think critically in identifying social stereotypes, gender norms and roles (Jones et al., 2013;David, 2017). Hence, engineering students will learn to problematize predominant socialisation patterns and at the same time developing unique skills that will enable them to avoid gender blindness in their future career. Teaching gender perspective inculcated in EE curriculum programmes guarantee that technological impacts are developed by engineers established on scientific proof which buttressthe relevance and quality of gender mainstreaming in EE(OECD, 2012). The gender dimension of skills in EE requires engineering academics and students to be concerned in developing logical thinking along a shared commitment with multiplicity in modern engineering society. 
As this ranges from respect for fundamental rights to the equality of women and men as well as non-discrimination in engineering academia and industries. Hence, this paper presents a systematic review methodology which explore gender and gender mainstreaming in EE in Africa. The systematic review method adopted in this paper identifies and appraise published articles from year 2013 to 2020 in the fields of Engineering, Education and Sociology systematically. This was to appraise published reviews on the study objectives and to discuss its implications with recommendations. The main objective of this paper is to fill the research gap by contributing to the overall understanding of gender and gender mainstreaming excellence in EE in Africa. Specifically, we explore gender and inequality in EE; and to examine the enactment of gender mainstreaming in EE; as well as its implications for EE in Africa, hence, recommendations were advocated. II. THE RESEARCH GAP Engineering education has an economic and social implication it has on development and growth across the globe, Africa region inclusive. Thus, the education of engineers in the 21st century is beginning to adopt gender perspective in EE, as efforts are needed to address gender gaps in engineering profession (Wang and Degol, 2015). Such efforts include addition of gender perspectives in EE curriculum programmes and learning instructional materials as well as gender skills" development in EE. This aid in raising consciousness in gender and gender mainstreaming in EE in Africa (Morley, 2010).Yet, there is a growing concern on gender gaps that has a perturbing reflection in EE, as gender issues have still remained marginal to what is contemplated to be imperative and critical issues in EE, as debates on gender inequality cannot be secluded from wider engineering workforce milieus. 
Hence, there is a clear discrepancy and a wide gap in EE interrogating African traditional policies and practices that is associated with marginalization of women in EE and ascribing engineering profession as a male-dominated field(WEF, 2017). The fact remains however, that the debates on gender issues cannot be isolated from disparities in cultural interplay and religious mix-up that controls the general society; hence the crux of this paper. The critical questions are: What is the overall understanding of gender and gender mainstreaming in EE in Africa? How does gender inequality affects EE in Africa? What is the impact of gender mainstreaming enactment on EE in Africa? What can EE do to address this gender gap in order to rebuilding gender equality and equity in engineering profession? What are the implications of gender gaps in EE in Africa? The answer to these questions are critical, urgent and unavoidable by all stakeholders within or outside engineering institutions, hence recommendations were advocated. III. LITERATURE REVIEW The rapid growing impact of engineering field isfundamental to economic growth and development, as gender gap is one of the greatest challenges of the 2030 Agenda for Sustainable Development Goal plaguing EE and industrial profession(UNESCO, 2016; 2017). Therefore, a significant increase in the number of women entering and remaining in engineering careers are needed to achieve SDG progression in modern and technological demanding society. In year 2018, the 70th anniversary of the Universal Declaration of Human Rights was celebrated, in which gender equality and access to science and engineering fields were recognized as human rights(UN, 2020). This was organized to remind individuals that gender equality in science, technology and innovation (STI) is both critical for sustainable development, but primarily a human right. 
Besides, sustainable development itself requires more engineering science and scientists, who will expatiate on gender perspectives in EE. The 2030 Agenda and its 17 SDGs recognize this obviously, as deliberated upon that if the world needs more scientists in engineering areas, it cannot afford losing half of its population from the scientific and engineering workforce (Aiston and Jung, 2015). That is why gender equality is one of the two global priorities at United Nations Educational, Scientific and Cultural Organization (UNESCO). Gender equality in engineering science and education cannot be achieved without working to overcome gender gaps and disparities in access to, impact on, and benefit of STI(Alblooshi and May, 2018).There are three keypurposes for promoting gender equality in STI. First, is the rights-based, or social justice argument as year 2018 is the 70th anniversary of the Universal Declaration of Human Rights, in which gender equality and access to science and engineering are recognized as human rights(WEF, 2017). Second, sustainable development itself requires more engineering, science and scientists, as the 2030 Agenda and its 17 SDGs recognize that the world needs more scientists and cannot be exempted from the scientific and engineering workforce. Third, sustainable development also requires new and better science, including interdisciplinary science, which has been established and seen where women have contributed in their particular perspectives, approaches and priorities as the outcomes were more varied and relevant (Elu, 2018). Over the past decades, the global community has made significant efforts to inspire and engage women and girls in engineering education and other careers. Yet, despite amazing gains, women and girls continue to be excluded from participating fully in engineering programmes, and progress is uneven(AAS, 2018). 
While a growing number of women are enrolling in science studies at the university level, many drop out before reaching the higher levels in their careers. The UNESCO Institute for Statistics (2018) revealed that only 28.8% of the world's researchers are women and only one-fourth (27%) of all countries had reached parity in 2016 (UNESCO, 2013; UNESCO, 2018). Therefore, the contemporary gender imbalance in the engineering field is partly a major consequence of long-term policies established at various levels, in addition to social and cultural factors. The historical background displays the path of women's increased participation in science and engineering as well as the expansion of the latter since the mid-20th century. However, evidence highlights that women have been proportionally higher in number at early higher educational levels, but the share of women in science and engineering fields was reduced consistently through each subsequent level of career development, establishing vertical discrimination (Strachan et al., 2018). Besides, the 2018 global average proportion of female researchers was 28.8%, and only 35% of all students enrolled in Science, Technology, Engineering and Mathematics (STEM)-related fields of study were women (Alblooshi and May, 2018; AAS, 2018). Gender parity among academics has been reached in Central Asia and Latin America, where 48.1% and 45.4% of researchers, respectively, were women; at the national level, 30% of all countries with such available data had reached parity in 2016. Notably, other regions of the globe appear relatively far from reaching parity, as only 23.4% of researchers were women in East Asia and the Pacific, while South and West Asia had a cumulative figure of 18.5% (UNESCO, 2013). 
In Africa, women scientists have a key role to play in scientific leadership in contributing to the continent"s development and transformation; yet they remain significantly underrepresented in higher education and in science, technology, engineering, and mathematics (STEM)(AAS, 2018). Globally, only 53% of the world"s Bachelor"s and Master"s degree graduates accounted for women and 43% of women were PhD graduates, while 28% of them were reported to be researcher in all fields of discipline. Thus, only 30% of women in higher education have moved into STEM-related field. Comparably, only 30% of researchers in all subject areas were women in sub-Saharan Africa (Aiston and Jung, 2015). For instance, in Cameroon in West Africa region, enrollment in tertiary education was estimated at 20% for men and 15% for women in year 2017, and women only constituted 22% of Cameroonian researchers, while only 7% of academics have rose to the rank of full professor. Generally, an optimistic tendency is conspicuous in countries wheredata were available on female researchers" distribution. At the national level, 30% of all countries with available data on the national share of female researchers had reached parity in 2016(UNESCO, 2017). However, countries that have reached gender parity in terms of researchers are still facing key challenges in accomplishing it in all facets, since perpendicular and parallel discrimination persevere as impediments to women in engineering workforce. Most recent available UIS data show that, out of 87 countries, 24% have more than 45%, the lowest bound for gender parity, female researchers in the fields of natural sciences and engineering, whereas 54% have more than 45% in social sciences and humanities (Strachan et al., 2018). In addition, women occupy only a small minority of top-level positions, despite an improvement in recent years, and only 20 women have been awarded a Nobel prize in scientific discipline to date. 
Gender as a significant precepts of development were obvious in gender issues across social groups, which has attracted attention in higher learning environment, affecting quality of education in Africa(Fredua-Kwarteng Effah, 2017). Several World conventions has deliberated on gender issues pertaining to women in EE, as they pointed the negative implication of gender gaps on nations" development. Engineering learning institutions play and act as a role model and engineers of gender mainstreaming as well as practicing the considerations of the approved policy documents. Thus, EE is a threshold of future decision and policy makers that have received training and exposed to the principles aligned with gender mainstreaming. Debates on gender mainstreaming in EEwill aid a better understanding ofgender perspectives with more systematic approaches to engineering profession (Strachan et al., 2018). This pose a serious implication on quality of EE focusing on mainstreaming gender to address gender gaps and disparities. Although, change needs to happen in EE in order to build better policy-relevant indicators that will address all aspects of inequality hounding women in engineering profession(UNESCO, 2017). Importantly, policy makers and relevant engineering advisory boardhave to create new approaches to map out21st century gender-related policies in making STI relevant to gender mainstreaming. International organisations have made a lot of efforts and showed commitments in eliminating gender issues and promoting females in EE in Africa (David, 2017). This has assisted in reducing the present global statistics for gender gap in EE, which have increase the visibility, participation and women"s recognition of their contribution to engineering fields, from countries around the world. 
This strategy will strengthen policy framework that are made to transform gender perspective that are informed from evidence drawn from data collected to monitor and evaluate gender equality progress. Engineering profession have been perceived as male-dominated career as women are not given privilege to access and work in engineering industry. Women"s efforts towards EE might be affected by factors that act as impediments to their accomplishment were environment, culture, religion, and institutional policies in the society (Hills, 2015;Elu, 2018). Gender mainstreaming is applicable form that can be used to close gender gaps in all spheres of life, which involves taking into consideration the gender differences in terms of rights, responsibilities and opportunities, taking into account the different gender needs in EE(USAID, 2015; UNESCO, 2018; 2018a). Thus, gender equality is not advocated as a subject of social justice in EEbut gender mainstreaming has rebuilt its central features that aimed at restructuring gender equality policies. This has an implication on the quality of teaching, learning and research in EE as it is applied to evaluate quality assurance in EE programmes. IV. METHODOLOGICAL APPROACH Systematic literature review was conducted as recommended by Gough et al. (2017) where the author derived formulated review questions, identified relevant studies, selected studies that fit the inclusion criteria, and evaluated the quality of the research studies, as well as summarized the evidence by use of an unequivocal methodology. Based on the study research questions, relevant studies have been identified through reviewed articles archive. The descriptors "gender", "gender in EE", "gender mainstreaming in EE", and "gender mainstreaming enactment in EE" were used to locate key bases. 
These descriptors were also used in permutation with other descriptors such as "USA", "global", "developed countries", "developing countries" and "Africa", as these are all understated factors in EE (Borrego et al., 2015). Several papers (Inês Doreito et al., 2019) were identified as prospective sources of information; however, only quality articles that met the following inclusion criteria were reviewed: (1) published between 2013 and 2020; (2) enumerated gender and inequality in EE in Africa as one of the lenses for analysis; and (3) examined gender mainstreaming enactment in EE in Africa. The papers were then allocated into subsections relating to the study objectives, and the types of analytical themes identified in the sampled articles were itemized. In total, there were 32 reviewed articles identified as systematic review articles. Of those 32 articles, only the 25 articles that were clearly identified with the theme "gender and gender mainstreaming in EE in Africa" were used. The other seven articles were better characterized as logical theoretical papers. To recap the validation found in these sampled articles, an appraisal of the sampled articles was completed, along with a developed coding sheet. The identified groupings on the coding sheet involved each study's objectives, research questions, methods, type of data collected, study population, and relevant findings. Also, the methods used to evaluate and review published articles scientifically were drawn from existing studies (Gough et al., 2017; Torres-Carrion et al., 2018). This was followed by adopting logical research practices, and the reporting of systematic reviews was explained explicitly. After review, the principal investigator synthesized the preliminary findings and patterns recorded in summaries. The author reviewed these summaries and preliminary findings to guide the final review. 
After the articles were analysed, the principal investigator developed a typical sampling framework in which the articles were used to outline the research questions. These illustrative samples were not taken verbatim, but rather synthesized to illustrate how analytical research in EE can be reframed to redefine key themes and research questions in gender and gender mainstreaming in EE, as indicated by Brown et al. (2015). In addition, the process of identifying and appraising published reviews allows researchers to describe the quality of the compiled existing studies, summarize and compare the conclusions of the reviews, and discuss the implications and recommendations of the reviews (Gough et al., 2017). The principles of systematic review methodology were emphasized in the studies to explore how the enactment of gender mainstreaming policies will primarily address gender gaps in EE in Africa (Torres-Carrion et al., 2018). In the discussion section, we illustrate how we outlined representative research questions presented in the reviews highlighted by the studies analysed. V. LIMITATIONS While this study focused on published articles in relation to gender and gender mainstreaming in EE in Africa, it is possible that the authors may have omitted important studies that included gender mainstreaming within social contexts of EE areas of research. Furthermore, by selecting publications from 2013 to 2020, the principal investigator may have omitted earlier studies that have discussed gender and gender mainstreaming in EE in Africa. However, the focus of this work was to explore the state-of-the-art of these types of studies on gender mainstreaming in EE. Finally, we want to acknowledge that some of the studies reviewed used more than one rational theoretical framework. 
However, within our inclusion criteria, we focused on studies that used at least one logical context and did not analyse the impact of those that may have used a combination of these outlines. Exploring Gender and Inequality in Engineering Education in Africa Gender is an all-encompassing idea which not only entails what men and women do in society, but to embrace cultural ideas and interpretations about their "masculinity" and "femininity" with structural inequalities that emanate from these differences.Thus, gender equality demands intervention that closesexisting gender gaps in EE, as differences in meaning to "gender equality" entails different intervention frameworks and actions in engineering institutions (Egne, 2014;David, 2017). Gender equality can be viewed as equal gratification of rights and access to opportunities plus outcomes, including resources by men and women. This means that irrespective of gender, individuals are free to develop their personal abilities and make choices without set limitations such as stereotyping, placing rigid gender roles and prejudices. Thus, the core interpretation of gender equality does not mean that men and female folks are equal in all ramifications but are different with biological and physical characteristics in terms of rights, responsibilities and opportunities accessible to them both ( This policy documents lay emphasis on importance of education in promoting gender equality: "states parties shall take all appropriate measures to eliminate discrimination against women in order to ensure to them equal rights with men in the field of education." To CEDAW, gender is practiced collectively with other social constructed identities such as social class, ethnicity, race, sexuality and disability. As a result of this, a gender-sensitive education needs to be based on an intersectional approach, involving the combination of diverse types of inequality producing specific forms of discrimination(UN, 2020). 
However, sexual and intellectual tensionhas dominated and complicated interactions between female and male counterpartsin EE ( Over recent decades, other countries, such as Norway, France, Portugal, and Bulgaria, have witnessed a sharp rise in women's existence in engineering fields. Although, gender discrimination was not completely eradicated in EE, but the experiences of women in non-American engineering studies have followed a particular unusual history (Bix, 2014). Examining if Gender Mainstreaming has been Enacted in Engineering Education in Africa Mainstreaming a gender perspective is the process of assessing the consequences for women and men of any planned action, including legislation, policies or programmes, in all areas and at all levels (Nyarko and Eshun, 2013). Also, it is an approach for making women's as well as men's concerns and experiences an integral dimension of the design, implementation, monitoring and evaluation of policies and programmes in all political, economic and societal spheres so that women and men can benefit equally and inequality is not maintained (Tsvere, 2012). The dynamic goal of mainstreaming a gender perspective is to achieve gender equality in all spheres of life. Presently, women are dramatically under-represented in science-and technology-based innovation activities, as a result of actions from barriers andbiases that were unfair to women, preventing them from full exploitation of their potentials in all ramifications (Varma, 2018). In sub-Saharan Africa (SSA), EE has an important role in addressing gender gap and since many SSA countries have implemented gender equality policies within in the university systems in the 1990s. The SSA universities in Africa has recorded some remarkable progress in female enrolment in STI and STEM programmes, however, stereotypes continue to be evident within subject areas(AAS, 2018; Alblooshi and May, 2018). 
Studies conducted in some SSA countries revealed that, women represent around 50% in humanities and arts and up to 95% in nursing and social welfare, while physics, mathematics, and engineering programmes have proportions as low as below 10%. Thus, the way forward to overcome these challenges is to remove negative stereotypes and promoting positive role models for women, by introducing gender perspective in school curriculum that will address gender gap(UNESCO, 2017). Conversely, there is a decline in the proportion of women taking engineering as a career path, as genderinequalities continue to persist in EE and in engineering workforce (Strachan et al., 2018). This limits the different standpoints that confirm robust engineering research output, as women"s contributions and career progression are relegated. For instance, gender dimension in the university curriculum content and research practices has failed to consider the genetic differences in research assumptions that has led to inaccurate results, involving serious implications on drug effectiveness and medical protocols that were not tailored to women"s needs (Frosina and Mwaura, 2016). Therefore, a gender lens is crucial for distinguishing women"s input and needs for development and transformation in all spheres of life. Despite several efforts made to address gender inequalities in EE, gender gaps still exist at in EE academia and industries (Aiston and Jung, 2015). These gaps are relatively and partly connected with the consequences of social and cultural factors promoting patriarchal society. Moreover, women have a role to play as they are essential catalysts for change, in order to act as role modeland empower other women in the same profession. Women engineers need to be more visible in research and in the media, in order to challenge stereotypes in engineering profession. 
Several national or regional initiatives on gender mainstreaming have started to support gender diversity and inclusion in EE (Nyarko and Eshun, 2013;Wang and Degol, 2015). But The question in mind still remains whether gender mainstreaming policies and programmes as well as its practices have been structured to counterbalance stereotypes attitudes and biases found within society and in EE? In most countries especially in Africa, and in the Fourth Industrial Revolution (4IR) era, life sciences are feminized while engineering and experimental sciences are mostly male-dominated fields(Fredua-Kwarteng and Effah, 2017; UNESC0, 2018; 2018a). Thus, perpendicular discrimination started emerging in engineering profession as the degree of feminisation continues to prevail in EE. Therefore, the feminization within EE in most countries is a well-known phenomenon, but less attention has been paid to the continued imbalance in men"s favour in education management and industrial positions (Strachan et al., 2018). This imbalance has a context: women often make up the majority of civil servants, but they tend to be concentrated in positions with less authority. In OECD countries, women represent 57% of the government workforce, equating to 65% of secretarial positions, 35% of middle managers and 27% of top managers in 2010 (OECD, 2012). Thus, the attention of women in education leadership positions have been called to provide role models that can help encourage female student retention, especially important in countries with low educational attainment for girls. This is key in achieving SDGs, as education and training help in developing professional capacities in EE that will stimulate better development outcomes. However, the enactment of gender mainstreaminghas been implemented in developed and developing countries, such as African region. Hitherto, many African countries are yet to domesticate gender mainstreaming enactment in their local state and communities(WEF, 2017). 
This has further propelled the increase in gender disparities in Africa and in EE respectively. EE programmes should have a critical review that will design their programmes to accommodate gender perspective as this will help to curb gender gap in the profession. Also, to guarantee a successful enactment of gender mainstreaming in EE, engineering educators are obliged to adopt gender perspective in their teaching and learning through staff development training in collaboration with gender advisory board (Wang and Degol, 2015). Such training will be structured explicitly by gender entities and view points that will address gender gaps in day to day activities with engineering students. In addition, teaching with a gender perspective in EE will help in detecting potential gender imbalance in learning environment with assessment of the diversity of engineering students. Also, there are various ways in communicating to engineering students with memorandums that will reinforce inequality between women and men, generating a "concealed curriculum" in EE(Fredua-Kwarteng and Effah, 2017). Here,the "concealed curriculum" reflects gender gaps that persist in EE, in which female academics are not promoted in their contribution to course programmes and research publications through cited studies. But most scientific expert is represented as inherently male, as gender stereotyping are strengthen in EE (Jones et al., 2013). Therefore, closing gender gaps on these grounds is not only significant in terms of granting equal opportunities to all, but a way to pool talented women that will efficiently contribute their quota to EE positively. VI. THEORETICAL FRAMEWORK This paper was guided by Liberal Feminism, acquired from gender inequality theory, which waspropounded by early liberal feminists such as Mary Wollstonecraft, Judith Sargent Murray, and Frances Wright (Marilley, 1996). 
The theory argued that women may claim equality with men on the basis of an essential human capacity for reasoned moral agency, that gender inequality is as a result of a sexist patterning of labour division through repatterning of key institutions  law, work, family, education, and media (Bem, 1993;Lorber, 2000;Pateman, 1999;Rhode, 1997;Schaeffer, 2001). This theory can be applied to gender and gender mainstreaming in EE in Africa, as it focuses on women"s ability to maintain their equality through their actions and choices.In Africa, women are accounted for minority in EE, in spite of global commitments to gender justice. United Nations (2020)has advocated gender equality as an essential part of national strategies for poverty reduction, as countries with high prevalence of female enrolment in education, are much likely to have higher level of economic output. The question of women"s representation in higher education in Africa has received lot of attention, and has called for several debate in the context of broader gender equality and equity discourse in development. James Aggrey (1875-1927) stated a slogan as: "if you educate a man, you educate an individual, but if you educate a woman you educate a family, indeed a nation". This slogan suggests that the education of women is significant to the development of Africa (Varma, 2018). Thus, the first part in the liberal feminist argument is the claim for gender equality and it rest on beliefs that (1) all human beings have certain essential featurescapacities for reason, moral agency, and self-actualization; (2) the exercise of these capacities can be secured through legal recognition of universal rights; (3) the inequalities between men and women assigned by sex are social constructions having no basis in "nature"; and (4) social change for equality can be produced by an organized appeal to a reasonable public and the use of the state (Pateman, 1999;Schaeffer, 2001). 
African women are still under-represented in EE across the continent as enrollment statistics in engineering programme is still low (Varma, 2018). Under-representation of women in EE cannot be ascribed exclusively to lack of interest, ability, or intellectual capacity, but traditional representation of engineering as a male domain discipline, and cultural practices that highlight education of men over women, as well an unsupportive STEM-related teaching environment in secondary school has contributed to the paucity of African women opting for engineering in African universities. Thus, it is clearly an issue of social injustice, which involves an inequitable distribution of engineering educational opportunities to women also(AAS, 2018; Alblooshi and May, 2018).Thus, Liberal feminists argue that society holds the false belief that women are, by nature, less intellectually and physically capable than men; thus it tends to discriminate against women in the academy, in the forum, and in the marketplace. Liberal feminists believe that "female subordination is rooted in a set of customary and legal constraints that blocks women"s entrance to and success in the so-called public world"; and women are made to strive for sexual equality via political and legal reform. Conversely, analysis of gender equality is hampered by many factors (Fajardo and Erasmus, 2017;Odaga, 2020). Factors such as income and social class are frequently interwoven with gender, for the more educated the girl parents are, the less gender stereotypes that are applied to their daughters. The systematic segregation of women in the world"s workforce and their restricted access to basic amenities is imposing a tremendous cost on productivity, sustainable development and economic progress worldwide(WEF, 2009). 
Gender disparities remain high in a variety of sectors, even in advanced nations as this diminishes an economy"s capacity to grow and raise living standards.Programs and policies to reduce gender gap have the potential to significantly improve economic growth and standards of living for women and girls in Africa, with impact on poverty reduction, health and wellbeing.Girls and women are yet to experience full and equal opportunities for education and employment around the world (Strachan et al., 2018). Even though there has been progress towards gender equality in developing nations at the primary level, the secondary and tertiary educational levels remain problematic. VII. DISCUSSION Globally women are often under-represented in the STEMrelated fields such as the medical field and engineering and the underrepresentation gets even worse with African women compared to the rest of the world. This has created gender gaps in engineering field as gender gaps are detrimental as they are associated with the loss of potential talent (Nyarko and Eshun, 2013;UNESCO, 2013;2018a;Varma, 2018). A lot of international organizations within and out of Africa have made a lot of efforts in working towards closing the gaps and stereotyping of women. Thus, stereotyping threat can contribute to underrepresentation of women in engineering, as engineering is perceived as traditionally male-dominated field, which made women to be less assertive about their skills, even when performing equally with their male counterparts (Jones et al., 2013;UNESCO, 2016). A lot of young girls from high school do not express the same level of interest in engineering as boys, possible due in part to gender stereotypes. Significantly, indication of the persisting occurrence of inherent bias against female engineers were due to the acceptance that men are mathematically excellent and better suited to engineering profession (AAS, 2018). 
Simply, women who persist were able to overcome these problems, which enable them to find fulfilling and rewarding experiences in engineering profession. Due to this gender bias, women"s choice in entering an engineering field was highly correlated to the background and exposure they had with mathematics and other science courses during high school. Most women who choose to study engineering have significant experience with regarding themselves better at these types of courses and as a result, they think they are capable of studying in a male-dominated field (David, 2017;UNESCO, 2018;Elu, 2018). Therefore, women"s self-efficacy is a contributor to gender stereotype that plays a major role in the underrepresentation of women in engineering profession. Women"s ability to think critically that they can be successful and perform accomplishments is associated to the choices they have when choosing a college career; and women who show high self-efficacy personalities are more disposed to choose to study in the engineering field. Selfefficacy is linked to gender roles since men often present higher self-efficacy than women, which can also be a cause to why when choosing a major course, most women opt to not choose the engineering major (UNESCO, 2017). Women are under-represented in engineering education programmes as in the workforce, as admission and graduation rates of women in post-secondary engineering programmes are very important bases of how many women go on to become engineers. Since undergraduate degrees are acknowledged as the latest point of typical entry into scientific fields, the under-representation of women in undergraduate programs contributes directly to underrepresentation in scientific fields. Besides, in the USA, women who had degrees in science, technology, and engineering fields are less likely than their male counterparts to have jobs in these fields (Bix, 2014;UNESCO, 2013;2017;). This degree disparity varies across engineering disciplines. 
Women tend to be more interested in the engineering disciplines that have societal and humane developments, such as agricultural and environmental engineering. They are therefore wellrepresented in environmental and biomedical engineering degree programmes, receiving 40-50% of awarded degrees in the USA (2014-2015), and women are far less likely to receive degrees in fields like mechanical, electrical, and computer engineering (Bix, 2014). Despite the fact that fewer women enrol in engineering programmes, the representation of women in STEM-based careers can potentially increase when college and university administrators work on implementing mentoring programmes and work-life policies for women. Studies shows that these rates have a hard time increasing since women are judged as less competent than men to perform supposedly male jobs(Morley, 2010; Egne, 2014; USAID, 2015; UN, 2020). Another possible reason for lower female participation in engineering fields is the occurrence of values associated with male gender role in workplace culture. For instance, women in engineering have found it difficult to re-enter workforce after a period of absence and men are less likely to take time off to raise a family; this inexplicably affects women. Issues related to class collaborations, teacher-student crescendos or the language used in the learning environment also have a role to play in gender gap. Gender blindness can obscure differences during class interactions between engineering educators and students. Notably, gender perspective allows paying attention to significant differences between academic results of male and female engineering students caused by teaching and assessment methods, which often go unnoticed in engineering academia (Strachan et al., 2018). 
Gender perspective supports consideration of professional thinking and organisational philosophy of engineering disciplines itself, as there are gender differences in access to courses and curriculum revision, in which such part is played by social roles and stereotypes in EE in Africa (Jones et al., 2013;UN, 2020). It is necessary to comprehend how these mechanisms determine the choice of studies and profession, so that better actions can be taken with a better view in eliminating gender gaps. Gender-blind projects and programmes do not always take account of diverse roles and different needs attributed to male and female gender. The society often make a major mistake in maintaining the status quo that will aid in transforming the unequal structure of gender relations and learning institutions are affected by gender blindness(WEF, 2009; OECD 2012; UNESCO, 2018). Regarding the content of engineering programmes and curriculum, gender blindness has many consequences in EE as over-generalisation of phenomena studied on the basis of men experiences had made the situation of women invisible in EE. A false representation of such attitudes towards female folks has made women"s needs seen as deviating from the ideal male model, making a blurring differences between men and women as if they were homogeneous groups. Several authors have cited and documented explanations of these differences based on gender stereotypes of the female gender (Varma, 2018). VIII. CONTRIBUTION OF NEW KNOWLEDGE For EE to improve, integrating internal and external drivers of gender perspective through rebuilding gender-based EE curriculum programmes is advocated for. This paper has shown inferences drawn from systematic review of sampled articles and recommendations were incorporated into the paper. The contribution of this paper to new knowledge is in general form. 
Firstly, it presents concrete instances of gender inequalities in EE, showing how engineering gender-gap initiatives can be sustained by domesticating engineering reforms and enactments applicable to EE perspectives in Africa.
IMPLICATION OF THE STUDY This paper presented systematic review of relevant articles to the study objectives, as cited instances and different approaches in the articles collected were reflected in the discussion. It identifies the significance and importance of gender mainstreaming in EE and how gender perspective can be reconstructed in EE curriculum programmes to address gender gaps. This becomes an important public issue in EE and provide discussions on how gender inequalities in EE can be eliminated in Africa(Oanda and Akudolu, 2010; Fajardo and Erasmus, 2017; Odaga, 2020), hence the need to engage in this research review. By focusing the study around two objectives, that reveals the strengths and benefits of the reforms of engineering curriculum and staff development will bring to students the knowledge and awareness on gender sensitive concerns that will be continually commended. Additionally, the need to sign up to the 2030 SDGs on gender will create a more inclusive, just and equitable world in the sense of sustainable development that will ensure male and female, can lead empowered and dignified lives. This is an inclusive way and gender equitable education of good quality in achieving the SDG on gender targets (UNESCO, 2018; 2018a; UN, 2020). Also, women should be allowed to have equal access to quality education and economic resources as well as equal opportunities with their male counterparts at all levels. This will bring about a significant increase in investing to close gender gap and strengthen support for institutions in relation to gender equality and empowerment of women in EE. The benefits of this approach, linking theory with practice, is that it can help educators and students to linkgender issues by inspiring and motivating students to engage in full gender-based programme participation in classroom activities. 
Thus, this will promote sensitization of gender disparities in EE and will give them better opportunities to have an open discussion to provide solutions on how to curb gender gaps in EE. Bearing in mind that integrating gender perspective in engineering education, especially ones initiated by individual engineering educators or departments, will be made to harmonize with the SDG approaches for the sustenance of gender mainstreaming in EE in Africa. X. CONCLUSION AND RECOMMENDATIONS This paper attempts to show that in Africa, gender mainstream is very important in improving the underrepresentation of women in engineering fields and at different career stages. Besides, women need to acquire and develop new set of skills and abilities to gain access to quality education.
An Anatomic Study of the Relationship Between the Iliocapsularis Muscle and Iliofemoral Ligament in Total Hip Arthroplasty Background The preservation of soft tissues is an important factor for preventing dislocation after total hip arthroplasty. Anatomical studies have revealed that the inferior iliofemoral ligament (ILFL) contributes significantly to the native stability of the hip. This study aimed to investigate the anatomical structures of the iliocapsularis muscle (ICM) and ILFL from a surgical perspective. Methods In total, we assessed 50 hip specimens from 25 embalmed cadavers. The size and location of ICM and ILFL (at the upper, middle, and lower parts of the femoral head) were assessed in a neutral position. The ratio of ICM and ILFL widths to the femoral head was evaluated. Results The mean ICM and ILFL widths were 7.5 and 14.6, 12.0 and 14.2, and 12.8 and 15.2 mm at the upper, middle, and lower levels, respectively. The mean ICM thicknesses were 1.3, 9.0, and 9.1 mm at the upper, middle, and lower levels, respectively. The distributions of ICM and ILFL from the lateral edge of the femoral head were 21.1%–37.4% and 4.9%–36.5%, 9.5%–35.6% and 7.9%–38.7%, and 11.0%–38.7% and 9.5%–42.4% at the upper, middle, and lower levels, respectively. Conclusion ICM and ILFL were located at the anterolateral side of the femoral head, and the medial edge of the ILFL corresponded to that of the ICM. ICM can serve as a landmark for preserving ILFL in total hip arthroplasty using the anterior approach. Introduction Dislocation is a common complication of total hip arthroplasty (THA), and recurrent dislocation can be an indication for revision THA . Although dislocation can occur due to several reasons, soft-tissue tension is a critical factor . The use of the direct anterior approach (DAA) and the anterolateral approach in hip surgery has been increasing owing to the less invasive nature of surgical approaches . 
When applying the anterior/anterolateral approach, whether exposure of the proximal femur is essential for the insertion of implant components and is ideal for the release of soft tissues to achieve good exposure and joint stability remains controversial . Recent anatomical studies have reported that the capsular ligament plays a repressive role in external and internal rotation and traction force of the hip joint. The capsular ligament comprises three primary fibrous ligaments: iliofemoral, ischiofemoral, and pubofemoral. The iliofemoral ligament consists of the superior and inferior branches, which are inserted together into the anterior inferior iliac spine (AIIS) of the pelvis, each extending out to attach along the intertrochanteric line of the femur. The inferior iliofemoral ligament (ILFL) is strained during hip extension and external rotation . While the clinical importance of capsular ligament preservation for hip stability after THA is controversial , some studies claimed that the role of the ILFL was to facilitate stability after THA and prevent excessive leg lengthening . Although it is considered important, its actual anatomical structure is challenging to identify because of limitations in surgical view . The iliocapsularis muscle (ICM) is a small muscle that originates from AIIS and is located distal to the lesser trochanter overlying the No author associated with this paper has disclosed any potential or pertinent conflicts which may be perceived to have impending conflict with this work. For full disclosure statements refer to https://doi.org/10.1016/j.artd.2021.09.010. ILFL . Recent studies have revealed its role in achieving stability and anatomical structure . However, the clinical role of the ICM is only a speculation based on the results of anatomical and radiographic studies, and limited data are available on the anatomical structure of this muscle from a surgical viewpoint. 
This study aimed to investigate the anatomical structures of the ICM and ILFL using the anterior/anterolateral approach. In addition, whether the ICM can serve as a landmark for preserving the ILFL in THA using the anterior approach was assessed. Material and methods This study was performed in accordance with the principles of the Declaration of Helsinki, and it was approved by the institutional review board of our university. Written informed consent was obtained from all patients before death. At the clinical anatomy laboratory of our institution, 50 hip specimens from 25 embalmed cadavers (10 men and 15 women) were used in this study. Eight hips were excluded because of bilateral contracture and a previous surgery. In total, 40 hips were paired; the remaining 2 hips were unpaired because the contralateral hips were excluded because of a previous surgery. The mean age of the specimens at death was 84.0 (range: 58e99) years (Table 1). There was no case of osteoarthritis or previous trauma upon visual inspection. All cadavers were dissected while in the supine position, with each lower leg in the neutral position. The skin and subcutaneous tissue were removed from the top of the iliac crest to the middle thigh (Fig. 1a). The sartorius muscle and tensor fasciae latae were peeled and removed from the anterior superior iliac spine (Figs. 1b and 2a). The rectus femoris and iliopsoas muscles were identified proximally and transected distally (Fig. 1c). After arthrotomy around the hip, ICM was found attached to AIIS and was overlying the anterior hip capsule (Figs. 1d and 2b). Subsequently, ILFL was detected anteromedial to the femoral head ( Fig. 1e, f, 2c, and d). After assessing ICM and ILFL, each hip was dislocated, and the capsule was resected to obtain a whole view of the femoral head. A reference line parallel to the femur was drawn at the lateral edge of the femoral head. The distance from the line to the lateral and medial edges of ICM and ILFL were measured. 
The width and thickness of ICM and the width of ILFL were assessed while in the neutral position. Hence, the ICM and ILFL widths corresponded to the distance between the lateral and medial edges. These measurements were performed at the upper, middle, and lower parts of the femoral head using a digital caliper with the hip at 0 flexion, 0 abduction, and neutral rotation (Fig. 2d). The femoral head diameter was measured after hip dislocation. Each measurement was conducted by two board-certified hip surgeons, and the average was obtained for analysis. The intraclass correlation coefficient (ICC) was used to evaluate the interobserver reliability of each measurement. The distance from the reference line and the ICM and ILFL widths were compared using the t-test. The ratio of the ICM and ILFL widths to the femoral head diameter in each hip was calculated to minimize the impact of femoral head size and identify the distribution. The Pearson correlation coefficient was assessed to determine the association between the width and thickness of the ICM at the middle level and the femoral length and femoral head diameter. An ICC of 1 indicated perfect reliability; >0.80, very good reliability; >0.60, good reliability; and >0.40, moderate reliability . A two-sided P value of <0.05 was considered significant, and all results were calculated using the SAS for Windows (version 9.4; SAS Institute). Results The thigh length and femoral head diameter are shown in Table 1. The distance from the reference line, ICM and ILFL widths, and ICM thickness values are shown in Table 2. ICM and ILFL significantly differed in terms of the distance from the reference line to the lateral edge and width at the upper level (P < .001, respectively) and at the middle and lower levels (P ¼ .003 and P ¼ .002, respectively). The ratio of the ICM and ILFL widths to the femoral head diameter are shown in Figure 3. 
The distributions of the ICM and ILFL were 21.1%–37.4% and 4.9%–36.5%, 9.5%–35.6% and 7.9%–38.7%, and 11.0%–38.7% and 9.5%–42.4% at the upper, middle, and lower levels, respectively. The ICM thickness was positively correlated with the femoral length (r = 0.52, P < .001). Moreover, there was a positive association between the ICM width and the femoral head diameter (r = 0.53, P < .001).
Biomechanical cadaveric studies of the capsular ligament have shown that ILFL can facilitate primary restraint while in extension and external rotation . Although existing evidence on the preservation of the ILFL to prevent dislocation after THA is controversial , given that these positions are essential for dislocation and exposure of the femur during hip surgery using the DAA and anterolateral approach, the resection of entire ILFL can increase the risk of dislocation after THA. In addition, a radiographic study has reported that ICM is an important anterior stabilizer of the femoral head . An anatomical study has reported that the iliofemoral ligament is composed of superior and inferior branches, which is inserted together into the inferior edge of AIIS via the fibrocartilage, thereby forming the inverted Y-shaped ligament . ILFL ran across the anterior surface of the femoral head, and it was attached into the inferior portion of the intertrochanteric line . ICM is the deepest portion of the iliopsoas and is attached to ILFL via the deep aponeurosis . The origin of ICM corresponded with the inferior edge of AIIS , which is consistent with the measurements of the present study. The morphological measurements around the hip have been conventionally described using the clock-face method . It might be reasonable to describe the anatomical structure referring to the acetabular rim. However, the clock-face reference point cannot be obtained using the anterior approach before osteotomy and dislocation of the femur. Thus, we applied a reference line and coordinate system, which could be identified from the surgical view. We believe that this coordinate system and measurement are reliable based on the interobserver ICC and are clinically applicable for hip exposure during surgery. Our study showed that the overwrapping structure of ILFL and ICM corresponded to the medial edge. In addition, there was a correlation between the ICM width and femoral head diameter. 
Based on the lateral distribution of ILFL and ICM to the femoral head, it might be difficult to preserve the whole ligament for hip exposure, particularly in cases of contracture. However, based on our findings, surgeons can identify the extent of excision and release the lateral ILFL in the surgical view with consideration of the ICM and femoral head. This study had several limitations. First, the average age of the cadavers was 84.0 years. Age-dependent factors including muscle atrophy could influence the ICM thickness and width. Second, all specimens were collected from cadavers with normal hips (without osteophytes and contracture). Therefore, our measurements may not be applicable to patients with osteoarthritis of the hip. Third, the dynamic change in hip motion after THA was not examined because our specimens were obtained from formalin-embalmed cadavers and did not contain any implant components. Thus, the clinical importance of ILFL preservation remains unclear. This study evaluated the anatomical structure of ICM and ILFL. They were found at the anterolateral side of the femoral head, and the medial edge of ILFL corresponded to that of ICM. Thus, ICM could be a landmark for preserving ILFL in THA using the anterior approach. Nevertheless, further clinical studies must be performed to assess the utility of ICM as a landmark in THA for osteoarthritis.
def __search_channel(self, id: int) -> Set[int]:
    """Look up all row ids associated with the given channel.

    Runs the pre-loaded ``search_channel.sql`` query with ``id`` bound to
    the ``channel_id`` parameter and collects the first column of every
    returned row into a set.

    :param id: channel id bound to the ``channel_id`` query parameter
    :returns: the (possibly empty) set of ids found for the channel
    """
    query = self.sql_fetcher["search_channel.sql"]
    # `with self.conn` opens a transaction scope; exiting commits it
    # (DB-API context-manager semantics — confirm against the driver used).
    with self.conn as conn:
        with conn.cursor() as cursor:
            cursor.execute(query, {"channel_id": id})
            # fetchall() returns [] when there are no rows, so the set
            # comprehension already yields an empty set.  The original
            # `return ids or {}` fell back to an empty *dict* in that case,
            # violating the declared Set[int] return type; the fallback is
            # removed as both wrong-typed and unnecessary.
            ids = {row[0] for row in cursor.fetchall()}
    return ids
<filename>basic-application/basic-application-webapp/src/main/java/org/iglooproject/basicapp/web/application/security/login/component/SignInFooterPanel.java package org.iglooproject.basicapp.web.application.security.login.component; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.spring.injection.annot.SpringBean; import org.iglooproject.basicapp.core.business.user.model.User; import org.iglooproject.basicapp.core.security.service.ISecurityManagementService; import org.iglooproject.basicapp.web.application.security.password.page.SecurityPasswordRecoveryRequestCreationPage; import org.iglooproject.basicapp.web.application.security.password.page.SecurityPasswordRecoveryRequestResetPage; import org.iglooproject.wicket.more.condition.Condition; public class SignInFooterPanel extends Panel { private static final long serialVersionUID = -7042210777928535702L; @SpringBean private ISecurityManagementService securityManagementService; public SignInFooterPanel(String wicketId) { super(wicketId); add( Condition.anyChildVisible(this) .thenShow() ); add( SecurityPasswordRecoveryRequestCreationPage.linkDescriptor() .link("passwordRecoveryRequestCreation") .add( Condition.isTrue(() -> securityManagementService.getSecurityOptions(User.class).isPasswordUserRecoveryEnabled()) .thenShow() ), SecurityPasswordRecoveryRequestResetPage.linkDescriptor() .link("passwordRecoveryRequestReset") .add( Condition.isTrue(() -> securityManagementService.getSecurityOptions(User.class).isPasswordUserRecoveryEnabled()) .thenShow() ) ); } }
Wondering if your spouse is cheating on you? Check to see how comfortable they are with sex, or how happy they are in the relationship. A new study performed by researchers at the University of Guelph in Ontario, Canada, and at Indiana University is the first to consider not only demographic information when it comes to determining infidelity, but interpersonal factors and sexual personality as well. When it comes to a cheating spouse, several issues can come into play. “This research shows that demographic variables may not influence decision-making as much as previously thought — that personality matters more, especially for men,” said Robin Milhausen, a professor and sexuality researcher in Guelph’s Department of Family Relations and Applied Nutrition who conducted the study with Kristen Mark and Erick Janssen of Indiana University. The study involved 506 men and 412 women. All participants in the study reported being in monogamous relationships lasting from three months to 43 years. They also provided basic demographic information, including income, religion and education. The participants also completed scales that measured their sexual personality variables as well as answered questions about their relationships. While there was little difference between the sexes in the rates of infidelity – 23 percent for men, 19 percent for women – the reasons for why they cheated varied. For men, the driving causes behind infidelity included ease of sexual excitement and concern about sexual performance failure. The latter might seem like an odd reason to cheat on a partner, but as Milhausen explains, “People might seek out high-risk situations to help them become aroused, or they might choose to have sex with a partner outside of their regular relationship because they feel they have an ‘out’ if the encounter doesn’t go well – they don’t have to see them again.” For women, the primary reason for committing infidelity was unhappiness in the relationship. 
Such women were twice as likely to cheat, and women who felt sexually incompatible with their partners were three times as likely. “For women, in the face of all other variables, it’s still the relationship that is the most important predictor,” said Milhausen. While Milhausen warns that the study should not be used to support sexual stereotypes, she does say that these personality traits are useful for determining potential actions and will allow couples to seek therapeutic solutions. This study appears in the journal Archives of Sexual Behaviour. Cheating spouse? New study gives clues Wondering if your spouse is cheating on you? Check to see how comfortable they are with sex, or how happy they are in the relationship.
#include <bits/stdc++.h>
using namespace std;

// Competitive-programming solution with Polish identifiers:
// "srodek" = middle, "lewo"/"prawo" = left/right, "czas" = time,
// "ile..." = how much, "ost..." = last, "wynik" = result,
// "licz_zasiegi" = compute ranges.
// NOTE(review): overall this appears to binary-search how to split the
// total supply between the left and right halves of the n positions so
// that the larger count of uncovered positions on either side is
// minimized — TODO confirm against the original problem statement.
const int N = 1e5;

int n, b;          // n = number of positions; b = amount required per position
int srodek;        // middle index: positions 1..srodek form the left half
long long d;       // speed/step parameter; 64-bit because it is multiplied by indices

int lewo[N + 7];   // lewo[j]: step from which source j can serve the left side (presumably)
int prawo[N + 7];  // prawo[j]: symmetric value for the right side
int ostl[N + 7];   // ostl[i]: last source index available at left position i
int ilel[N + 7];   // ilel[i]: cumulative supply available at left position i
int ostr[N + 7];   // ostr[i]: first source index available at right position i
int iler[N + 7];   // iler[i]: cumulative supply available at right position i
int ta[N + 7];     // per-position input amounts
int na[N + 7];     // NOTE(review): declared but never used in this chunk

// Precompute, for each side, which sources reach each position in time
// and how much cumulative supply is available there.
void licz_zasiegi() {
    // Forward sweep: advance j while position j, moved left by d per
    // step i (poz = j - d*i), has come within i; record lewo[j] = i.
    int j = 1;
    for (int i = 1; i <= n; ++i) {
        while (j <= n) {
            long long poz = j;      // 64-bit so d * i cannot overflow
            poz -= d * i;
            if (poz > i) break;     // j not reachable yet at step i
            lewo[j] = i;
            j++;
        }
    }
    // Symmetric backward sweep for the right side; czas ("time")
    // increases as i walks from n down to 1.
    int czas = 1;
    j = n;
    for (int i = n; i > 0; --i, ++czas) {
        while (j > 0) {
            long long poz = j;
            poz += d * czas;
            if (poz < i) break;
            prawo[j] = i;
            j--;
        }
    }
    // Accumulate over the left half: extend j while source j+1 already
    // reaches position i, summing its supply.
    j = 1;
    int sum = ta[j];
    for (int i = 1; i <= srodek; ++i) {
        while (j + 1 <= n && lewo[j + 1] <= i) {
            j++;
            sum += ta[j];
        }
        ostl[i] = j;
        ilel[i] = sum;
    }
    // Mirror accumulation for the right half.
    j = n;
    sum = ta[n];
    for (int i = n; i > srodek; --i) {
        while (j > 1 && prawo[j - 1] >= i) {
            j--;
            sum += ta[j];
        }
        ostr[i] = j;
        iler[i] = sum;
    }
}

// Greedily spend budget x over left positions srodek..1: each position
// consumes b units and can draw at most ilel[i]. Returns the number of
// LEFT positions left uncovered.
int fLewa(int x) {
    int res = 0;
    for (int i = srodek; i > 0; --i) {
        x = min(x, ilel[i]);   // cannot use more than what is reachable here
        if (x >= b) {
            res++;             // position i covered
            x -= b;
        }
    }
    return srodek - res;
}

// Same greedy for right positions srodek+1..n, capped by iler[i].
// Returns the number of RIGHT positions left uncovered.
int fPrawa(int x) {
    int res = 0;
    for (int i = srodek + 1; i <= n; ++i) {
        x = min(x, iler[i]);
        if (x >= b) {
            res++;
            x -= b;
        }
    }
    return n - srodek - res;
}

int main() {
    ios_base::sync_with_stdio(0);
    cin >> n >> d >> b;
    for (int i = 1; i <= n; ++i) cin >> ta[i];
    // sa = total supply.
    // NOTE(review): int could overflow if sum(ta) exceeds 2^31-1 —
    // verify against the problem's input limits.
    int sa = 0;
    for (int i = 1; i <= n; ++i) sa += ta[i];
    srodek = (n + 1) / 2;
    licz_zasiegi();
    // Binary search on s = supply assigned to the left half (the rest
    // goes right). The uncovered counts move in opposite directions as
    // s grows, so step toward the balance point and keep the best
    // max(left, right) seen.
    int pp = 0;
    int kk = sa;
    int wynik = 1e9;
    while (pp <= kk) {
        int s = (pp + kk) / 2;
        int rl = fLewa(s);        // uncovered on the left with budget s
        int rp = fPrawa(sa - s);  // uncovered on the right with the remainder
        wynik = min(wynik, max(rl, rp));
        if (rl > rp) pp = s + 1;
        else kk = s - 1;
    }
    cout << wynik << endl;
    return 0;
}
package gr.di.hatespeech.readers;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.csv.CSVRecord;

import gr.di.hatespeech.entities.Text;
import gr.di.hatespeech.utils.Utils;

/**
 * CsvReader implementation that converts each row of a tweet CSV file
 * into a {@link Text} entity.
 * @author sissy
 */
public class TweetCsvReader extends CsvReader<List<Text>> {
	private static String startingMessageLog = "[" + TweetCsvReader.class.getSimpleName() + "] ";
	protected List<Text> texts = new ArrayList<>();

	/**
	 * Reads all rows from the given CSV file and maps every parsable row
	 * to a Text object. Rows whose numeric fields cannot be parsed are
	 * logged and skipped by {@link #createTextFromLine(CSVRecord)}.
	 * @param fileName, the name of the file to read
	 * @return the list of successfully parsed Text objects
	 */
	@Override
	public List<Text> readData(String fileName) {
		String[] headers = { Utils.ID, Utils.BODY, Utils.LABEL, Utils.OLD_LABEL, Utils.DATASET,
				Utils.TWEET_ID, Utils.PROCESSED_BODY };
		Iterable<CSVRecord> csvRecords = getCsvRecords(headers, fileName);
		if (csvRecords != null) {
			// reset the accumulated list before re-reading the file
			texts = new ArrayList<>();
			for (CSVRecord record : csvRecords) {
				Text parsed = createTextFromLine(record);
				if (parsed == null) {
					continue; // unparsable row, already logged
				}
				texts.add(parsed);
			}
		}
		return texts;
	}

	/**
	 * Maps a single CSV record to a Text entity.
	 * @param record the CSV row to convert
	 * @return the populated Text, or null when a numeric field cannot be parsed
	 */
	public Text createTextFromLine(CSVRecord record) {
		try {
			Text text = new Text();
			text.setId(Long.parseLong(record.get("id")));
			text.setBody(record.get("body"));
			text.setLabel(record.get("label"));
			text.setOldLabel(record.get("old_label"));
			text.setDataset(Integer.parseInt(record.get("dataset")));
			text.setPrepMessage(record.get("processed_body"));
			// dataset 0 rows carry an original tweet id; other datasets do not
			boolean hasTweetId = text.getDataset().equals(0);
			if (hasTweetId) {
				text.setTweetId(record.get("tweet_id"));
			}
			Utils.FILE_LOGGER.info(startingMessageLog + text.getId() + " " + text.getBody() + " " + text.getLabel());
			return text;
		} catch (NumberFormatException e) {
			Utils.FILE_LOGGER.error(e.getMessage(), e);
			Utils.FILE_LOGGER.error(startingMessageLog + "#########Text that could not be parsed=> id = "
					+ record.get("id") + ", body = " + record.get("body") + ", label = " + record.get("label"));
			return null;
		}
	}

	public List<Text> getTexts() {
		return texts;
	}

	public void setTexts(List<Text> texts) {
		this.texts = texts;
	}
}
/** * @file manual_control.cpp * @brief Example that demonstrates how to use manual control to fly a drone * using a joystick or gamepad accessed using sdl2. * * Requires libsdl2 to be installed * (for Ubuntu: sudo apt install libsdl2-dev). * * @authors Author: <NAME> <<EMAIL>>, */ #include <chrono> #include <future> #include <memory> #include <iostream> #include <mavsdk/mavsdk.h> #include <mavsdk/plugins/action/action.h> #include <mavsdk/plugins/telemetry/telemetry.h> #include <mavsdk/plugins/manual_control/manual_control.h> #include "joystick.h" using namespace mavsdk; // This config works for Logitech Extreme 3D Pro struct JoystickMapping { int roll_axis = 0; int pitch_axis = 1; int yaw_axis = 2; int throttle_axis = 3; bool roll_inverted = false; bool pitch_inverted = true; bool yaw_inverted = false; bool throttle_inverted = true; } joystick_mapping{}; void wait_until_discover(Mavsdk& mavsdk) { std::cout << "Waiting to discover system..." << std::endl; std::promise<void> discover_promise; auto discover_future = discover_promise.get_future(); mavsdk.subscribe_on_new_system([&mavsdk, &discover_promise]() { const auto system = mavsdk.systems().at(0); if (system->is_connected()) { std::cout << "Discovered system" << std::endl; discover_promise.set_value(); } }); discover_future.wait(); } void usage(std::string bin_name) { std::cout << "Usage : " << bin_name << " <connection_url>" << std::endl << "Connection URL format should be :" << std::endl << " For TCP : tcp://[server_host][:server_port]" << std::endl << " For UDP : udp://[bind_host][:bind_port]" << std::endl << " For Serial : serial:///path/to/serial/dev[:baudrate]" << std::endl << "For example, to connect to the simulator use URL: udp://:14540" << std::endl; } int main(int argc, char** argv) { Mavsdk mavsdk; std::string connection_url; ConnectionResult connection_result; if (argc == 2) { connection_url = argv[1]; connection_result = mavsdk.add_any_connection(connection_url); } else { usage(argv[0]); return 1; 
} if (connection_result != ConnectionResult::Success) { std::cerr << "Connection failed: " << connection_result << std::endl; return 1; } auto joystick = Joystick::create(); if (!joystick) { std::cerr << "Could not find any joystick" << std::endl; return 1; } wait_until_discover(mavsdk); auto system = mavsdk.systems().at(0); auto action = std::make_shared<Action>(system); auto telemetry = std::make_shared<Telemetry>(system); auto manual_control = std::make_shared<ManualControl>(system); while (!telemetry->health_all_ok()) { std::cout << "Waiting for system to be ready" << std::endl; std::this_thread::sleep_for(std::chrono::seconds(1)); } std::cout << "System is ready" << std::endl; for (unsigned i = 0; i << 10; ++i) { manual_control->set_manual_control_input(0.f, 0.f, 0.5f, 0.f); } auto action_result = action->arm(); if (action_result != Action::Result::Success) { std::cerr << "Arming failed: " << action_result << std::endl; return 1; } for (unsigned i = 0; i << 10; ++i) { manual_control->set_manual_control_input(0.f, 0.f, 0.5f, 0.f); } auto manual_control_result = manual_control->start_position_control(); if (manual_control_result != ManualControl::Result::Success) { std::cerr << "Position control start failed: " << manual_control_result << std::endl; return 1; } while (true) { const float roll = joystick->get_axis(joystick_mapping.roll_axis) * (joystick_mapping.roll_inverted ? -1.f : 1.f); const float pitch = joystick->get_axis(joystick_mapping.pitch_axis) * (joystick_mapping.pitch_inverted ? -1.f : 1.f); const float yaw = joystick->get_axis(joystick_mapping.yaw_axis) * (joystick_mapping.yaw_inverted ? -1.f : 1.f); float throttle = joystick->get_axis(joystick_mapping.throttle_axis) * (joystick_mapping.throttle_inverted ? 
-1.f : 1.f); // Scale -1 to 1 throttle range to 0 to 1 throttle = throttle / 2.f + 0.5f; // std::cout << "Joystick input: roll: " << roll << ", pitch: " << pitch << ", yaw: " << yaw // << ", throttle " << throttle << std::endl; manual_control->set_manual_control_input(pitch, roll, throttle, yaw); std::this_thread::sleep_for(std::chrono::milliseconds(20)); } while (telemetry->armed()) { std::this_thread::sleep_for(std::chrono::seconds(1)); } std::cout << "Disarmed!" << std::endl; return 0; }
/// Backend implementation (contents live in `backend.rs` or `backend/` —
/// not visible from this chunk).
pub mod backend;
/// Session handling (contents live in `session.rs` or `session/` —
/// not visible from this chunk).
pub mod session;