content
stringlengths
10
4.9M
#include <stdio.h>

/* A value paired with its original 1-based position in the input. */
typedef struct node {
    int e; /* original index */
    int v; /* value */
} node;

/* Exchange the contents of two nodes. */
void swap(node *a, node *b)
{
    node tmp = *a;
    *a = *b;
    *b = tmp;
}

int main(void)
{
    int n;
    scanf("%d", &n);

    node a[n + 1];
    for (int i = 1; i <= n; i++) {
        scanf("%d", &a[i].v);
        a[i].e = i;
    }

    /* Sort ascending by value (exchange sort); ties keep duplicates adjacent. */
    for (int i = 1; i < n; i++)
        for (int j = i + 1; j <= n; j++)
            if (a[i].v > a[j].v)
                swap(a + i, a + j);

    /* Record up to two position pairs that hold equal values. */
    int b[2][2];
    int f = 0;
    for (int i = 1; i < n; i++)
        for (int j = i + 1; j <= n; j++)
            if (a[i].v == a[j].v && f < 2) {
                b[f][0] = i;
                b[f][1] = j;
                f++;
            }

    /* Fewer than two duplicate pairs: three distinct orderings are impossible. */
    if (f < 2) {
        puts("NO");
        return 0;
    }

    puts("YES");
    for (int i = 1; i <= n; i++)
        printf("%d ", a[i].e);
    puts("");

    swap(a + b[0][0], a + b[0][1]);
    for (int i = 1; i <= n; i++)
        printf("%d ", a[i].e);
    puts("");

    swap(a + b[1][0], a + b[1][1]);
    for (int i = 1; i <= n; i++)
        printf("%d ", a[i].e);
    puts("");

    return 0;
}
<reponame>shadowcrypto1/dsynths-app-v2 export const makeHttpRequest = async function ( url: string, options: { [x: string]: string } = { cache: 'no-cache', } ) { try { const response = await fetch(url, options) return await response.json() } catch (err) { console.error(`Error fetching ${url}: `, err) return null } }
/**
 * One-time test fixture setup: wipes existing content, mapping and workflow
 * data, then creates a test concept, two map users and a REVIEW_PROJECT map
 * project, and computes the project's initial workflow.
 *
 * @throws Exception the exception
 */
@BeforeClass
public static void init() throws Exception {
  // Instantiate the JPA-backed services and the workflow path handler under test.
  contentService = new ContentServiceJpa();
  mappingService = new MappingServiceJpa();
  workflowService = new WorkflowServiceJpa();
  handler = new WorkflowQaPathHandler();

  // Remove all relationships first so concepts can be deleted cleanly.
  for (Concept c : contentService.getConcepts().getIterable()) {
    for (Relationship r : c.getRelationships()) {
      contentService.removeRelationship(r.getId());
    }
  }
  for (Concept c : contentService.getConcepts().getIterable()) {
    contentService.removeConcept(c.getId());
  }
  for (MapProject mp : mappingService.getMapProjects().getIterable())
    mappingService.removeMapProject(mp.getId());
  // Keep the built-in "guest", "loader" and "qa" users; remove everything else.
  for (MapUser mu : mappingService.getMapUsers().getIterable())
    if (!mu.getUserName().equals("guest") && !mu.getUserName().equals("loader")
        && !mu.getUserName().equals("qa")) {
      mappingService.removeMapUser(mu.getId());
    }
  for (TrackingRecord tr : workflowService.getTrackingRecords().getIterable())
    workflowService.removeTrackingRecord(tr.getId());

  // Single test concept in the source terminology.
  concept = new ConceptJpa();
  concept.setActive(true);
  concept.setDefaultPreferredName("Test Concept");
  concept.setDefinitionStatusId(0L);
  concept.setEffectiveTime(new Date());
  concept.setModuleId(0L);
  concept.setTerminology("sourceTerminology");
  concept.setTerminologyVersion("sourceTerminologyVersion");
  concept.setTerminologyId("1");
  concept.setLabel("integration-test");
  contentService.addConcept(concept);

  // Map users: a viewer and a specialist, plus the pre-existing "loader".
  viewer = new MapUserJpa();
  viewer.setApplicationRole(MapUserRole.VIEWER);
  viewer.setEmail("none");
  viewer.setName("Viewer");
  viewer.setUserName("view");
  mappingService.addMapUser(viewer);

  specialist = new MapUserJpa();
  specialist.setApplicationRole(MapUserRole.VIEWER);
  specialist.setEmail("none");
  specialist.setName("Specialist");
  specialist.setUserName("spec");
  mappingService.addMapUser(specialist);

  loader = mappingService.getMapUser("loader");

  // Review-style map project scoped to the test concept ("1").
  mapProject = new MapProjectJpa();
  mapProject.setSourceTerminology("sourceTerminology");
  mapProject.setSourceTerminologyVersion("sourceTerminologyVersion");
  mapProject.setDestinationTerminology("destinationTerminology");
  mapProject
      .setDestinationTerminologyVersion("destinationTerminologyVersion");
  mapProject.setGroupStructure(false);
  mapProject.setMapRefsetPattern(MapRefsetPattern.ExtendedMap);
  mapProject.setMapRelationStyle(RelationStyle.MAP_CATEGORY_STYLE);
  mapProject.setName("Test Project");
  mapProject.setPropagatedFlag(false);
  mapProject
      .setProjectSpecificAlgorithmHandlerClass("org.ihtsdo.otf.mapping.jpa.handlers.ICD10ProjectSpecificAlgorithmHandler");
  mapProject.setPublic(true);
  mapProject.setRefSetId("refsetId");
  mapProject.setRuleBased(true);
  mapProject.setWorkflowType(WorkflowType.REVIEW_PROJECT);
  mapProject.addMapSpecialist(specialist);
  mapProject.addScopeConcept("1");
  mappingService.addMapProject(mapProject);

  workflowService.computeWorkflow(mapProject);
}
<gh_stars>1-10 import {Tracker} from '@croct/plug/sdk/tracking'; import {PluginSdk} from '@croct/plug/plugin'; import {Evaluator} from '@croct/plug/sdk/evaluation'; import {SessionFacade, Tab, UserFacade} from '@croct/plug/sdk'; export function createPluginSdkMock(): PluginSdk { const { Evaluator: EvaluatorMock, } = jest.genMockFromModule<{Evaluator: {new(): Evaluator}}>('@croct/plug/sdk/evaluation'); const { Tracker: TrackerMock, } = jest.genMockFromModule<{Tracker: {new(): Tracker}}>('@croct/plug/sdk/tracking'); const { SessionFacade: SessionFacadeMock, } = jest.genMockFromModule<{SessionFacade: {new(): SessionFacade}}>('@croct/plug/sdk'); const { UserFacade: UserFacadeMock, } = jest.genMockFromModule<{UserFacade: {new(): UserFacade}}>('@croct/plug/sdk'); const { Tab: TabMock, } = jest.genMockFromModule<{Tab: {new(): Tab}}>('@croct/plug/sdk'); return { evaluator: new EvaluatorMock(), session: new SessionFacadeMock(), tab: new TabMock(), tracker: new TrackerMock(), user: new UserFacadeMock(), getTabStorage: jest.fn(), getBrowserStorage: jest.fn(), getLogger: jest.fn().mockReturnValue({ debug: jest.fn(), info: jest.fn(), warn: jest.fn(), error: jest.fn(), }), }; }
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: MIT package util import ( "github.com/aws/amazon-cloudwatch-agent/translator" "github.com/aws/amazon-cloudwatch-agent/translator/config" "github.com/aws/amazon-cloudwatch-agent/translator/util" ) const File_State_Folder_Linux = "/opt/aws/amazon-cloudwatch-agent/logs/state" func GetFileStateFolder() (fileStateFolder string) { if translator.GetTargetPlatform() == config.OS_TYPE_WINDOWS { fileStateFolder = util.GetWindowsProgramDataPath() + "\\Amazon\\AmazonCloudWatchAgent\\Logs\\state" } else { fileStateFolder = File_State_Folder_Linux } return }
def _simulate_request(self, url, method='GET', data=None, headers=None,
                      params=None):
    """Simulate a WSGI request against ``self.app``.

    Args:
        url: Target URL; only its path and query string are used.
        method: HTTP method (default ``'GET'``).
        data: Request body; a ``str`` is sent as-is, any other non-None
            value is JSON-encoded, and ``None`` sends an empty body.
        headers: Header mapping; defaults to ``self.headers``.
        params: Optional mapping of extra query parameters appended to the
            URL's own query string (values are stringified, not URL-encoded,
            matching previous behavior).

    Returns:
        A ``ResponseMock`` wrapping the start_response mock and WSGI result.
    """
    if headers is None:
        headers = self.headers
    headers = self._sanitize_headers(headers)

    if data is None:
        body = ''
    elif isinstance(data, str):
        body = data
    else:
        body = json.dumps(data, ensure_ascii=False)

    parsed_url = urllib_parse.urlparse(url)
    query = parsed_url.query
    if params is not None:
        extra = '&'.join(key + '=' + str(value)
                         for key, value in params.items())
        # Only insert the '&' separator when the URL already carried a
        # query string; previously an empty query produced a spurious
        # leading '&' (e.g. "&limit=10").
        query = query + '&' + extra if query else extra

    environ = ftest.create_environ(method=method,
                                   path=parsed_url.path,
                                   query_string=query,
                                   headers=headers,
                                   body=body)
    srmock = ftest.StartResponseMock()
    wsgi_result = self.app(environ, srmock)
    return ResponseMock(srmock, wsgi_result)
/// Update state with a `data` and pad it to blocksize with 0, if not
/// evenly divisible by blocksize.
///
/// Errors with `UnknownCryptoError` if the state has already been
/// finalized; an empty `data` is a successful no-op.
pub(crate) fn process_pad_to_blocksize(
    &mut self,
    data: &[u8],
) -> Result<(), UnknownCryptoError> {
    // Absorbing into a finalized state is a usage error.
    if self.is_finalized {
        return Err(UnknownCryptoError);
    }
    if data.is_empty() {
        return Ok(());
    }

    // Feed every complete block straight into the compression function.
    let mut blocksize_iter = data.chunks_exact(POLY1305_BLOCKSIZE);
    for block in &mut blocksize_iter {
        self.process_block(block).unwrap();
    }

    // Zero-pad the trailing partial block (if any) up to POLY1305_BLOCKSIZE
    // before processing it.
    let remaining = blocksize_iter.remainder();
    if !remaining.is_empty() {
        let mut pad = [0u8; POLY1305_BLOCKSIZE];
        pad[..remaining.len()].copy_from_slice(remaining);
        self.process_block(&pad).unwrap();
    }

    Ok(())
}
<filename>src/ui/Selector/SelectorList.tsx
import React from 'react';
import { FixedSizeList, ListChildComponentProps } from 'react-window';
import AutoSizer from "react-virtualized-auto-sizer";
import Box from '@mui/material/Box';
import ListItem from '@mui/material/ListItem';
import Typography from '@mui/material/Typography';

import Simulation from 'sim/Simulation';
import SelectorItem, { SelectorItemHeight } from './SelectorItem';

// Returns a copy of `nameSet` with `simName` toggled in or out.
const toggleSetItem = (nameSet: Set<string>, simName: string) => {
  const newSelected = new Set(nameSet);
  if (newSelected.has(simName)) {
    newSelected.delete(simName);
  } else {
    newSelected.add(simName)
  }
  return newSelected;
};

interface Props {
  // Simulations grouped by name; each key renders as one row.
  sims: Record<string, Simulation[]>;
  // Names matching the current search; undefined means "no filter applied".
  searched?: Set<string>;
  // Currently selected simulation names.
  selected: Set<string>;
  // Called with the new selection whenever a row is toggled.
  onSelectedChange: (sims: Set<string>) => void;
}

// Virtualized list of simulations: names matching the search come first,
// then a divider row, then names that are selected but excluded by the search.
const SelectorList: React.FC<Props> = ({ sims, searched, selected, onSelectedChange }) => {
  const simNames = Object.keys(sims);
  const primaryNames: string[] = [];
  const secondaryNames: string[] = [];
  simNames.forEach((simName) => {
    if (!searched || searched.has(simName)) {
      primaryNames.push(simName);
    } else if(selected.has(simName)) {
      secondaryNames.push(simName);
    }
  });
  // The +1 accounts for the divider row separating the two groups.
  const numExtras = secondaryNames.length ? secondaryNames.length + 1 : 0;

  const Row = ({ index, style }: ListChildComponentProps) => {
    let simName: string;
    if (index < primaryNames.length) {
      simName = primaryNames[index];
    } else if (index > primaryNames.length) {
      simName = secondaryNames[index - (primaryNames.length + 1)];
    } else {
      // index === primaryNames.length: render the divider between groups.
      return (
        <ListItem
          key="nonSearchSelBreak"
          style={style}
          sx={{ height: SelectorItemHeight }}
        >
          <Typography sx={{ mx: 2, mt: 2, fontStyle: 'italic' }} variant="body2">
            The following simulations are currently selected but don't match the search:
          </Typography>
        </ListItem>
      );
    }
    return (
      <SelectorItem
        sims={sims[simName]}
        style={style}
        selected={selected.has(simName)}
        onToggle={() => onSelectedChange(toggleSetItem(selected, simName))}
      />
    );
  };

  return (
    <Box sx={{ flex: '1 1 auto' }}>
      <AutoSizer>
        {({ height, width }) => (
          <FixedSizeList
            height={height}
            itemCount={primaryNames.length + numExtras}
            itemSize={SelectorItemHeight}
            width={width}
            itemData
          >
            {Row}
          </FixedSizeList>
        )}
      </AutoSizer>
    </Box>
  );
};

export default SelectorList;
/**
 * Tree node representing reference to the key of a Map association.
 *
 * @author Steve Ebersole
 */
public class MapKeyNode extends AbstractMapComponentNode {
	// Set during resolveColumns() when the map key is itself an entity;
	// remains null otherwise.
	private MapKeyEntityFromElement mapKeyEntityFromElement;

	@Override
	protected String expressionDescription() {
		return "key(*)";
	}

	@Override
	protected String[] resolveColumns(QueryableCollection collectionPersister) {
		// An entity-typed map key needs its own FromElement; when one is
		// found/created, it becomes this node's FromElement.
		this.mapKeyEntityFromElement = findOrAddMapKeyEntityFromElement( collectionPersister );
		if ( mapKeyEntityFromElement != null ) {
			setFromElement( mapKeyEntityFromElement );
		}
		final FromElement fromElement = getMapFromElement();
		return fromElement.toColumns(
				fromElement.getCollectionTableAlias(),
				"index", // the JPA KEY "qualifier" is the same concept as the HQL INDEX function/property
				getWalker().isInSelect()
		);
	}

	@Override
	protected Type resolveType(QueryableCollection collectionPersister) {
		// The map key corresponds to the collection persister's index type.
		return collectionPersister.getIndexType();
	}

	/**
	 * @return the FromElement generated for an entity-typed map key,
	 *         or null when the key is not an entity.
	 */
	public MapKeyEntityFromElement getMapKeyEntityFromElement() {
		return mapKeyEntityFromElement;
	}
}
<reponame>ant512/EarthShakerDS
#ifndef _WETSOILBMP3_H_
#define _WETSOILBMP3_H_

#include <bitmapwrapper.h>

/**
 * Bitmap wrapper whose constructor binds the wet-soil frame 3 bitmap data.
 */
class WetSoilBmp3 : public WoopsiGfx::BitmapWrapper {
public:
	WetSoilBmp3();
};

#endif
Binarity in Cool Asymptotic Giant Branch Stars: A GALEX Search for Ultraviolet Excesses The search for binarity in AGB stars is of critical importance for our understanding of how planetary nebulae acquire the dazzling variety of aspherical shapes which characterizes this class. However, detecting binary companions in such stars has been severely hampered due to their extreme luminosities and pulsations. We have carried out a small imaging survey of AGB stars in ultraviolet light (using GALEX), where these cool objects are very faint, in order to search for hotter companions. We report the discovery of significant far-ultraviolet excesses toward nine of these stars. The far-ultraviolet excess most likely results either directly from the presence of a hot binary companion or indirectly from a hot accretion disk around the companion.
def jaccard_similarity(correct_duplicates: List, retrieved_duplicates: List) -> float:
    """Jaccard similarity between the correct and retrieved duplicate lists.

    Returns 1.0 when both lists are empty, 0.0 when exactly one is empty,
    otherwise |intersection| / |union| of the deduplicated lists.
    """
    if not correct_duplicates and not retrieved_duplicates:
        return 1.0
    if not correct_duplicates or not retrieved_duplicates:
        return 0.0
    correct = set(correct_duplicates)
    retrieved = set(retrieved_duplicates)
    return len(retrieved & correct) / len(retrieved | correct)
// GetPkgMetadata reads and parses the package // metadata from the metadata.xml file func GetPkgMetadata(path string) Pkgmetadata { xmlFile, err := os.Open(path) if err != nil { logger.Error.Println("Error during reading package metadata") logger.Error.Println(err) } defer xmlFile.Close() byteValue, _ := ioutil.ReadAll(xmlFile) var pkgmetadata Pkgmetadata xml.Unmarshal(byteValue, &pkgmetadata) return pkgmetadata }
package main

import "fmt"

// version and build are not assigned anywhere in this file; they are
// presumably injected externally (e.g. via -ldflags) — TODO confirm.
var version string
var build string

// CmdVersion implements the "version" subcommand.
type CmdVersion struct{}

// Execute prints the binary name, version and build, then returns nil.
// The args parameter is ignored.
func (c *CmdVersion) Execute(args []string) error {
	fmt.Printf("%s (%s) - build %s\n", bin, version, build)
	return nil
}
class TaskType(object):
    """Associates a task type name with the name of its handler."""

    def __init__(self, typename, handlername):
        """Record the task type's name and its handler's name.

        Args:
            typename: Name identifying the task type.
            handlername: Name of the handler responsible for this type.
        """
        self.name = typename
        self.handler = handlername
Karl Rove is most famous for being architect of one of the worst presidencies in American history and then a Superpac strategist/delusional Romney campaign-night dead-ender. I’m a Rove junkie, and just as a snobbish fan of any popular band must have some obscure album he finds superior to the band’s most popular work, the Rove career function I find most delightful and rewarding is his work as a Wall Street Journal op-ed columnist. This is the medium that truly pulls back the curtain on Rove’s fascinating combination of insularity from facts outside the conservative pseudo-news bubble, delusional optimism, and utter lack of self-awareness. The Journal column is a weekly gift to amateur Rove psychoanalysts everywhere. Today’s column begins with Rove’s bizarre belief that the health exchanges in Obamacare are a “single-payer” system, reflecting his apparent confusion about what this term means. (The single-payer in a single-payer system is the government, not the insurance companies in the exchanges.) But the main point is the Orwellian proposition that “Mr. Obama’s pattern is to act, or fail to act, in a way that will leave his successor with a boatload of troubles.” What kind of president would bequeath a boatload of troubles to his successor? Oh, the irresponsibility. The first count in Rove’s indictment is the budget deficit, which “was equal to roughly 40% of GDP when Mr. Obama took office. At last year’s end it was 72% of GDP.” One possible cause of this deficit might be the over-trillion-dollar annual deficit, that one George W. Bush handed over when he left office, along with the massive economic collapse. The missions had been accomplished. Now Obama has un-accomplished it. Photo: STEPHEN JAFFE/AFP/Getty Images Rove’s column goes on to express very strong views on the need for fiscal responsibility: Then there’s Medicare, whose Hospital Insurance Trust Fund will go bankrupt in 2026. For five years, Mr. 
Obama has failed to offer a plan to restore Medicare’s fiscal health as he is required by the law establishing Medicare Part D. When Medicare goes belly-up, he will be out of office. The Congressional Budget Office projects the Affordable Care Act will reduce deficits by more than a trillion dollars in its second decade. Yes, the Hospital Insurance Trust Fund is expected to reach insolvency by 2026, but when Bush left office, that projected insolvency date was nine years earlier. Meanwhile, Medicare’s projected spending has fallen by nearly $600 billion since the passage of Obamacare: You can plausibly argue that these changes, combined with other cuts to long-term deficits, including partial expiration of the Bush tax cuts, don’t go far enough. But Rove is trying to make the case that Obama’s policies made the long-term budget outlook worse, which is false. You know whose policies made the long-term outlook way, way worse? Yes, of course you do. Literally the entire Bush agenda – tax cuts, new domestic spending, major expansions of the military — was financed by debt. Rove tries to paint Bush as fiscally responsible because Obama has “failed to offer a plan to restore Medicare’s fiscal health as he is required by the law establishing Medicare Part D.” That sentence is really the best. The point of the column is that Obama is terrible for leaving deficits to his successor. Rove is supporting this charge by citing a law his president passed, that created a major new debt-financed entitlement that Obama inherited. And he’s presenting this as Obama’s irresponsibility because the debt-financed entitlement Bush passed required the next president to come up with a law solving Medicare’s problems. And because Obama has alleviated but not completely solved Medicare’s problems, this shows that Obama has sloughed problems off onto the future. 
What a slacker Obama is, sloughing off problems onto his successor rather than solve them as the president who came before him required him by law to do.
import { InvoiceEntity } from "./invoice-entity";

/**
 * A payment applied against an invoice.
 */
export class PaymentEntity {
  id?: number;
  mode: string;
  datePayment?: Date;
  montant: number;
  invoice?: InvoiceEntity;
}
// changeAdminPassword changes the password for the given admin user. // The password is validated first. // This method also deletes all sessions for the user. func changeAdminPassword(userName string, appContext *MailAppContext, w http.ResponseWriter, r *http.Request) error { body, readErr := ioutil.ReadAll(r.Body) if readErr != nil { appContext.Logger.WithError(readErr).Info("Invalid request syntax to change admin password") http.Error(w, "Invalid request syntax", 400) return nil } var pwData struct { Password string } jsonErr := json.Unmarshal(body, &pwData) if jsonErr != nil { appContext.Logger.WithError(jsonErr).Info("Invalid request syntax to change admin password") http.Error(w, "Invalid request syntax", 400) return nil } if pwErr := passwordValid(pwData.Password); pwErr != nil { appContext.Logger.WithError(pwErr).WithField("admin-name", userName).Warn("Invalid password for admin user") http.Error(w, pwErr.Error(), 400) return nil } if updateErr := appContext.UserHandler.UpdatePassword(userName, []byte(pwData.Password)); updateErr != nil { return updateErr } adminID, getIDErr := appContext.UserHandler.GetUserID(userName) if getIDErr != nil { appContext.Logger.WithField("admin-user", userName).Error("Can't get admin id for user after changing password") return nil } if _, delSessionsErr := appContext.SessionController.DeleteEntriesForUser(adminID); delSessionsErr != nil { appContext.Logger.WithField("admin-user", userName).Error("Can't delete sessions for user after changing password, user may be still logged in!") return nil } return nil }
Measurements and Numerical Simulations of Interior Scattering from Simple PEC cavities with Complex Terminations To understand the scattering effect of complex termination, the authors have designed, fabricated and measured two kinds of PEC cavities, i.e. rectangular and circular cavities, with different terminations. To confirm and verify the accuracy of the measured results, all cavities have also been simulated at selected frequencies using our in-house higher-order finite element-boundary integral (FE-BI) code. All measured and simulated results are in good agreement
import { EMPTY, of, Subject } from 'rxjs';
import { ServerDateService } from 'app/shared/server-date.service';
import { ArtemisVersionInterceptor } from 'app/core/interceptor/artemis-version.interceptor';
import { AlertService, AlertType } from 'app/core/util/alert.service';
import { MockService } from 'ng-mocks';
import { MockArtemisServerDateService } from '../helpers/mocks/service/mock-server-date.service';
import { discardPeriodicTasks, fakeAsync, tick } from '@angular/core/testing';
import { HttpHeaders, HttpRequest, HttpResponse } from '@angular/common/http';
import { SwUpdate } from '@angular/service-worker';
import { ApplicationRef } from '@angular/core';
import { ARTEMIS_VERSION_HEADER, VERSION } from 'app/app.constants';

// Tests for the version interceptor: update-check scheduling, the outdated
// alert, and version-header-triggered service-worker update checks.
describe(`ArtemisVersionInterceptor`, () => {
    let appRef: ApplicationRef;
    let alertService: AlertService;
    let serverDateService: ServerDateService;
    let swUpdate: any;
    let checkForUpdateSpy: any;
    let activateUpdateSpy: any;

    beforeAll(() => {
        jest.useFakeTimers();
    });

    afterAll(() => {
        jest.useRealTimers();
    });

    beforeEach(() => {
        // Minimal SwUpdate stub; both update calls resolve successfully.
        swUpdate = {
            isEnabled: true,
            activated: EMPTY,
            available: EMPTY,
            checkForUpdate: () => Promise.resolve(true),
            activateUpdate: () => Promise.resolve(true),
        };
        checkForUpdateSpy = jest.spyOn(swUpdate, 'checkForUpdate');
        activateUpdateSpy = jest.spyOn(swUpdate, 'activateUpdate');
        appRef = { isStable: of(true) } as any as ApplicationRef;
        alertService = MockService(AlertService);
        serverDateService = new MockArtemisServerDateService();
    });

    afterEach(() => {
        jest.restoreAllMocks();
    });

    it('should check for an update immediately and after 60 seconds again if app is stable', fakeAsync(() => {
        new ArtemisVersionInterceptor(appRef, swUpdate as any as SwUpdate, serverDateService, alertService, {} as any as Window);
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(1);
        tick(60000);
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(2);
        discardPeriodicTasks();
    }));

    it('should check for an update after 30s if app is not stable', fakeAsync(() => {
        // App never reports stable: the first check happens on the 30s fallback.
        const sub = new Subject<boolean>();
        new ArtemisVersionInterceptor({ isStable: sub.asObservable() } as any as ApplicationRef, swUpdate as any as SwUpdate, serverDateService, alertService, {} as any as Window);
        sub.next(false);
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(0);
        tick(30000);
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(1);
        discardPeriodicTasks();
    }));

    it('should show the update alert and have functional callback', fakeAsync(() => {
        const funMock = jest.fn();
        const addAlertSpy = jest.spyOn(alertService, 'addAlert').mockImplementation(funMock);
        new ArtemisVersionInterceptor(appRef, swUpdate as any as SwUpdate, serverDateService, alertService, { location: { reload: jest.fn() } } as any as Window);
        tick();
        expect(addAlertSpy).toHaveBeenCalledTimes(1);
        expect(funMock).toHaveBeenCalledTimes(1);
        expect(funMock).toHaveBeenCalledWith(expect.objectContaining({ type: AlertType.INFO, message: 'artemisApp.outdatedAlert' }));
        expect(activateUpdateSpy).not.toHaveBeenCalled();
        // Invoking the alert's action callback must trigger the actual update.
        funMock.mock.calls[0][0].action.callback();
        expect(activateUpdateSpy).toHaveBeenCalledTimes(1);
        discardPeriodicTasks();
    }));

    it('should tell the worker to look for updates in HTTP requests (only) if the version is not equal to current', fakeAsync(() => {
        const requestMock = new HttpRequest('GET', '/test');
        const intercept = new ArtemisVersionInterceptor(appRef, swUpdate as any as SwUpdate, serverDateService, alertService, {} as any as Window);
        tick();
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(1);
        // Response carries the current version: no additional update check.
        let mockHandler = {
            handle: jest.fn(() => of(new HttpResponse({ status: 200, body: {}, headers: new HttpHeaders({ [ARTEMIS_VERSION_HEADER]: VERSION }) }))),
        };
        intercept.intercept(requestMock, mockHandler).subscribe();
        tick();
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(1);
        // Response carries a different version: triggers another check.
        mockHandler = {
            handle: jest.fn(() => of(new HttpResponse({ status: 200, body: {}, headers: new HttpHeaders({ [ARTEMIS_VERSION_HEADER]: '0.0.0' }) }))),
        };
        intercept.intercept(requestMock, mockHandler).subscribe();
        expect(checkForUpdateSpy).toHaveBeenCalledTimes(2);
        discardPeriodicTasks();
    }));
});
/*
 * prefetch.cpp
 *
 * Copyright (C) 2021 Alibaba Group.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * See the file COPYING included with this distribution for more details.
 */
#include <memory>
#include <vector>
#include <map>
#include <queue>
#include <unistd.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include "prefetch.h"
#include "overlaybd/fs/forwardfs.h"
#include "overlaybd/fs/localfs.h"
#include "overlaybd/fs/zfile/crc32/crc32c.h"
#include "overlaybd/alog.h"
#include "overlaybd/alog-stdstring.h"
#include "overlaybd/photon/thread11.h"

using namespace std;

namespace FileSystem {

class PrefetcherImpl;

// File wrapper that forwards preads to the source file and, in Record mode,
// reports each successful read to the prefetcher's trace.
class PrefetchFile : public ForwardFile {
public:
    PrefetchFile(IFile* src_file, uint32_t layer_index, Prefetcher* prefetcher);

    ssize_t pread(void* buf, size_t count, off_t offset) override;

private:
    uint32_t m_layer_index;      // which image layer this file belongs to
    PrefetcherImpl* m_prefetcher;
};

// Prefetcher that either records pread traces to a trace file or replays a
// previously recorded trace with a pool of worker threads.
class PrefetcherImpl : public Prefetcher {
public:
    explicit PrefetcherImpl(const string& trace_file_path) {
        // Detect mode
        size_t file_size = 0;
        m_mode = detect_mode(trace_file_path, &file_size);
        m_lock_file_path = trace_file_path + ".lock";
        m_ok_file_path = trace_file_path + ".ok";
        LOG_INFO("Prefetch: run with mode `, trace file is `", m_mode, trace_file_path);

        // Open trace file (write-only when recording, read-only when replaying)
        if (m_mode != Mode::Disabled) {
            int flags = m_mode == Mode::Record ? O_WRONLY : O_RDONLY;
            m_trace_file = FileSystem::open_localfile_adaptor(trace_file_path.c_str(), flags, 0666, 2);
        }

        // Loop detect lock file if going to record; recording stops and dumps
        // once an external party removes the lock file.
        if (m_mode == Mode::Record) {
            int lock_fd = open(m_lock_file_path.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_EXCL, 0666);
            close(lock_fd);
            auto th = photon::thread_create11(&PrefetcherImpl::detect_lock, this);
            m_detect_thread = photon::thread_enable_join(th);
        }

        // Reload if going to replay
        if (m_mode == Mode::Replay) {
            reload(file_size);
        }
    }

    ~PrefetcherImpl() {
        if (m_mode == Mode::Record) {
            m_record_stopped = true;
            if (m_detect_thread_interruptible) {
                photon::thread_shutdown((photon::thread*) m_detect_thread);
            }
            photon::thread_join(m_detect_thread);
            dump();
        } else if (m_mode == Mode::Replay) {
            m_replay_stopped = true;
            for (auto th : m_replay_threads) {
                photon::thread_shutdown((photon::thread*) th);
                photon::thread_join(th);
            }
        }
        if (m_trace_file != nullptr) {
            m_trace_file->close();
            m_trace_file = nullptr;
        }
    }

    IFile* new_prefetch_file(IFile* src_file, uint32_t layer_index) override {
        return new PrefetchFile(src_file, layer_index, this);
    }

    // Append one trace entry; ignored once recording has stopped.
    void record(TraceOp op, uint32_t layer_index, size_t count, off_t offset) override {
        if (m_record_stopped) {
            return;
        }
        TraceFormat trace = {op, layer_index, count, offset};
        m_record_array.push_back(trace);
    }

    // Spawn REPLAY_CONCURRENCY workers that drain the replay queue.
    void replay() override {
        if (m_mode != Mode::Replay) {
            return;
        }
        if (m_replay_queue.empty() || m_src_files.empty()) {
            return;
        }
        LOG_INFO("Prefetch: Replay ` records from ` layers", m_replay_queue.size(), m_src_files.size());
        for (int i = 0; i < REPLAY_CONCURRENCY; ++i) {
            auto th = photon::thread_create11(&PrefetcherImpl::replay_worker_thread, this);
            auto join_handle = photon::thread_enable_join(th);
            m_replay_threads.push_back(join_handle);
        }
    }

    int replay_worker_thread() {
        static char buf[MAX_IO_SIZE]; // multi threads reuse one buffer
        while (!m_replay_queue.empty() && !m_replay_stopped) {
            auto trace = m_replay_queue.front();
            m_replay_queue.pop();
            auto iter = m_src_files.find(trace.layer_index);
            if (iter == m_src_files.end()) {
                continue;
            }
            auto src_file = iter->second;
            if (trace.op == PrefetcherImpl::TraceOp::READ) {
                ssize_t n_read = src_file->pread(buf, trace.count, trace.offset);
                if (n_read != (ssize_t) trace.count) {
                    LOG_ERROR("Prefetch: replay pread failed: `, `, respect: `, got: `",
                              ERRNO(), trace, trace.count, n_read);
                    continue;
                }
            }
        }
        // Let in-flight workers settle, then release the shared buffer's
        // pages back to the OS (only once, by the first finishing worker).
        photon::thread_sleep(3);
        if (!m_buffer_released) {
            m_buffer_released = true;
            madvise(buf, MAX_IO_SIZE, MADV_DONTNEED);
        }
        return 0;
    }

    void register_src_file(uint32_t layer_index, IFile* src_file) {
        m_src_files[layer_index] = src_file;
    }

private:
    // On-disk record layout; checksummed as raw bytes.
    struct TraceFormat {
        TraceOp op;
        uint32_t layer_index;
        size_t count;
        off_t offset;
    };

    // Trace file header preceding the packed TraceFormat records.
    struct TraceHeader {
        uint32_t magic = 0;
        size_t data_size = 0;
        uint32_t checksum = 0;
    };

    static const int MAX_IO_SIZE = 1024 * 1024;
    static const int REPLAY_CONCURRENCY = 16;
    static const uint32_t TRACE_MAGIC = 3270449184; // CRC32 of `Container Image Trace Format`

    vector<TraceFormat> m_record_array;
    queue<TraceFormat> m_replay_queue;
    map<uint32_t, IFile*> m_src_files;
    vector<photon::join_handle*> m_replay_threads;
    photon::join_handle* m_detect_thread = nullptr;
    bool m_detect_thread_interruptible = false;
    string m_lock_file_path;
    string m_ok_file_path;
    IFile* m_trace_file = nullptr;
    bool m_replay_stopped = false;
    bool m_record_stopped = false;
    bool m_buffer_released = false;

    // Write header + records (+checksum) to the trace file, remove the lock
    // file and create the ".ok" marker. Truncates the file on partial writes.
    int dump() {
        if (m_trace_file == nullptr) {
            return 0;
        }
        // NOTE(review): unlink is taken when access() reports the OK file as
        // missing — this condition looks inverted; confirm intent.
        if (access(m_ok_file_path.c_str(), F_OK) != 0) {
            unlink(m_ok_file_path.c_str());
        }
        auto close_trace_file = [&]() {
            if (m_trace_file != nullptr) {
                m_trace_file->close();
                m_trace_file = nullptr;
            }
        };
        DEFER(close_trace_file());

        TraceHeader hdr = {};
        hdr.magic = TRACE_MAGIC;
        hdr.checksum = 0; // calculate and re-write checksum later
        hdr.data_size = sizeof(TraceFormat) * m_record_array.size();

        ssize_t n_written = m_trace_file->write(&hdr, sizeof(TraceHeader));
        if (n_written != sizeof(TraceHeader)) {
            m_trace_file->ftruncate(0);
            LOG_ERRNO_RETURN(0, -1, "Prefetch: dump write header failed");
        }

        for (auto& each : m_record_array) {
            hdr.checksum = crc32::crc32c_extend(&each, sizeof(TraceFormat), hdr.checksum);
            n_written = m_trace_file->write(&each, sizeof(TraceFormat));
            if (n_written != sizeof(TraceFormat)) {
                m_trace_file->ftruncate(0);
                LOG_ERRNO_RETURN(0, -1, "Prefetch: dump write content failed");
            }
        }

        // Re-write the header in place, now with the final checksum.
        n_written = m_trace_file->pwrite(&hdr, sizeof(TraceHeader), 0);
        if (n_written != sizeof(TraceHeader)) {
            m_trace_file->ftruncate(0);
            LOG_ERRNO_RETURN(0, -1, "Prefetch: dump write header(checksum) failed");
        }

        unlink(m_lock_file_path.c_str());
        int ok_fd = open(m_ok_file_path.c_str(), O_RDWR | O_CREAT | O_TRUNC | O_EXCL, 0666);
        if (ok_fd < 0) {
            LOG_ERRNO_RETURN(0, -1, "Prefetch: open OK file failed");
        }
        close(ok_fd);
        return 0;
    }

    // Validate header/size/checksum and load all records into m_replay_queue.
    // On checksum mismatch the queue is cleared again.
    int reload(size_t trace_file_size) {
        // Reload header
        TraceHeader hdr = {};
        ssize_t n_read = m_trace_file->read(&hdr, sizeof(TraceHeader));
        if (n_read != sizeof(TraceHeader)) {
            LOG_ERRNO_RETURN(0, -1, "Prefetch: reload header failed");
        }
        if (TRACE_MAGIC != hdr.magic) {
            LOG_ERROR_RETURN(0, -1, "Prefetch: trace magic mismatch");
        }
        if (trace_file_size != hdr.data_size + sizeof(TraceHeader)) {
            LOG_ERROR_RETURN(0, -1, "Prefetch: trace file size mismatch");
        }

        // Reload content
        uint32_t checksum = 0;
        TraceFormat fmt = {};
        for (int i = 0; i < hdr.data_size / sizeof(TraceFormat); ++i) {
            n_read = m_trace_file->read(&fmt, sizeof(TraceFormat));
            if (n_read != sizeof(TraceFormat)) {
                LOG_ERRNO_RETURN(0, -1, "Prefetch: reload content failed");
            }
            checksum = crc32::crc32c_extend(&fmt, sizeof(TraceFormat), checksum);
            // Save in memory
            m_replay_queue.push(fmt);
        }
        if (checksum != hdr.checksum) {
            queue<TraceFormat> tmp;
            m_replay_queue.swap(tmp);
            LOG_ERROR_RETURN(0, -1, "Prefetch: reload checksum error");
        }
        LOG_INFO("Prefetch: Reload ` records", m_replay_queue.size());
        return 0;
    }

    // Poll once per second; when the lock file disappears, stop recording
    // and dump the trace. thread_shutdown during sleep ends the loop early.
    int detect_lock() {
        while (!m_record_stopped) {
            m_detect_thread_interruptible = true;
            int ret = photon::thread_sleep(1);
            m_detect_thread_interruptible = false;
            if (ret != 0) {
                break;
            }
            if (access(m_lock_file_path.c_str(), F_OK) != 0) {
                m_record_stopped = true;
                dump();
                break;
            }
        }
        return 0;
    }

    friend LogBuffer& operator<<(LogBuffer& log, const PrefetcherImpl::TraceFormat& f);
};

LogBuffer& operator<<(LogBuffer& log, const PrefetcherImpl::TraceFormat& f) {
    return log << "Op " << char(f.op) << ", Count " << f.count << ", Offset " << f.offset
               << ", Layer_index " << f.layer_index;
}

PrefetchFile::PrefetchFile(IFile* src_file, uint32_t layer_index, Prefetcher* prefetcher)
    : ForwardFile(src_file), m_layer_index(layer_index), m_prefetcher((PrefetcherImpl*) prefetcher) {
    // In Replay mode the prefetcher needs the source file to serve traces.
    if (m_prefetcher->get_mode() == PrefetcherImpl::Mode::Replay) {
        m_prefetcher->register_src_file(layer_index, src_file);
    }
}

ssize_t PrefetchFile::pread(void* buf, size_t count, off_t offset) {
    ssize_t n_read = m_file->pread(buf, count, offset);
    // Only fully satisfied reads are recorded into the trace.
    if (n_read == (ssize_t) count && m_prefetcher->get_mode() == PrefetcherImpl::Mode::Record) {
        m_prefetcher->record(PrefetcherImpl::TraceOp::READ, m_layer_index, count, offset);
    }
    return n_read;
}

Prefetcher* new_prefetcher(const string& trace_file_path) {
    return new PrefetcherImpl(trace_file_path);
}

// Missing trace file -> Disabled; empty file -> Record; non-empty -> Replay.
Prefetcher::Mode Prefetcher::detect_mode(const string& trace_file_path, size_t* file_size) {
    struct stat buf = {};
    int ret = stat(trace_file_path.c_str(), &buf);
    if (file_size != nullptr) {
        *file_size = buf.st_size;
    }
    if (ret != 0) {
        return Mode::Disabled;
    } else if (buf.st_size == 0) {
        return Mode::Record;
    } else {
        return Mode::Replay;
    }
}

}
Hemorrhage in the Floor of the Mouth After Second-Stage Surgery: Case Report Placement of dental implants in the interforaminal region of the mandible is generally considered a routine, simple, and safe procedure. However, severe bleeding and hematoma in the floor of the mouth have been reported as a rare but potentially fatal complication related to the placement of an implant in this region. The following report describes a case of life-threatening hemorrhage in the floor of the mouth after second-stage surgery to place the healing abutment. The implants were forced to match with the prosthesis in a severely atrophic upper jaw, resulting in a perforation of the lingual cortex and mucosa of the floor of the mouth. Clinicians who place implants should be knowledgeable in the treatment of such a serious complication.
package mongodbclient import ( "context" "time" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" ) type DBConfigForMongo struct { UserName string Password string Host string Port string AppName string DataBaseName string } var dbClient *mongo.Client var dbName string func InitMongoDB(config *DBConfigForMongo) (err error) { dbName = config.DataBaseName connectionString := "mongodb://" + config.UserName + ":" + config.Password + "@" + config.Host + config.Port + "/?appName=" + config.AppName dbClient, err = mongo.Connect(context.Background(), options.Client().ApplyURI(connectionString)) return } func GetDB() (db *mongo.Database) { db = dbClient.Database(dbName) return } func CloseClient() { ctx, _ := context.WithTimeout(context.Background(), 5*time.Second) _ = dbClient.Disconnect(ctx) }
/**
 * Get a read only set containing the entries in the Tar archive.
 * @return A read only set containing the Tar archive's entries.
 * @throws IllegalStateException If the Tar archive has been closed.
 */
public Set<Map.Entry<AbsoluteLocation, TarEntry>> entrySet() throws IllegalStateException
{
	// Accessing a closed archive is a programming error; fail fast.
	assertNotClosed();

	Set<Map.Entry<AbsoluteLocation, TarEntry>> entries = m_entries.entrySet();
	return entries;
}
Subir Verma (Ed.), Towards the Next Orbit: A Corporate Odyssey, 2010, National HRD Network, New Delhi: Response Books, pp. 466, Rs795. Journal of Human Values, 17, 2 (2011): 185–191 Chapter 14 (‘Professional Ethics in the Light of Right Understanding’) starts by defining profession and addresses the issues pertaining to the unethical conduct of profession which has become a widespread concern. The authors take an attempt to make the readers visualize the real competence in professional ethics and how it is ensured. The competence can be manifested by clarity of goals, confidence in oneself, mutually fulfilling behaviour and mutually enriching interactions. The authors discuss the dilemmas and contradictions for various unethical practices for profit maximization. The authors suggest right understanding as the solution to these dilemmas and contradictions. Chapter 15 (‘Vision for Holistic Technologies, Production System and Management Models’) provides a vision of a model of living which is inherently conducive to the needs of all human beings and also compatible with nature. The chapter focuses on identifying the salient criteria for developing these appropriate technologies, production system and management models. The authors also suggest various individual criteria for technology, production system and management models. The authors suggest that we need to learn from the system of the nature and traditional practices and live in harmony at all levels. Chapter16 (‘Journey towards the Holistic Alternative—The Road Ahead’) being the last chapter briefly recapitulates the core message of the book and then helps in visualizing the steps that each of us need to take to transform from our present state towards a holistic way of life. After discussing the transition required at various levels (the four orders), the authors believe that to expedite transition towards a holistic alternative, mass awareness and a widespread of humanistic education is necessary. 
All in all, the book can be treated as a basic primer, despite delving into foundations of Indian philosophy, and the use of many Sanskrit words. The USP of this book are the frameworks that are aimed at facilitating students to be able to become authentic through self-exploration. The book would, therefore, be helpful for those who seek self-contention and ethical education. However, the book may fall short of expectations of the management educators or practitioners as it does not provide standardized solutions to the current unethical behaviour.
/**
 * Convenient bean to create JSF info/warn/error messages.
 * Summary texts are resolved from the resource bundle (via {@code ResourcesUtil})
 * using the given key and arguments.
 * NOTE(review): the former class doc referenced {@code error(Throwable)}, but no
 * such overload exists in this class.
 */
@Named
@Singleton
@Lazy(false)
public class MessageUtil {

    // Static handle to the single container-managed instance; set in the constructor.
    private static MessageUtil instance;

    /** Returns the container-managed instance (populated when the bean is constructed). */
    public static MessageUtil getInstance() {
        return instance;
    }

    /**
     * Maps a JSF message severity to a CSS-friendly class name
     * ("info", "warn", "error" or "fatal").
     *
     * @throws IllegalStateException if the severity is none of the four standard JSF severities
     */
    public static String toCssFriendly(Severity severity) {
        if (severity.equals(SEVERITY_INFO)) {
            return "info";
        } else if (severity.equals(SEVERITY_WARN)) {
            return "warn";
        } else if (severity.equals(SEVERITY_ERROR)) {
            return "error";
        } else if (severity.equals(SEVERITY_FATAL)) {
            return "fatal";
        }
        throw new IllegalStateException("Unexpected message severity: " + severity.toString());
    }

    // Resolves message keys (plus arguments) into summary texts.
    @Inject
    private ResourcesUtil resourcesUtil;

    // Renders entities into human-readable strings for *Entity message variants.
    @Inject
    private TypeAwarePrinter printer;

    public MessageUtil() {
        // Publishing 'this' from the constructor is safe only because the bean
        // is a container-managed singleton (see @Singleton above).
        instance = this;
    }

    // -- info

    /** Adds a global INFO message whose summary is resolved from the given resource key. */
    public void info(String summaryKey, Object... args) {
        addFacesMessageUsingKey(SEVERITY_INFO, summaryKey, args);
    }

    /** Adds a global INFO message for an entity, rendered via the injected printer. */
    public void infoEntity(String summaryKey, Identifiable<?> entity) {
        addFacesMessageUsingKey(SEVERITY_INFO, summaryKey, printer.print(entity));
    }

    /** Creates (without adding) an INFO message resolved from the given resource key. */
    public FacesMessage newInfo(String summaryKey, Object... args) {
        return newFacesMessageUsingKey(SEVERITY_INFO, summaryKey, args);
    }

    // -- warning

    /** Adds a global WARN message whose summary is resolved from the given resource key. */
    public void warning(String summaryKey, Object... args) {
        addFacesMessageUsingKey(SEVERITY_WARN, summaryKey, args);
    }

    /** Creates (without adding) a WARN message resolved from the given resource key. */
    public FacesMessage newWarning(String summaryKey, Object... args) {
        return newFacesMessageUsingKey(SEVERITY_WARN, summaryKey, args);
    }

    // -- error

    /** Adds a global ERROR message whose summary is resolved from the given resource key. */
    public void error(String summaryKey, Object... args) {
        addFacesMessageUsingKey(SEVERITY_ERROR, summaryKey, args);
    }

    /** Creates (without adding) an ERROR message resolved from the given resource key. */
    public FacesMessage newError(String summaryKey, Object... args) {
        return newFacesMessageUsingKey(SEVERITY_ERROR, summaryKey, args);
    }

    // -- fatal

    /** Adds a global FATAL message whose summary is resolved from the given resource key. */
    public void fatal(String summaryKey, Object... args) {
        addFacesMessageUsingKey(SEVERITY_FATAL, summaryKey, args);
    }

    /** Creates (without adding) a FATAL message resolved from the given resource key. */
    public FacesMessage newFatal(String summaryKey, Object... args) {
        return newFacesMessageUsingKey(SEVERITY_FATAL, summaryKey, args);
    }

    /** Adds the message to the current FacesContext as a global (null client id) message; no-op for null. */
    private void addFacesMessage(FacesMessage fm) {
        if (fm != null) {
            FacesContext.getCurrentInstance().addMessage(null, fm);
        }
    }

    // Convenience overload that wraps a single argument into an array.
    private void addFacesMessageUsingKey(Severity severity, String summaryKey, Object arg) {
        addFacesMessageUsingKey(severity, summaryKey, new Object[] { arg });
    }

    private void addFacesMessageUsingKey(Severity severity, String summaryKey, Object[] args) {
        addFacesMessage(newFacesMessageUsingKey(severity, summaryKey, args));
    }

    // Resolves the summary text from the resource bundle, then builds the message.
    private FacesMessage newFacesMessageUsingKey(Severity severity, String summaryKey, Object[] args) {
        return newFacesMessageUsingText(severity, resourcesUtil.getProperty(summaryKey, args));
    }

    private FacesMessage newFacesMessageUsingText(Severity severity, String text) {
        FacesMessage fm = new FacesMessage(text);
        fm.setSeverity(severity);
        return fm;
    }
}
/*=========================================================================

  Program:   ParaView
  Module:    $RCSfile$

  Copyright (c) Kitware, Inc.
  All rights reserved.
  See Copyright.txt or http://www.paraview.org/HTML/Copyright.html for details.

  This software is distributed WITHOUT ANY WARRANTY; without even
  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
  PURPOSE.  See the above copyright notice for more information.

=========================================================================*/
#include "vtkSIInputProperty.h"

#include "vtkAlgorithmOutput.h"
#include "vtkClientServerStream.h"
#include "vtkObjectFactory.h"
#include "vtkSISourceProxy.h"
#include "vtkPVXMLElement.h"
#include "vtkSMMessage.h"

#include <assert.h>

vtkStandardNewMacro(vtkSIInputProperty);
//----------------------------------------------------------------------------
vtkSIInputProperty::vtkSIInputProperty()
{
  // Default to the algorithm's first input port.
  this->PortIndex = 0;
}

//----------------------------------------------------------------------------
vtkSIInputProperty::~vtkSIInputProperty()
{
}

//----------------------------------------------------------------------------
// Reads the optional "port_index" attribute from the property's XML
// definition, in addition to whatever the superclass parses.
bool vtkSIInputProperty::ReadXMLAttributes(
  vtkSIProxy* proxyhelper, vtkPVXMLElement* element)
{
  if (!this->Superclass::ReadXMLAttributes(proxyhelper, element))
    {
    return false;
    }

  int port_index;
  if (element->GetScalarAttribute("port_index", &port_index))
    {
    this->SetPortIndex(port_index);
    }
  return true;
}

//----------------------------------------------------------------------------
// Applies the input connections described in the message to the server-side
// VTK object by building and processing a client-server stream.
bool vtkSIInputProperty::Push(vtkSMMessage* message, int offset)
{
  if(!this->GetCommand())
    {
    // It is fine to have a property without command but then we do nothing
    // here at the server side...
    return true;
    }

  // Locate this property's state in the message and sanity-check it.
  assert(message->ExtensionSize(ProxyState::property) > offset);
  const ProxyState_Property *prop = &message->GetExtension(ProxyState::property, offset);
  assert(strcmp(prop->name().c_str(), this->GetXMLName()) == 0);

  const Variant *variant = &prop->value();
  // Every input proxy id must come paired with an output-port number.
  assert(variant->proxy_global_id_size() == variant->port_number_size());

  std::vector<vtkTypeUInt32> proxy_ids;
  std::vector<int> output_ports;
  proxy_ids.resize(variant->proxy_global_id_size());
  output_ports.resize(proxy_ids.size());
  for (int cc=0; cc < variant->proxy_global_id_size(); cc++)
    {
    proxy_ids[cc] = variant->proxy_global_id(cc);
    output_ports[cc] = variant->port_number(cc);
    }

  vtkClientServerStream stream;
  if (this->CleanCommand)
    {
    // Drop previously set inputs before adding the new ones.
    stream << vtkClientServerStream::Invoke
           << this->SIProxyObject
           << "CleanInputs" << this->CleanCommand
           << vtkClientServerStream::End;
    }
  for (size_t cc=0; cc < proxy_ids.size(); cc++)
    {
    vtkSISourceProxy* siProxy = vtkSISourceProxy::SafeDownCast(
      this->GetSIObject(proxy_ids[cc]));
    // NOTE(review): a missing or non-source SI object yields a NULL
    // connection, which is still passed to AddInput.
    vtkAlgorithmOutput* input_connection =
      (siProxy? siProxy->GetOutputPort(output_ports[cc]) : NULL);
    stream << vtkClientServerStream::Invoke
           << this->SIProxyObject
           << "AddInput"
           << this->PortIndex
           << input_connection
           << this->GetCommand()
           << vtkClientServerStream::End;
    }
  if (this->NullOnEmpty && this->CleanCommand == NULL && proxy_ids.size() == 0)
    {
    // No inputs and no clean command: explicitly set a NULL input.
    stream << vtkClientServerStream::Invoke
           << this->SIProxyObject
           << "AddInput"
           << this->PortIndex
           << static_cast<vtkObjectBase*>(NULL)
           << this->GetCommand()
           << vtkClientServerStream::End;
    }

  // Save to cache when pulled for collaboration
  this->SaveValueToCache(message, offset);
  return this->ProcessMessage(stream);
}

//----------------------------------------------------------------------------
void vtkSIInputProperty::PrintSelf(ostream& os, vtkIndent indent)
{
  this->Superclass::PrintSelf(os, indent);
}
In the wake of the ALP’s poor result in the recent Western Australia Senate election, The Conversation is publishing a series of articles looking at the party’s brand, organisation and future prospects. The key problem facing federal Labor leader Bill Shorten in the wake of the Western Australian Senate re-election is one he has in common with every past Labor leader. The Australian Labor Party is always expected to be true to traditional “Labor values” but also has to adapt them to a changing Australia. While the ALP can talk about good and necessary government policies to help particular groups (such as the National Disability Insurance Scheme) and it can talk about health and education broadly, when it comes to economics and the need to help in a globalised age it has lost the ability to talk of government doing public good for all. All it can do is talk about the free market. From Calwell to Whitlam A recent poll asked if Labor looked after the interests of working people. And 54% thought it did, but this was hardly resounding, since 49% thought it was out of touch with ordinary people. Nonetheless, there are clear assumptions that Labor exists to look after the working class. This is a legacy from the past that is a burden and a blessing. The problem is, who are the working people in today’s Australia when most Australians define themselves as “middle class”? The ALP began in the 1890s as a vehicle for blue-collar working-class people who felt they had no political voice. Unions were woven into their lives in the small communities from which they barely strayed. Appealing to the blue-collar vote was acceptable as long as flat caps, heavy boots and misshapen trousers were the uniforms of working-class men. But in the 1940s Australia started to change appreciably. The link between the ALP and the votes of blue-collar workers started to break down. Parliamentary Library Blue-collar workers now make up only a small portion of the labour force. 
Among other things, industrialisation after World War Two expanded the working class in the factories but also changed its composition with immigration from all sorts of countries. Also, the seemingly widespread affluence of the new consumer society and burgeoning levels of home ownership during the 1950s and 1960s seemed to leave behind the old socialist arguments about poverty, exploitation and depression. Labor was moribund and stuck in the past with leaders like Arthur Calwell who refused to modernise the party. This was the case until Gough Whitlam came along. He broadened party organisation. He dropped the references to the working class while calling everyone “comrade” and attracted new categories of voters and new constituencies for Labor. Whitlam was the first political leader to recognise women as a political category in their own right and not appendages to men as daughters, wives and mothers. He appealed to Aboriginal Australians as voters and to migrants as more than New Australians or refugees. But Whitlam also aimed to keep the core voters of Labor with a general approach that emphasised the general good that the government could do for all. In other words, Whitlam broadened the ALP’s appeal beyond the unions and the traditional constituencies because all major political parties must be seen to appeal to sectional but also national interests. Reclaiming the core constituency The ALP has never just looked to the working class and unions to win office. The working class has never just been a solid Labor voting bloc. The growing sentiment of mythology and party lore over the decades has been an understandable but also distracting nostalgia. From the beginning of the 20th century, the ALP had to look beyond the core constituency of unions and workers to sections of the middle class to win office and to insist on ruling for all groups – not just for the union movement. This is what happens when you have to aim for more than 50% of the vote. 
And failure to look further afield helped account for the 23 years Labor spent out of power federally between 1949 and 1972. The ALP must get a majority by going beyond its core constituency while risking offence to this constituency. And yet, it must try to keep both. Consequently, there was often conflict in the past between trade unions and political leaders over this strategy. In recent years One of the problems since its 1996 election loss is that too many Labor MPs have swallowed the right-wing Kool-Aid. They took to heart John Howard’s critique that true Labor voters – the “Howard battlers” – left the ALP for him while the party chased the inner-city middle class and “minorities”. Howard defined the ALP in the absence of the party defining itself, even though it was swinging voters – not true Labor supporters – who flocked to him. Consequently, there was the rush of unrequited ALP affection for the “aspirationals” of western Sydney, for instance. However, the party forgot about appeasing its core vote with the promise of help. It forgot about the balancing act. Last year, voters in western Sydney were still squirming like a reluctant teenager embraced by an over-affectionate aunt when Julia Gillard toured the area. AAP/Paul Miller Diagnosing Labor’s malaise The ALP has been unable to define itself as anything but a party devoted to free-market reforms since the Hawke government in the 1980s. The party has lobotomised itself of its history. Although there had been progressive deregulation and reform over the years, this was done as part of an overall plan of state intervention. The Hawke government introduced the Accord, a series of wage and tax deals between the government and the union movement. 
It also brought in compulsory superannuation (government-legislated providing rivers of gold to a finance sector that hypocritically talks of free market for others) and a series of plans for cars, steel, textile, clothing and footwear that involved the government, appropriate businesses and business organisations, and unions. And helping BHP with almost A$1 billion doesn’t sound free market to me. These plans involved government spending to help modernise in exchange for a timeline for dropping tariffs and targets for exports. But this history has been cut from the corporate memory, and in the process the ALP has lost the ability to talk about government doing public good for ordinary people. This failure of the ALP to find its voice, except briefly in 2007, partly accounts for sentimental attachment to the unions as the ballast that will keep the party on the right social democratic path. This is even so despite union members making up only 18% of the workforce. This failure to find its voice also means the ALP is not knitting together constituencies when there is more than enough evidence that middle and working-class people will be loaded with the burdens of a changing economy.
Posted 27 January 2012 - 08:05 PM #1 Installation Instructions adb remount adb push /path/to/downloaded/apk/MangoToggle-v0.3beta.apk /system/app/MangoToggle.apk Links Releases MangoToggle is a system-level utility application providing a rooted user the ability to control various radios and/or settings based on the lock screen state of the device.* 2G/3G on GSM phones* Mobile Data* WiFi* Invert (Turn on when the phone is locked, rather then off)* Disable toggling while streaming audio (Useful for applications such as Google Music)* Screen Brightness* Recommend more!This applicationand must be run from the /system/app directory andIf someone on a differentROM wants me to make a version for that ROM, link me the source in a reply, and if possible, I'll build a version for that ROM.Please note, this applicationThe ROM is closed source and there is no way for me to get the system keys. In the future I may make a non-root version of this application that provides toggling for settings that do not require the internal API. Until then, sorry!The application is currently open source, released under the GPLv3, and can be found at my github. The application can be built from source rather then using stable releases as well. Information on how to do so can be found in the README file and requires the android SDK.A loader application is in the works and will be downloadable via the Android Market when finished. (It will provide the ability to download the system-level application and push it to your /system/app directory, without the need for using ADB.)Download the application from the releases section below.Use the following commands to push the application to your phone. Once pushed it should appear in your app drawer, if it does not, try rebooting your phone.Source:Issue Tracker/Features Request:v0.3betaRelease Date: Friday, January 27, 2012 Edited by mangoman2, 30 January 2012 - 04:18 PM.
// that sort of renders the ActionUtils.mineData() method useless here. Unless we are able to create a mock lookup with only projects and files. @Override public void run() { final Project[] projects = ActionsUtil.getProjectsFromLookup( context, command ); Runnable r2 = new Runnable() { @Override public void run() { if (command != null && projects.length > 0) { runSequentially(new LinkedList<Project>(Arrays.asList(projects)), ProjectAction.this, command); } else if (performer != null && projects.length == 1) { performer.perform(projects[0]); } } }; if (SwingUtilities.isEventDispatchThread()) { r2.run(); } else { SwingUtilities.invokeLater(r2); } }
18‐1: Invited Paper: Modeling and Optimizing Human‐in‐the‐Loop Visual Perception Using Immersive Displays: A Review New and rapidly-evolving classes of display devices bridge the gap between us and the immersive experiences of the future. The most intimate of these displays are the Virtual- and Augmented-Reality (VR and AR) ones, because they are capable of presenting synthetic environments that rival those in the real world. This ecosystem of personal and highly-immersive displays offers new challenges for research in computer graphics, display technologies, and human visual perception. While the extensive advancements in the areas of display and computer graphics technologies traditionally end at the on-screen “image,” there are several untapped opportunities for advances that exploit the interplay between the display characteristics and how our visual system perceives them. In this article, we review recent progress in understanding and modeling the perception of immersive displays, as well as perceptually optimizing display technologies for immersive experiences. We present this review in the form of a taxonomy that maps the various properties of modern displays with the perceptual phenomenon that most closely interacts with them. From this taxonomy, we deduce several unsolved challenges in understanding human perception of displays, as well as perceptually-optimal characteristics of future displays.
<filename>src/app/pages/registrationmanagement/registrationmanagement.module.ts
// Feature module for the registration-management area: pending
// verification/certification grids and their console (detail) views.
import { PendingCertificationComponent } from './pendingcertification/pendingcertification.component';
import { PendingCertificationGridButtonViewDetailComponent } from './pendingcertification/pendingcertification-gridbutton-viewdetail';
import { CertificationConsoleComponent } from './certificationconsole/certificationconsole.component';
import { PendingVerificationViewDetailButtonComponent } from './pendingverification/pendingverification.viewdetail.button';
import { RegistrationModule } from './../registration/registration.module';
import { VerificationConsoleComponent } from './verificationconsole/verificationconsole.component';
import { PendingVerificationComponent } from './pendingverification/pendingverification.component';
import { RegistrationManagementRoutingModule } from './registrationmanagement-routing.module';
import { RegistrationManagementComponent } from './registrationmanagement.component';
import { Ng2SmartTableModule } from 'ng2-smart-table';
import { NgModule } from '@angular/core';
import { ThemeModule } from '../../@theme/theme.module';
import { NgUploaderModule } from 'ngx-uploader';
import { PdfViewerModule } from 'ng2-pdf-viewer';

@NgModule({
  imports: [
    ThemeModule,
    Ng2SmartTableModule,
    NgUploaderModule,
    PdfViewerModule,
    RegistrationManagementRoutingModule,
    RegistrationModule,
  ],
  declarations: [
    RegistrationManagementComponent,
    PendingVerificationComponent,
    VerificationConsoleComponent,
    PendingVerificationViewDetailButtonComponent,
    CertificationConsoleComponent,
    PendingCertificationGridButtonViewDetailComponent,
    PendingCertificationComponent],
  providers: [],
  // The grid-button components are instantiated dynamically (by
  // ng2-smart-table cell renderers), hence listed as entry components.
  entryComponents: [PendingVerificationViewDetailButtonComponent,
    PendingCertificationGridButtonViewDetailComponent],
})
export class RegistrationManagementModule { }
import click
import os       # NOTE(review): unused in this chunk -- verify before removing
import shutil   # NOTE(review): unused in this chunk -- verify before removing

from .vid2slides import v2s


@click.command()
@click.argument('filename')
@click.option('-o', '--output', default='out.pdf')
def cli(filename: str, output: str) -> None:
    """
    Extracts a slideshow from a video presentation

    FILENAME is the input video; the result is written to the path given
    by -o/--output (default: out.pdf). Delegates all work to v2s().
    """
    v2s(filename, output)
<reponame>tobeno/sol export enum LogLevel { DEBUG = 'debug', } export function log(...args: any[]): void { console.log(...args); } export function logDebug(...args: any[]): void { if (process.env.LOG_LEVEL?.toLowerCase() !== LogLevel.DEBUG) { return; } console.debug(...args); } export function logError(...args: any[]): void { console.error(...args); }
// This implementation is used at runtime to check if a given request is // intended for this Route. Format collisions works like this: // * If route specifies format, it only gets requests for that format. // * If route doesn't specify format, it gets requests for any format. // Query collisions work like this: // * If route specifies a query, it only gets request that have queries. // * If route doesn't specify query, requests with & without queries collide. impl<'r> Collider<Request<'r>> for Route { fn collides_with(&self, req: &Request<'r>) -> bool { self.method == req.method() && self.uri.collides_with(req.uri()) && self.uri.query().map_or(true, |_| req.uri().query().is_some()) && match self.format { Some(ref mt_a) => match req.format() { Some(ref mt_b) => mt_a.collides_with(mt_b), None => false }, None => true } } }
/**
 * CreateStorageContainer enables you to create a Virtual Volume (VVol) storage container.
 * Storage containers are associated with a SolidFire storage system account, and are used
 * for reporting and resource allocation. Storage containers can only be associated with
 * virtual volumes. You need at least one storage container to use the Virtual Volumes feature.
 *
 * @param name            name of the storage container to create
 * @param initiatorSecret optional CHAP initiator secret for the container -- NOTE(review): semantics of omission (auto-generated?) not visible here, confirm against API docs
 * @param targetSecret    optional CHAP target secret for the container -- NOTE(review): see above
 * @param accountID       optional ID of the account to associate the container with
 * @return the result describing the newly created storage container
 **/
@Since("9.0")
@ConnectionType("Cluster")
public CreateStorageContainerResult createStorageContainer(
    String name,
    Optional<String> initiatorSecret,
    Optional<String> targetSecret,
    Optional<Long> accountID
    );
# Counts the colorings (mod 1e9+7) of a 2xN board tiled with dominoes.
# Only the first row is needed: equal adjacent characters mean a horizontal
# pair of dominoes, a lone character means a vertical domino.
MOD = 10 ** 9 + 7

n = int(input())
row_top = input()
row_bottom = input()  # read to consume the line; orientation follows from row_top alone

# Encode each column as 'y' (starts a horizontal pair) or 't' (vertical),
# then collapse the trailing half of every horizontal pair ('yt' -> 'y').
padded = row_top + "."
runs = "".join("y" if a == b else "t" for a, b in zip(padded, padded[1:]))
runs = runs.replace("yt", "y")

# First block: horizontal pair has 6 colorings, vertical domino has 3.
factors = [6 if runs[0] == "y" else 3]

# Each later block's choices depend on the previous block's orientation.
for prev, cur in zip(runs, runs[1:]):
    if cur == "t":
        factors.append(1 if prev == "y" else 2)
    else:
        factors.append(3 if prev == "y" else 2)

answer = 1
for factor in factors:
    answer = answer * factor % MOD
print(answer)
Get the biggest Aston Villa FC stories by email Subscribe Thank you for subscribing We have more newsletters Show me See our privacy notice Could not subscribe, try again later Invalid Email Hidden amongst another day of car-crash news for Aston Villa there was a rumour doing the rounds on forums about a new Chinese owner. Steve Hollis admits that the club are actively in talks for Randy Lerner to sell the club - some internet sites even went as far as naming the man looking to buy the club. Last year there was reportedly a firm interest from a Chinese consortium who sent representatives to England and did due diligence only for a deal to complete the club failing to materialise. The speculation now is that the consortium will be even more interested in the club, at an obviously reduced price. MORE: Aston Villa news LIVE | Gabby Agbonlahor and much more Below: Business Editor Graeme Brown explains why Villa fans might not see a quick sale Video Loading Video Unavailable Click to play Tap to play The video will start in 8 Cancel Play now Fans on forums have suggested that the man behind the consortium is Wang Jianlin, not a name that is familiar with everyone. Is the interest anything more than wild fanciful forum talk? It remains to be seen. Who is he? Good question, one thing is for sure, he could afford Aston Villa. In 2014 he was named by Forbes as the 26th richest man in the world with a net worth of $25 billion. This year he was named as the richest man in Asia with a net worth of $28.7 billion 4 months ago he bought a house for £80m in Kensington, London He has previously donated $197 million to charitable causes. He helped the restoration of an ancient temple in Nanjing. He also paid $200,00 for the restoration of the Electric Fountain in Beverley Hills. Could his love of restoring once great relics be his sole purpose for wanting to buy Villa? He is a former regimental commander of the People's Liberation Army with 17-years service. 
Handling Gabby Agbonlahor and Joleon Lescott would be a breeze. He was linked with buying a substantial stake in Atletico Madrid last year. Below: Mat Kendrick reacts to the latest Villa fiasco Video Loading Video Unavailable Click to play Tap to play The video will start in 8 Cancel Play now He is CEO, chairman and founder of the Dalian Wanda Group. His business is described online as "21.57 million square metres of investment property, 134 Wanda Shopping Plazas, 82 luxury hotels, 213 cinemas, 99 department stores, and 54 karaoke centres around China. The company became the world's largest theatre owner in 2012 when it acquired AMC Theaters . He bought out U.S.-based AMC Entertainment for $2.6 billion." He is clearly used to a bit of drama, again, that would serve him well. He is also described by the The Economic as "a man of Napoleonic ambition, who enforces "iron discipline" in the workplace, where employees are fined when they violate the company's conservative dress code." Yep, he would definitely whip Villa into shape. Wild rumours? For now at least, yes.
Introduction & Specifications Competing with Apple in the mobile space isn't rocket science, it's just a matter of sticking to a proven formula for success -- build a premium device with superior specs than the one Apple is selling, inject it with Android, and then undercut the iWhatever in price. That's not to say Apple is the only premium slate target of opportunity but we'd surmise Apple would consider it the sincerest for of flattery anyway. Incidentally, we've just described the ASUS ZenPad S 8.0. It's a well designed Android tablet with Intel inside, and it boasts better specs than the iPad mini 3. It's also less expensive -- as configured (see table below), the model we're reviewing runs a penny shy of $300, which is $99 less than the 16GB iPad mini 3, and $199 less than 64GB model. That kind of savings isn't chump change, folks. We're not trying to pit fanboys from both camps against one another, we're simply jumping straight to the value proposition that's being offered here. As far as Android tablets go, the ZenPad S 8.0 brushes shoulders with the Dell Venue 8 7000 and the more recent (and pricier) Samsung Galaxy Tab S2 that we've yet to review (coming soon). Offering an Android tablet with plenty of bang for your buck is quickly becoming an expectation of ASUS. Last year the company introduced us to the MeMO Pad 7, a lower end offering that paired a respectable spec sheet with a budget price tag, and now the ZenPad S 8.0 comes along to shake things in the premium tier. 
Have a look: ASUS ZenPad S 8.0 (Z580CA-C1-BK) Android Tablet Specifications & Features Processor Intel Atom Z3580 Processor (2M cache, 4 cores, up to 2.33 GHz) OS Android v5.0 Lollipop Audio 2 x front speakers w/ DTS HD Premium Sound, Sonic Master technology Memory 4GB LPDDR3 Storage 64GB eMMC Micro SD card reader (up to 128GB SDXC) Battery 15.2Wh Li-polymer Up to 8 hours Display 7.9-inch QXGA (2048x1536) IPS display Capacitive touchscreen, 10-point multitouch Weight 298 grams (0.65 pounds) Dimensions 203.2 x 134.5 x 6.6 mm (LxWxH) / 8 x 5.3 x 0.25 inches Color Black Cameras Rear: 8MP f2.0 Auto-focus Front: 5MP Sensors G-Sensor, E-compass, GPS, ambient light, hall Connectivity 802.11ac Wi-Fi Bluetooth 4.1 Smart Miracast support Ports 1 x USB Type-C 1 x 2-in-1 audio jack (headphone / mic-in) 1 x Micro SD card Price $299.99 as tested, as low as $199 Contents What you see above is a solid collection of hardware consisting of a quad-core Intel Atom Z3580 processor, 4GB of RAM, 64GB of internal storage, and modern amenities like 802.11ac Wi-Fi and a USB Type-C port.For $100 less, ASUS also sells a version of the ZenPad S 8.0 with half the amount of RAM (2GB versus 4GB) and storage (32GB versus 64GB), and a slightly slower processor (Intel Atom Z3530 versus Z3580). Our benchmarks and performance evaluation represent the higher end configuration ASUS sent us, but otherwise it's the same slate.Tablet makers typically don't deck out their slates with elaborate bundles and the ZenPad 8.0 S is no exception (nor do we expect it to be at this price). As you can see in the above video, ASUS includes a user guide, warranty booklet, AC adapter, and a USB Type-C charging cable with a standard USB connector on one end and a Type-C connector on the other.Let's move on to the design.
/** * Created by dajuejinxian on 2018/6/22. */ @RestController @RequestMapping("/admin") public class DemoAction extends Cors { @Autowired private IAgentUserDao agentUserDao; @Autowired private ILogRecordDao logRecordDao; @Autowired private IUserDao userDao; @Autowired private HomeService homeService; @Autowired private AgentService agentService; @Autowired private IAgentRecordsDao agentRecordsDao; @Autowired private IConstantDao constantDao; @Autowired private IChargeDao chargeDao; private static final Logger logger = LoggerFactory.getLogger(DemoAction.class); // class ConstantForm{ // // public ConstantForm() { // } // // String id; //// double init_money; // String apple_check; // // public String getId() { // return id; // } // // public void setId(String id) { // this.id = id; // } // // public String getApple_check() { // return apple_check; // } // // public void setApple_check(String apple_check) { // this.apple_check = apple_check; // } // } public static String getToken(long userId) { return MD5Util.MD5Encode("salt," + userId + System.currentTimeMillis(), "UTF-8"); } public static boolean isNumber(String str) { //采用正则表达式的方式来判断一个字符串是否为数字,这种方式判断面比较全 //可以判断正负、整数小数 boolean isInt = Pattern.compile("^-?[1-9]\\d*$").matcher(str).find(); boolean isDouble = Pattern.compile("^-?([1-9]\\d*\\.\\d*|0\\.\\d*[1-9]\\d*|0?\\.0+|0)$").matcher(str).find(); return isInt || isDouble; } public static Map<String, Object> getUserInfo(HttpServletRequest request) { return (Map<String, Object>) AgentUtil.caches.get("a"); } public static int getRole(long userId) { int role = 1; if (RedisManager.getAgentRedisService().isExit(userId)) { AgentBean agentBean = RedisManager.getAgentRedisService().getAgentBean(userId); if (agentBean.getIsPartner() == 1) { role = 3; } else { role = 2; } } return role; } @DemoChecker @RequestMapping("/timeSearch") public AgentResponse doSearch(String t1, String t2, int curPage) { if (curPage > 0) { curPage--; } String[] sA = null; if (t1.contains(",")) { sA 
= t1.split(",", 1000); } String[] sB = null; if (t2.contains(",")) { sB = t2.split(",", 1000); } SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); List<Date> listA = new ArrayList<>(); if (sA != null) { for (String str : sA) { try { Date date = simpleDateFormat.parse(str); listA.add(date); } catch (ParseException e) { // e.printStackTrace(); } } } List<Date> listB = new ArrayList<>(); if (sB != null) { for (String str : sB) { try { Date date = simpleDateFormat.parse(str); listB.add(date); } catch (ParseException e) { e.printStackTrace(); } } } int pageSize = 20; List<User> list = null; long count = 0; AgentResponse agentResponse = new AgentResponse(); if (sA != null && sB == null) { list = homeService.timeQuery(listA, listB, new PageRequest(curPage, pageSize)).getContent(); count = homeService.timeQueryCount(listA, listB); } else if (sB != null && sB == null) { // list = userDao.findUsersByLastLoginDateBetween(listB.get(0), listB.get(1)); list = homeService.timeQuery(listA, listB, new PageRequest(curPage, pageSize)).getContent(); count = homeService.timeQueryCount(listA, listB); } else if (sA != null && sB != null) { // list = userDao.findUsersByRegistDateBetweenAndLastLoginDateBetween(listA.get(0), listA.get(1), listB.get(0), listB.get(1)); list = homeService.timeQuery(listA, listB, new PageRequest(curPage, pageSize)).getContent(); count = homeService.timeQueryCount(listA, listB); } else { agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); agentResponse.setMsg("请选择日期"); return agentResponse; } Map<String, Object> result = new HashMap<>(); result.put("total", count); result.put("list", list); agentResponse.setData(result); return agentResponse; } @DemoChecker @RequestMapping("/roleInfo") public AgentResponse roleInfo(long userId) { long role = getRole(userId); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(role); return agentResponse; } @DemoChecker @RequestMapping("/toAgent") public AgentResponse 
toAgent(long userId) { AgentResponse agentResponse = new AgentResponse(); long role = getRole(userId); if (role == 2) { agentResponse.setMsg("设置失败!"); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); return agentResponse; } agentService.change2Agent(userId); return agentResponse; } @DemoChecker @RequestMapping("/toUser") public AgentResponse toUser(long userId) { AgentResponse agentResponse = new AgentResponse(); long role = getRole(userId); if (role == 1) { agentResponse.setMsg("设置失败!"); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); return agentResponse; } agentService.change2Player(userId); return agentResponse; } @DemoChecker @RequestMapping("/toPartner") public AgentResponse toPartner(long userId) { AgentResponse agentResponse = new AgentResponse(); long role = getRole(userId); if (role == 3) { agentResponse.setMsg("设置失败!"); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); return agentResponse; } agentService.change2Partner(userId); return agentResponse; } @DemoChecker @RequestMapping("/fetchAllPlayers") public AgentResponse fetchAllPlayers(int pageSize, int curPage, HttpServletRequest request) { if (curPage > 0) { curPage--; } Page page = userDao.findAll(new PageRequest(curPage, pageSize)); List<User> list = page.getContent(); Map<String, Object> result = new HashMap<>(); long count = userDao.count(); result.put("total", count); result.put("list", list); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(result); return agentResponse; } @DemoChecker @RequestMapping("/fetchPlayer") public AgentResponse fetchPlayer(long userId, HttpServletRequest request) { if (userId == 0) { return fetchAllPlayers(20, 1, request); } User user = userDao.findOne(userId); List<User> list = new ArrayList<>(); AgentResponse agentResponse = new AgentResponse(); if (user == null) { agentResponse.setData(list); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); Map<String, Object> 
result = new HashMap<>(); agentResponse.setMsg("没有记录"); result.put("total", 0); result.put("list", list); agentResponse.setData(result); } else { list.add(user); Map<String, Object> result = new HashMap<>(); result.put("total", 1); result.put("list", list); agentResponse.setData(result); } return agentResponse; } ; @DemoChecker @RequestMapping("/fetchDelegate") public AgentResponse fetchDelegate(long userId) { if (userId == 0) { return fetchDelegates(1); } // GameAgent gameAgent = gameAgentDao.findOne(userId); GameAgent gameAgent = homeService.findOneDelegate(userId); List<GameAgentVo> list = new ArrayList<>(); AgentResponse agentResponse = new AgentResponse(); if (gameAgent == null) { agentResponse.setData(list); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); Map<String, Object> result = new HashMap<>(); agentResponse.setMsg(" 没有记录 "); result.put("total", 0); result.put("list", list); agentResponse.setData(result); } else { GameAgentVo gameAgentVo = new GameAgentVo(); //手动设置上级代理id AgentBean agentBean = RedisManager.getAgentRedisService().getAgentBean(userId); gameAgent.setParentId(agentBean.getParentId()); BeanUtils.copyProperties(gameAgent, gameAgentVo); gameAgentVo.setIsPartnerDes(gameAgent.getIsPartner() == 1 ? 
"合伙人" : "代理"); list.add(gameAgentVo); Map<String, Object> result = new HashMap<>(); result.put("total", 1); result.put("list", list); agentResponse.setData(result); } return agentResponse; } @DemoChecker @RequestMapping("/fetchDelegates") public AgentResponse fetchDelegates(int curPage) { if (curPage > 0) { curPage--; } int pageSize = 20; // Page<GameAgent> page = gameAgentDao.findAll(new PageRequest(curPage, pageSize)); Page<GameAgent> page = homeService.findDelegates(new PageRequest(curPage, pageSize)); List<GameAgent> list = page.getContent(); List<GameAgentVo> voList = new ArrayList<>(); for (GameAgent gameAgent : list) { GameAgentVo gameAgentVo = new GameAgentVo(); BeanUtils.copyProperties(gameAgent, gameAgentVo); User user = userDao.findOne(gameAgent.getId()); gameAgentVo.setName(user.getUsername()); voList.add(gameAgentVo); } long count = homeService.delegatesCount(); Map<String, Object> rs = new HashMap<>(); rs.put("total", count); rs.put("list", voList); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/fetchPartner") public AgentResponse fetchPartner(long userId) { if (userId == 0) { return fetchPartners(1); } // GameAgent gameAgent = gameAgentDao.findOne(userId); GameAgent gameAgent = homeService.findOnePartner(userId); logger.info("=================================================="); logger.info("userId is{}, game agent is{}", userId, gameAgent); List<GameAgentVo> list = new ArrayList<>(); AgentResponse agentResponse = new AgentResponse(); if (gameAgent == null) { agentResponse.setData(list); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); Map<String, Object> result = new HashMap<>(); agentResponse.setMsg(" 没有记录 "); result.put("total", 0); result.put("list", list); agentResponse.setData(result); } else { GameAgentVo gameAgentVo = new GameAgentVo(); BeanUtils.copyProperties(gameAgent, gameAgentVo); AgentUser agentUser = 
agentUserDao.findAgentUserByInvite_code(gameAgent.getId() + ""); if (agentUser != null) { gameAgentVo.setPassword(agentUser.getPassword()); } gameAgentVo.setIsPartnerDes(gameAgent.getIsPartner() == 1 ? "合伙人" : "代理"); list.add(gameAgentVo); Map<String, Object> result = new HashMap<>(); result.put("total", 1); result.put("list", list); agentResponse.setData(result); } return agentResponse; } @DemoChecker @RequestMapping("/fetchPartners") public AgentResponse fetchPartners(int curPage) { if (curPage > 0) { curPage--; } int pageSize = 20; Page<GameAgent> page = homeService.findPartner(new PageRequest(curPage, pageSize)); List<GameAgent> list = page.getContent(); List<GameAgentVo> voList = new ArrayList<>(); for (GameAgent gameAgent : list) { GameAgentVo gameAgentVo = new GameAgentVo(); BeanUtils.copyProperties(gameAgent, gameAgentVo); User user = userDao.findOne(gameAgent.getId()); gameAgentVo.setName(user.getUsername()); AgentUser agentUser = agentUserDao.findAgentUserByUsername(gameAgent.getId() + ""); System.out.println("agent user is " + agentUser); if (agentUser != null) { gameAgentVo.setPassword(agentUser.getPassword()); } // gameAgentVo.setPassword(agentUser.getPassword()); // gameAgentVo.setInvite_code(agentUser.getInvite_code()); voList.add(gameAgentVo); } long count = homeService.partnerCount(); Map<String, Object> rs = new HashMap<>(); rs.put("total", count); rs.put("list", voList); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/logout") public AgentResponse logout() { String token = AgentUtil.findTokenInHeader(); if (AgentUtil.caches.keySet().contains(token)) { AgentUtil.caches.remove(token); } AgentResponse agentResponse = new AgentResponse(); return agentResponse; } @DemoChecker @RequestMapping(value = "/doChargeNew", method = RequestMethod.POST) public AgentResponse doChargeNew(HttpServletRequest request, long userId, @RequestParam(value = "money", required = true) long 
money, String type) { AgentResponse agentResponse = new AgentResponse(); UserBean userBean = RedisManager.getUserRedisService().getUserBean(userId); UserService userService = SpringUtil.getBean(UserService.class); User user = userService.getUserByUserId(userId); String name = ""; if (userBean == null) { if (user != null) { if (type.equals("1")) { user.setMoney(user.getMoney() + money); } else if (type.equals("2")) { user.setGold(user.getGold() + money); } userService.save(user); name = user.getUsername(); } else { agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); return agentResponse; } } else {//在redis里 name = userBean.getUsername(); if (type.equals("1")) { RedisManager.getUserRedisService().addUserMoney(userId, money); GameUserService.saveUserBean(userId); } else if (type.equals("2")) { RedisManager.getUserRedisService().addUserGold(userId, money); GameUserService.saveUserBean(userId); } } Charge charge = new Charge(); charge.setOrderId("" + IdWorker.getDefaultInstance().nextId()); charge.setUserid(userId); charge.setUsername(name); charge.setCreatetime(new Date()); charge.setCallbacktime(new Date()); charge.setOrigin(1); charge.setMoney(money); charge.setMoney_point(0); charge.setRecharge_source("" + IChargeType.AGENT); charge.setStatus(1); charge.setChargeType(type == "1" ? 
0 : 1); SpringUtil.getBean(ChargeService.class).save(charge); Map<String, Object> rs = new HashMap<>(); rs.put("money", money); rs.put("type", type); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/downward") public AgentResponse downwardDelegate(HttpServletRequest request, long agentId) { String token = AgentUtil.findTokenInHeader(); int self_agentId = (int) AgentUtil.getUserIdByToken(token); logger.info("self_id:{}, agent id:{}", self_agentId, agentId); AgentUser agentUser = agentUserDao.findOne(self_agentId); logger.info("agentUser:{}", agentUser); int self_code = 0; if (self_agentId != 1) { self_code = Integer.parseInt(agentUser.getUsername()); } // logger.info("self_code:{}", self_code); //先给个demo if (agentId == 0) { Map<String, Object> rrss = assDemo(); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rrss); return agentResponse; } //如果代理是空的 if (RedisManager.getAgentRedisService().getAgentBean(agentId) == null) { AgentResponse agentResponse = new AgentResponse(); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); agentResponse.setMsg("代理不存在"); return agentResponse; } AgentBean agentBean = RedisManager.getAgentRedisService().getAgentBean(agentId); if (agentBean.getPartnerId() != self_agentId && self_agentId != 1 && agentId != self_code) { AgentResponse agentResponse = new AgentResponse(); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); agentResponse.setMsg("没有权限"); return agentResponse; } //直接玩家 List<Long> aList = new ArrayList<>(); //二级代理 List<Long> bList = new ArrayList<>(); //三级代理 List<Long> cList = new ArrayList<>(); agentBean.getChildList().stream() .forEach(x -> { if (RedisManager.getAgentRedisService().isExit(x)) { bList.add(x); } else { aList.add(x); } }); bList.stream() .forEach(x -> { if (RedisManager.getAgentRedisService().isExit(x)) { cList.add(x); } }); List<User> aUsers = userDao.findUsersByIdIn(aList); List<User> bUsers = 
userDao.findUsersByIdIn(bList); Map<String, Object> rs = assembleDelegateRelationship(agentId, aUsers, bUsers); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); System.out.println(agentResponse); return agentResponse; } public String transformStr(long uid) { User user = userDao.findOne(uid); String str = "ID:" + user.getId() + "名:" + user.getUsername(); return str; } public String transformStr(User user) { String str = "ID:" + user.getId() + "名:" + user.getUsername(); return str; } public Map<String, Object> assembleDelegateRelationship(long agentId, List<User> aList, List<User> bList) { Map<String, Object> nodeRoot = new HashMap<>(); nodeRoot.put("name", transformStr(agentId)); List<Object> childrenRoot = new ArrayList<>(); nodeRoot.put("children", childrenRoot); Map<String, Object> node1_1 = new HashMap<>(); childrenRoot.add(node1_1); node1_1.put("name", "直接玩家"); List<Object> children1_1 = new ArrayList<>(); node1_1.put("children", children1_1); //直接玩家 for (User user : aList) { DChildVo childVo = new DChildVo(); childVo.setName(transformStr(user)); childVo.setValue((int) user.getId()); children1_1.add(childVo); } Map<String, Object> node1_2 = new HashMap<>(); childrenRoot.add(node1_2); node1_2.put("name", "二级代理"); List<Object> children1_2 = new ArrayList<>(); node1_2.put("children", children1_2); // for (int i = 10; i < 20; i++){ // // Map<String, Object> node2_x = new HashMap<>(); // node2_x.put("name", i); // children1_2.add(node2_x); // // List<Object> child2_x = new ArrayList<>(); // node2_x.put("children", child2_x); // // for (int j = 100; j < 110; j++){ // DChildVo childVo = new DChildVo(); // childVo.setValue(j); // childVo.setName("三级代理" + j); // child2_x.add(childVo); // } // } //二级代理 for (User user : bList) { Map<String, Object> node2_x = new HashMap<>(); node2_x.put("name", transformStr(user)); children1_2.add(node2_x); List<Object> child2_x = new ArrayList<>(); node2_x.put("children", child2_x); // //三级代理 // for (int j = 
100; j < 110; j++){ // DChildVo childVo = new DChildVo(); // childVo.setValue(j); // childVo.setName("三级代理" + j); // child2_x.add(childVo); // } AgentBean agentBean = RedisManager.getAgentRedisService().getAgentBean(user.getId()); if (agentBean == null) continue; for (Long id : agentBean.getChildList()) { DChildVo dChildVo = new DChildVo(); dChildVo.setName(transformStr(id)); child2_x.add(dChildVo); } } return nodeRoot; } public Map<String, Object> assDemo() { Map<String, Object> nodeRoot = new HashMap<>(); nodeRoot.put("name", "self"); List<Object> childrenRoot = new ArrayList<>(); nodeRoot.put("children", childrenRoot); Map<String, Object> node1_1 = new HashMap<>(); childrenRoot.add(node1_1); node1_1.put("name", "直接玩家"); List<Object> children1_1 = new ArrayList<>(); node1_1.put("children", children1_1); //直接玩家 for (int i = 0; i < 5; i++) { DChildVo childVo = new DChildVo(); childVo.setName(i + ""); childVo.setValue(i); children1_1.add(childVo); } Map<String, Object> node1_2 = new HashMap<>(); childrenRoot.add(node1_2); node1_2.put("name", "二级代理"); List<Object> children1_2 = new ArrayList<>(); node1_2.put("children", children1_2); for (int i = 10; i < 20; i++) { Map<String, Object> node2_x = new HashMap<>(); node2_x.put("name", i); children1_2.add(node2_x); List<Object> child2_x = new ArrayList<>(); node2_x.put("children", child2_x); for (int j = 100; j < 110; j++) { DChildVo childVo = new DChildVo(); childVo.setValue(j); childVo.setName("三级代理" + j); child2_x.add(childVo); } } //二级代理 for (int i = 5; i < 10; i++) { Map<String, Object> node2_x = new HashMap<>(); node2_x.put("name", i + ""); children1_2.add(node2_x); List<Object> child2_x = new ArrayList<>(); node2_x.put("children", child2_x); //三级代理 for (int j = 100; j < 110; j++) { DChildVo childVo = new DChildVo(); childVo.setValue(j); childVo.setName("三级代理" + j); child2_x.add(childVo); } } return nodeRoot; } public Map<String, Object> ass() { Map<String, Object> rs = new HashMap<>(); rs.put("name", "flare"); 
List<Object> list = new ArrayList<>(); rs.put("children", list); Map<String, Object> inner = new HashMap<>(); list.add(inner); inner.put("name", "analytics"); List<Object> analytics = new ArrayList<>(); inner.put("children", analytics); Map<String, Object> map = new HashMap<>(); map.put("name", "cluster"); analytics.add(map); List<Object> cluster = new ArrayList<>(); map.put("children", cluster); for (int i = 0; i < 35; i++) { DChildVo childVo = new DChildVo(); childVo.setName(i + ""); childVo.setValue(i); cluster.add(childVo); } return rs; } @DemoChecker @RequestMapping("/oFindCharge") public AgentResponse findChargeByOrderId(long orderId) { Charge charge = homeService.findChargeByOrderId(orderId); List<Charge> list = new ArrayList<>(); AgentResponse agentResponse = new AgentResponse(); if (charge == null) { Map<String, Object> rs = new HashMap<>(); rs.put("list", list); rs.put("total", 0); agentResponse.setData(rs); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); } else { list.add(charge); Map<String, Object> rs = new HashMap<>(); rs.put("list", list); rs.put("total", 1); agentResponse.setData(rs); // agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); } return agentResponse; } @DemoChecker @RequestMapping("/uFindCharge") public AgentResponse findChargeByUserId(long userId) { // Charge charge = homeService.findChargeByUserId(userId); List<Charge> chargeList = homeService.findChargesByUserId(userId); // List<Charge> list = new ArrayList<>(); AgentResponse agentResponse = new AgentResponse(); if (chargeList == null || chargeList.size() == 0) { Map<String, Object> rs = new HashMap<>(); rs.put("list", new ArrayList<>()); rs.put("total", 0); agentResponse.setData(rs); agentResponse.setMsg("没有记录"); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); } else { // list.addAll(chargeList); Map<String, Object> rs = new HashMap<>(); rs.put("list", chargeList); rs.put("total", 1); agentResponse.setData(rs); } return 
agentResponse; } @DemoChecker @RequestMapping("/chargeTimeSearch") public AgentResponse chargeTimerSearch(String time, int curPage,int chargeFrom, long userId, int moneyType) { // moneyType 1 房卡 2 金币 3 房卡和金币 // chargeFrom 充值来源 1 微信 2 代理 3 任意 if (curPage > 0) { curPage--; } String[] sA = null; if (time.contains(",")) { sA = time.split(",", 1000); } SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); List<Date> list = new ArrayList<>(); if (sA != null){ Arrays.stream(sA) .forEach(x -> { try { list.add(simpleDateFormat.parse(x)); } catch (ParseException e) { e.printStackTrace(); } }); } Page<Charge> page = homeService.timeSearchCharges(list, new PageRequest(curPage, 20), moneyType, chargeFrom, userId); AgentResponse agentResponse = new AgentResponse(); Map<String, Object> rs = new HashMap<>(); rs.put("list", page.getContent()); rs.put("total", page.getTotalElements()); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/changePwd") @Transactional public AgentResponse changePwd(String pwd, HttpServletRequest request) { String token = AgentUtil.findTokenInHeader(); int agentId = (int) AgentUtil.getUserIdByToken(token); AgentUser agentUser = agentUserDao.findOne(agentId); agentUser.setPassword(pwd); AgentUser au = agentUserDao.save(agentUser); if (au != null) { AgentResponse agentResponse = new AgentResponse(); return agentResponse; } else { AgentResponse agentResponse = new AgentResponse(); agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); agentResponse.setMsg("修改失败"); return agentResponse; } } @DemoChecker @RequestMapping("/findCharges") public AgentResponse findCharges(int curPage) { if (curPage > 0) { curPage--; } Page<Charge> page = homeService.findCharges(new PageRequest(curPage, 20)); List<DChargeAdminVo> list = new ArrayList<>(); page.getContent().stream() .forEach(x -> { DChargeAdminVo dChargeAdminVo = new DChargeAdminVo(); BeanUtils.copyProperties(x, dChargeAdminVo); list.add(dChargeAdminVo); 
}); Long count = homeService.chargesCount(); AgentResponse agentResponse = new AgentResponse(); Map<String, Object> rs = new HashMap<>(); rs.put("list", list); rs.put("total", count); agentResponse.setData(rs); return agentResponse; } @RequestMapping("/login") public AgentResponse agentLogin(HttpServletRequest request, HttpServletResponse response, String username, String password) { AgentUser agentUser = agentUserDao.findAgentUserByUsernameAndPassword(username, password); AgentResponse agentResponse = null; Map<String, Object> result = new HashMap<>(); if (agentUser != null) { //todo token 和 玩家的关联 Map<String, Object> rs = new HashMap<>(); rs.put("id", agentUser.getId()); rs.put("username", agentUser.getUsername()); String token = getToken(agentUser.getId()); //清除缓存 AgentUtil.clearUserTokenByUserId(agentUser.getId()); AgentUtil.caches.put(token, rs); agentResponse = new AgentResponse(0, result); Map<String, Object> rrr = new HashMap<>(); rrr.put("token", token); return agentResponse.setData(rrr); } else { agentResponse = new AgentResponse(ErrorCode.ROLE_ACCOUNT_OR_PASSWORD_ERROR, result); agentResponse.msg = "用户不存在"; } return agentResponse; } @DemoChecker @RequestMapping("/info") public AgentResponse userInfo(String token) { //todo token 验证 Map<String, Object> map = (Map<String, Object>) AgentUtil.caches.get(token); Map<String, Object> r = new HashMap<>(); r.put("userId", map.get("id")); System.out.println(map.get("id")); List<String> roles = new ArrayList<>(); if ((Integer) map.get("id") - 1 == 0) { roles.add("admin"); } else { roles.add("delegate"); } r.put("roles", roles); AgentUser agentUser = agentUserDao.findOne((Integer) map.get("id")); r.put("name", agentUser.getUsername()); r.put("avatar", "https://wpimg.wallstcn.com/f778738c-e4f8-4870-b634-56703b4acafe.gif"); return new AgentResponse(0, r); } @DemoChecker @RequestMapping("/onlineInfo") public AgentResponse onlineInfo(String date) { //todo token 验证 LogRecord logRecord = logRecordDao.findOne(date); return 
new AgentResponse(0, JsonUtil.toJson(logRecord)); } @DemoChecker @RequestMapping("/getLogByDates") public AgentResponse getLogByDates(int num) { LocalDate today = LocalDate.now(); List<String> days = new ArrayList<>(); for (int i = 0; i < num; i++) { LocalDate temp = today.minusDays(i + 1); days.add(temp.toString()); } return new AgentResponse(0, logRecordDao.findByIdIn(days)); } // @DemoChecker @DemoChecker @RequestMapping("/fConstant") public AgentResponse getConstnat() { Constant constant = constantDao.findOne(1l); Map<String, Object> rs = new HashMap<>(); rs.put("constant", constant); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/uConstant") public AgentResponse modifyConstnat(@RequestParam("constantForm") String constantForm) { Map<String, Object> rs = JsonUtil.readValue(constantForm, Map.class); ConstantFormVo vo = JsonUtil.readValue(constantForm, ConstantFormVo.class); Constant constant = constantDao.findOne(1l); constant.setInitMoney(vo.getInit_money()); constant.setAppleCheck(Integer.valueOf(vo.getApple_check()).intValue()); constant.setVersionOfAndroid(vo.getVersion_of_android()); constant.setVersionOfIos(vo.getVersion_of_ios()); constant.setMarquee(vo.getMarquee()); constant.setMarquee1(vo.getMarquee1()); constant.setMarquee2(vo.getMarquee2()); constant.setDownload2(vo.getDownload2()); constant.setDownload(vo.getDownload()); constantDao.save(constant); //刷新内存 ServerManager.constant = constantDao.findOne(1L); AgentResponse agentResponse = new AgentResponse(); return agentResponse; } @DemoChecker @RequestMapping("/partnerRecord") public AgentResponse getChargeRecord(String time, int curPage, int userId) { if (curPage > 0) { curPage--; } if (curPage > 0) { curPage--; } String[] sA = null; if (time.contains(",")) { sA = time.split(",", 1000); } String start = sA[0]; String end = sA[1]; int agentId = (int) AgentUtil.getUserIdByToken(AgentUtil.findTokenInHeader()); start = 
DateUtil.becomeStandardSTime(start); end = DateUtil.becomeStandardSTime(end); List<String> listA = DateUtil.getDateListIn(end, start); Sort sort = new Sort(Sort.Direction.DESC, "date"); Page<AgentRecords> page = null; List<AgentRecords> agentRecordsList = null; if (agentId == 1){ page = homeService.findAllAgentRecords(userId, listA, new PageRequest(curPage, 20, sort)); agentRecordsList = page.getContent(); }else { page = homeService.findAllAgentRecords(agentId, listA, new PageRequest(curPage, 20, sort)); agentRecordsList = page.getContent(); } Map<String, Object> rs = new HashMap<>(); rs.put("list", agentRecordsList); rs.put("count", page.getTotalElements()); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/todayPartnerRecord") public AgentResponse todayPartnerRecord(int curPage) { if (curPage > 0) { curPage--; } int agentId = (int) AgentUtil.getUserIdByToken(AgentUtil.findTokenInHeader()); // int agentId = 100027; String start = DateUtil.convert2DayString(new Date()); String end = DateUtil.convert2DayString(new Date()); Sort sort = new Sort(Sort.Direction.DESC, "date"); List<String> listA = DateUtil.getDateListIn(end, start); Page<AgentRecords> page = homeService.findAllAgentRecords(agentId, listA, new PageRequest(curPage, 20)); List<AgentRecords> agentRecordsList = page.getContent(); Map<String, Object> rs = new HashMap<>(); rs.put("list", agentRecordsList); rs.put("count", page.getTotalElements()); rs.put("userId", agentId); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); return agentResponse; } @DemoChecker @RequestMapping("/dissolveRoom") public AgentResponse dissolveRoom(String roomId) { System.out.println("admin解散房间"); Map<String, Object> rs = new HashMap<>(); AgentResponse agentResponse = new AgentResponse(); agentResponse.setData(rs); String serverId = RedisManager.getRoomRedisService().getServerId(roomId); if (serverId == null) { 
rs.put("result", "ok"); // rs.put("") // agentResponse.setMsg("房间不存在"); // agentResponse.setCode(com.code.server.login.action.ErrorCode.ERROR); RedisManager.removeRoomAllInfo(roomId); return agentResponse; } MsgProducer msgProducer = SpringUtil.getBean(MsgProducer.class); Map<String, Object> result = new HashMap<>(); result.put("roomId", roomId); KafkaMsgKey msgKey = new KafkaMsgKey(); msgKey.setUserId(0); msgKey.setRoomId(roomId); msgKey.setPartition(Integer.valueOf(serverId)); ResponseVo responseVo = new ResponseVo("roomService", "dissolutionRoom", result); msgProducer.send2Partition("roomService", Integer.valueOf(serverId), msgKey, responseVo); rs.put("result", "ok"); return agentResponse; } @DemoChecker @RequestMapping("/dissolveRoomByUserId") public AgentResponse dissolveRoomByUserId(String userId) { String roomId = RedisManager.getUserRedisService().getRoomId(Long.valueOf(userId)); System.out.println("roomId = " + roomId); return dissolveRoom(roomId); } @RequestMapping("/upateAgentInfo") public void updateAF() { List<AgentUser> list = (List<AgentUser>) agentUserDao.findAll(); for (AgentUser agentUser : list) { AgentInfo agentInfo = new AgentInfo(); AgentInfoRecord agentInfoRecord = new AgentInfoRecord(); agentUser.setAgentInfo(agentInfo); agentUser.setAgentInfoRecord(agentInfoRecord); agentUserDao.save(agentUser); } } //充值之后计算返利 @RequestMapping("/testDemo") public void testAgentInfo() { // // Charge charge = chargeDao.findOne(6424891349438832640l); // double money = charge.getMoney(); // // String dayStr = DateUtil.convert2DayString(new Date()); // Constant constant = constantDao.findOne(1l); // AgentUser agentUser1 = agentUserDao.findOne(17); // AgentInfo agentInfo1 = agentUser1.getAgentInfo(); // // if (agentUser1 != null){ // Map<String, ChildCost> rs1 = agentInfo1.getEveryDayCost(); // ChildCost childCost1 = rs1.get(dayStr); // if (childCost1 == null){ // childCost1 = new ChildCost(); // } // //今日来源于玩家的收入 // childCost1.firstLevel += money * 
constant.getIncome1(); // //暂时用来充当今日有没有结算完 // childCost1.setPartner(0); // rs1.put(dayStr, childCost1); // agentUserDao.save(agentUser1); // } // // AgentUser agentUser2 = null; // if (agentUser1 != null){ // agentUserDao.findOne(agentUser1.getParentId()); // } // // if (agentUser2 != null){ // AgentInfo agentInfo2 = agentUser2.getAgentInfo(); // Map<String, ChildCost> rs2 = agentInfo2.getEveryDayCost(); // ChildCost childCost2 = rs2.get(dayStr); // if (childCost2 == null){ // childCost2 = new ChildCost(); // } // // //今日来源于代理的收入 // childCost2.secondLevel += money * constant.getIncome2(); // //暂时用来充当今日有没有结算完 // childCost2.setPartner(0); // rs2.put(dayStr, childCost2); // agentUserDao.save(agentUser2); // } // // //更新订单结算是否已经返利 // charge.setFinishTime(dayStr); // chargeDao.save(charge); } public void testRecord() { AgentUser agentUser1 = agentUserDao.findOne(17); AgentInfo agentInfo = agentUser1.getAgentInfo(); Map<String, ChildCost> rs = new HashMap<>(); List<Map<String, ChildCost>> list = new ArrayList<>(); for (String key : agentInfo.getEveryDayCost().keySet()) { ChildCost childCost = agentInfo.getEveryDayCost().get(key); if (childCost.getPartner() - 0d == 1d) { rs.put(key, childCost); list.add(rs); } } System.out.println("===="); System.out.println(list); } //清除返利 public void testClear() { AgentUser agentUser1 = agentUserDao.findOne(17); AgentInfo agentInfo = agentUser1.getAgentInfo(); for (String key : agentInfo.getEveryDayCost().keySet()) { ChildCost childCost = agentInfo.getEveryDayCost().get(key); childCost.setPartner(1); } agentUserDao.save(agentUser1); } public void testTest() { AgentUser agentUser1 = agentUserDao.findOne(17); AgentInfo agentInfo = agentUser1.getAgentInfo(); //计算累计收入 double totalMoney = 0; double firstLevel = 0; double secondLevel = 0; for (String key : agentInfo.getEveryDayCost().keySet()) { ChildCost childCost = agentInfo.getEveryDayCost().get(key); if (childCost.getPartner() - 0d == 1d) { totalMoney += childCost.firstLevel; totalMoney 
+= childCost.secondLevel; } else { firstLevel += childCost.getFirstLevel(); secondLevel += childCost.getSecondLevel(); } } } @RequestMapping("/test") public Map<String, Object> test() { return AgentUtil.caches; } @RequestMapping("/ttt") public String hello() { return "Hello World"; } // @RequestMapping("/testUpdate") // public String testUpdate(){ //// System.out.println(agentUserDao); //// System.out.println(agentUserDao.findAll()); // Object o = agentUserDao.findAll(); // List<AgentUser> list = (List<AgentUser>) agentUserDao.findAll(); // // for (AgentUser agentUser : list){ // AgentInfo agentInfo = new AgentInfo(); // AgentInfoRecord agentInfoRecord = new AgentInfoRecord(); // if (agentUser.getId() == 17){ // ChildCost childCost1 = new ChildCost(); // childCost1.firstLevel = 10; // childCost1.secondLevel = 5; // childCost1.setPartner(0d); // agentInfo.getEveryDayCost().put("2018-8-20", childCost1); // // ChildCost childCost2 = new ChildCost(); // childCost2.firstLevel = 12; // childCost2.secondLevel =6; // childCost1.setPartner(0d); // agentInfo.getEveryDayCost().put("2018-8-19", childCost2); // } // // agentUser.setAgentInfo(agentInfo); // agentUser.setAgentInfoRecord(agentInfoRecord); // agentUserDao.save(agentUser); // // } // // return "ok"; // } public static void main(String[] args) { // LocalDate today = LocalDate.now(); // for(int i=0;i<7;i++) { // LocalDate temp = today.minusDays(i + 1); // System.out.println(temp.toString()); // } // Map<String, Object> oo = ass(); // System.out.println(oo); } }
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef MEDIAPIPE_FRAMEWORK_PROFILER_TEST_CONTEXT_BUILDER_H_
#define MEDIAPIPE_FRAMEWORK_PROFILER_TEST_CONTEXT_BUILDER_H_

#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "absl/memory/memory.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/calculator_options.pb.h"
#include "mediapipe/framework/mediapipe_options.pb.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/framework/port/statusor.h"
#include "mediapipe/framework/tool/tag_map.h"
#include "mediapipe/framework/tool/tag_map_helper.h"

namespace mediapipe {

using tool::TagMap;

// A builder for the CalculatorContext for testing a calculator node.
// It owns the calculator state, the input/output tag maps and stream
// managers, and the context itself, so tests can fill inputs and outputs
// without running a whole graph. All streams are typed as std::string.
class TestContextBuilder {
  // An InputStreamHandler to initialize and fill input streams.
  // It replays a fixed vector of packets: packets_[i] feeds the input
  // stream whose collection id is i.
  class InputStreamWriter : public InputStreamHandler {
   public:
    using InputStreamHandler::InputStreamHandler;

    // Sets the packets to deliver on the next FillInputSet() call.
    void set_packets(const std::vector<Packet>& packets) { packets_ = packets; }

    // Always reports the node as ready so tests can process immediately.
    // NOTE(review): unlike FillInputSet below, this is not marked
    // `override` -- confirm it matches the base-class virtual signature.
    NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) {
      return NodeReadiness::kReadyForProcess;
    }

    // Copies one stored packet into each input stream shard of input_set.
    void FillInputSet(Timestamp input_timestamp,
                      InputStreamShardSet* input_set) override {
      for (auto id = input_set->BeginId(); id < input_set->EndId(); ++id) {
        Packet packet = packets_[id.value()];
        AddPacketToShard(&input_set->Get(id), std::move(packet), false);
      }
    }

    // Packets to replay, indexed by input stream collection id.
    std::vector<Packet> packets_;
  };

 public:
  TestContextBuilder() = default;

  // Convenience constructor; forwards to Init().
  TestContextBuilder(const std::string& node_name, int node_id,
                     const std::vector<std::string>& inputs,
                     const std::vector<std::string>& outputs) {
    Init(node_name, node_id, inputs, outputs);
  }

  // Initializes the input and output specs of the calculator node.
  // Also, creates the default calculator context for the calculator node.
  void Init(const std::string& node_name, int node_id,
            const std::vector<std::string>& inputs,
            const std::vector<std::string>& outputs) {
    // Function-local static that is never deleted: the PacketType must
    // outlive the stream managers that reference it.
    static auto packet_type = new PacketType;
    packet_type->Set<std::string>();
    state_ = absl::make_unique<CalculatorState>(
        node_name, node_id, "PCalculator", CalculatorGraphConfig::Node(),
        nullptr);
    input_map_ = tool::CreateTagMap(inputs).ValueOrDie();
    output_map_ = tool::CreateTagMap(outputs).ValueOrDie();
    input_handler_ = absl::make_unique<InputStreamWriter>(
        input_map_, nullptr, MediaPipeOptions(), false);
    input_managers_.reset(new InputStreamManager[input_map_->NumEntries()]);
    for (auto id = input_map_->BeginId(); id < input_map_->EndId(); ++id) {
      MEDIAPIPE_CHECK_OK(input_managers_[id.value()].Initialize(
          input_map_->Names()[id.value()], packet_type, false));
    }
    MEDIAPIPE_CHECK_OK(
        input_handler_->InitializeInputStreamManagers(input_managers_.get()));
    for (auto id = output_map_->BeginId(); id < output_map_->EndId(); ++id) {
      // NOTE(review): packet_type_ is created here but never used -- the
      // spec below is assigned the outer `packet_type`. Both are set to
      // std::string, so behavior is unaffected, but this looks vestigial.
      static auto packet_type_ = new PacketType;
      packet_type_->Set<std::string>();
      OutputStreamSpec spec;
      spec.name = output_map_->Names()[id.value()];
      spec.packet_type = packet_type;
      spec.error_callback = [](const ::mediapipe::Status& status) {
        LOG(ERROR) << status;
      };
      output_specs_[spec.name] = spec;
    }
    context_ = CreateCalculatorContext();
  }

  // Initializes the input and output streams of a calculator context.
  std::unique_ptr<CalculatorContext> CreateCalculatorContext() {
    auto result = absl::make_unique<CalculatorContext>(state_.get(),
                                                       input_map_, output_map_);
    MEDIAPIPE_CHECK_OK(input_handler_->SetupInputShards(&result->Inputs()));
    for (auto id = output_map_->BeginId(); id < output_map_->EndId(); ++id) {
      auto& out_stream = result->Outputs().Get(id);
      const std::string& stream_name = output_map_->Names()[id.value()];
      out_stream.SetSpec(&output_specs_[stream_name]);
    }
    return result;
  }

  // Returns the calculator context.
  CalculatorContext* get() { return context_.get(); }

  // Resets the calculator context.
  void Clear() { context_ = CreateCalculatorContext(); }

  // Writes packets to the input streams of a calculator context.
  // The shared input timestamp is taken from the first non-empty packet.
  void AddInputs(const std::vector<Packet>& packets) {
    Timestamp input_timestamp = GetTimestamp(packets);
    input_handler_->set_packets(packets);
    input_handler_->FillInputSet(input_timestamp, &context_->Inputs());
    // NOTE(review): a temporary CalculatorContextManager is used only to
    // push the timestamp -- confirm no per-manager state is needed here.
    CalculatorContextManager().PushInputTimestampToContext(context_.get(),
                                                           input_timestamp);
  }

  // Writes packets to the output streams of a calculator context.
  // packets[i] holds the packets for the output stream with id i.
  void AddOutputs(const std::vector<std::vector<Packet>>& packets) {
    auto& out_map = context_->Outputs().TagMap();
    for (auto id = out_map->BeginId(); id < out_map->EndId(); ++id) {
      auto& out_stream = context_->Outputs().Get(id);
      for (const Packet& packet : packets[id.value()]) {
        out_stream.AddPacket(packet);
      }
    }
  }

  // Returns the Timestamp of the first non-empty packet, or the default
  // (unset) Timestamp when every packet is empty.
  static Timestamp GetTimestamp(const std::vector<Packet>& packets) {
    for (const Packet& packet : packets) {
      if (!packet.IsEmpty()) {
        return packet.Timestamp();
      }
    }
    return Timestamp();
  }

  std::unique_ptr<CalculatorState> state_;
  std::unique_ptr<InputStreamWriter> input_handler_;
  std::unique_ptr<InputStreamManager[]> input_managers_;
  std::shared_ptr<TagMap> input_map_;
  std::shared_ptr<TagMap> output_map_;
  std::map<std::string, OutputStreamSpec> output_specs_;
  std::unique_ptr<CalculatorContext> context_;
};

}  // namespace mediapipe

#endif  // MEDIAPIPE_FRAMEWORK_PROFILER_TEST_CONTEXT_BUILDER_H_
Report of a meeting of Supporters of the Communist (Maoist) Party of Afghanistan in Europe

In December 2012, a meeting of the supporters of the Communist (Maoist) Party of Afghanistan [C(M)PA] in Europe, in the presence of representatives of the party, was held as part of the overall campaign to rectify the party’s style of work.

Representatives of the C(M)PA attending the meeting briefed the party supporters on the situation in Afghanistan, the international communist scene––particularly on the efforts of the Marxist-Leninist-Maoist parties, including the C(M)PA, to form a new international communist organization of the Marxist-Leninist-Maoist parties and organizations throughout the world.

At this meeting overwhelming support for the efforts of the C(M)PA and other comrade parties for the formation of a new international communist organization was expressed. Comrades emphasized the fact that the existence of an international communist organization to coordinate and lead the activities of the international communist movement will have an immensely positive impact on the expansion of the revolutionary movements and preparing the subjective factor of the world revolution, especially during a time when the revolutionary masses are rising up in an unprecedented manner to challenge the “masters of the universe” around the world, and the capitalist imperialist system has been threatened by the most severe economic and financial crisis since the Great Depression of the early twentieth century and its ideology and political systems are facing a profound crisis of legitimacy.

Furthermore, comrades at this meeting expressed their full support for the party in taking a position against the Avakianite post-MLM and Prachanda-Bhattarai revisionisms that have resulted in the collapse of the Revolutionary Internationalist Movement [RIM]. The comrades reiterated the fact that we have been deprived of our international organization during a very critical epoch.
During a time when the masses in the Middle East are rising up against despots and imperialist puppets––and also revolutionary upsurges are taking place in the imperialist countries in the form of the occupy movement, the students and workers rebellions––an international Maoist organization would have played an important role in influencing these mass upsurges by providing them with a more revolutionary and communist direction. Thus, the comrades stressed the fact that it is important to strive for the formation of a new international organization of MLM forces. The party supporters applauded the work of party comrades and the mass democratic organization within the country and the significant advances that have been made in the revolutionary fronts within the country. The party supporters in Europe pledged to stand beside the revolutionary activities and struggles of the party within the country and to extend their support to the party and its mass democratic organizations by all means possible. Moreover, the party supporters at this meeting formed an Organizing Committee of the Supporters of C(M)PA in Europe in order to lead and coordinate the activities and struggles of the party supporters there. Given the fact that there are many ex-party members and supporters across Europe who, so far, have been unorganized, this committee will strive to gather them into a strong organization of party supporters in this continent. The comrades affirmed the fact that Europe is an important front for the party’s revolutionary struggles within the country and also internationally. 
In the conjuncture of an imperialist occupation, in which the European countries form the bulk of the imperialist alliance occupying Afghanistan, party supporters in Europe, by coordinating with the anti-imperialist internationalist left in that continent, can muster a significant anti-imperialist movement that will be of crucial support for the anti-imperialist struggles of the party and its mass democratic organizations within Afghanistan.

Given the immense importance of the party’s work in Europe, party supporters pledged to do the following:

To strive to forge an effective organization of the party's supporters in Europe.

To build unfailing relations with other Marxist-Leninist-Maoist forces and with the broader anti-imperialist internationalist left in Europe.

To leave no stone unturned in supporting party activities within the country and also internationally.

To hold a conference of all party supporters within a year.

The meeting concluded with the revolutionary spirit to shake-things-up in Europe in relation to revolutionary work around Afghanistan.

[ Back to top ]
/** * This class represents the result of a multi source matcher execution. */ public class ExecutionResultMultiSource { private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionResultMultiSource.class); private final Object alignment; private final Object parameters; private final Object matcher; private final String matcherName; private final List<URL> allGraphs; private final List<TestCase> testCases; /** * The total runtime in nanoseconds */ private long totalRuntime; private boolean computeTransitiveClosure; private final Partitioner partitioner; public ExecutionResultMultiSource(Object alignment, String matcherName, List<TestCase> testCases, long totalRuntime, boolean computeTransitiveClosure, Partitioner partitioner) { this(alignment, null, null, matcherName, new ArrayList<>(), testCases, totalRuntime, computeTransitiveClosure, partitioner); } public ExecutionResultMultiSource(Object alignment, Object parameters, Object matcher, String matcherName, List<URL> allGraphs, List<TestCase> testCases, long totalRuntime, Partitioner partitioner) { this(alignment, parameters, matcher, matcherName, allGraphs, testCases, totalRuntime, GenericMatcherMultiSourceCaller.needsTransitiveClosureForEvaluation(matcher), partitioner); } public ExecutionResultMultiSource(Object alignment, Object parameters, Object matcher, String matcherName, List<URL> allGraphs, List<TestCase> testCases, long totalRuntime, boolean computeTransitiveClosure, Partitioner partitioner) { this.alignment = alignment; this.parameters = parameters; this.matcher = matcher; this.matcherName = matcherName; this.allGraphs = allGraphs; this.testCases = testCases; this.totalRuntime = totalRuntime; this.computeTransitiveClosure = computeTransitiveClosure; this.partitioner = partitioner; } public ExecutionResultSet toExecutionResultSet(){ Alignment fullAlignment; try { fullAlignment = TypeTransformerRegistry.getTransformedObject(this.alignment, Alignment.class); } catch (TypeTransformationException 
ex) { LOGGER.error("Could not transform alignemnt to Alignment class. Return empty ExecutionResultSet.", ex); return new ExecutionResultSet(); } //remove reflexive edges fullAlignment = ReflexiveCorrespondenceFilter.removeReflexiveCorrespondences(fullAlignment); Map<TestCase, Alignment> testcaseToAlignment = new HashMap<>(); if(computeTransitiveClosure){ TransitiveClosure<String> alignmentClosure = new TransitiveClosure<>(); for(Correspondence c : fullAlignment){ alignmentClosure.add(c.getEntityOne(), c.getEntityTwo()); } for(Set<String> sameAs : alignmentClosure.getClosure()){ Map<TestCase, SourceTargetURIs> map = partitioner.partition(sameAs); for(Map.Entry<TestCase, SourceTargetURIs> entry : map.entrySet()){ SourceTargetURIs sourceTargetUris = entry.getValue(); if(sourceTargetUris.containsSourceAndTarget() == false) continue; Alignment alignment = testcaseToAlignment.computeIfAbsent(entry.getKey(), __->new Alignment()); for(String sourceURI : sourceTargetUris.getSourceURIs()){ for(String targetURI : sourceTargetUris.getTargetURIs()){ //TODO: confidence extensions etc alignment.add(sourceURI, targetURI); } } } } }else{ for(Correspondence c : fullAlignment){ Map<TestCase, SourceTargetURIs> map = partitioner.partition(Arrays.asList(c.getEntityOne(), c.getEntityTwo())); for(Map.Entry<TestCase, SourceTargetURIs> entry : map.entrySet()){ SourceTargetURIs sourceTargetUris = entry.getValue(); if(sourceTargetUris.containsSourceAndTarget() == false) continue; Alignment alignment = testcaseToAlignment.computeIfAbsent(entry.getKey(), __->new Alignment()); for(String sourceURI : sourceTargetUris.getSourceURIs()){ for(String targetURI : sourceTargetUris.getTargetURIs()){ alignment.add(sourceURI, targetURI, c.getConfidence(), c.getRelation(), c.getExtensions()); } } } } } long runtimePerTestCase = totalRuntime / testCases.size(); ExecutionResultSet resultSet = new ExecutionResultSet(); for(TestCase testCase : testCases){ resultSet.add(new ExecutionResult( testCase, 
matcherName, null, runtimePerTestCase, testcaseToAlignment.getOrDefault(testCase, new Alignment()), testCase.getParsedReferenceAlignment(), null, null, null )); } return resultSet; } public Object getAlignment() { return alignment; } public <T> T getAlignment(Class<T> clazz){ return getAlignment(clazz, new Properties()); } public <T> T getAlignment(Class<T> clazz, Properties parameters){ try { return TypeTransformerRegistry.getTransformedObject(this.alignment, clazz, parameters); } catch (TypeTransformationException ex) { LOGGER.error("Could not transform alignment to {}. Returning null.", clazz, ex); return null; } } public Object getParameters() { return parameters; } public <T> T getParameters(Class<T> clazz){ return getParameters(clazz, new Properties()); } public <T> T getParameters(Class<T> clazz, Properties parameters){ try { return TypeTransformerRegistry.getTransformedObject(this.parameters, clazz, parameters); } catch (TypeTransformationException ex) { LOGGER.error("Could not transform parameters to {}. Returning null.", clazz, ex); return null; } } public Object getMatcher() { return matcher; } public String getMatcherName() { return matcherName; } public List<URL> getAllGraphs() { return allGraphs; } public List<TestCase> getTestCases() { return testCases; } public long getTotalRuntime() { return totalRuntime; } public void addRuntime(long additonalRuntime){ this.totalRuntime += additonalRuntime; } /** * Sets the value of compute transitive closure to true, if the parameter is true. * Otherwise it still uses the old value. * @param computeTransitiveClosure the new value if the transitive closure should be computed or not. */ public void updateComputeTransitiveClosure(boolean computeTransitiveClosure){ if(computeTransitiveClosure){ this.computeTransitiveClosure = true; } } public boolean isComputeTransitiveClosure() { return computeTransitiveClosure; } public Partitioner getPartitioner() { return partitioner; } }
A North Little Rock man who was arrested Sunday told police he was running late to a movie after officers clocked him driving 112 mph on Interstate 430, authorities said.

Police charged 20-year-old Milton Johnson with speeding, reckless driving and improper lane changing after he was arrested around 11 p.m. Sunday on the southbound highway.

Officers wrote in an arrest report that Johnson was driving a black Toyota Camry that was "weaving in and out of traffic from lane to lane without signaling." The report said he came close to hitting cars that he passed.

Officers began following Johnson after initially clocking him at 105 mph, and reached speeds of 112 mph before pulling him over, police wrote.

According to the report, Johnson told police he was late to a 10:55 p.m. showing at the Rave Theater.

According to the theater's scheduled showtimes, the only movie starting at 10:55 p.m. Sunday was the action-comedy Keanu.

Attempts to reach Johnson by phone Monday were unsuccessful.
package com.example.securityauthdemo.config.auth.smscode; import com.example.securityauthdemo.config.auth.MyAuthenticationFailureHandler; import com.example.securityauthdemo.config.auth.MyAuthenticationSuccessHandler; import com.example.securityauthdemo.config.auth.smscode.authentication.SmsCodeAuthenticationFilter; import com.example.securityauthdemo.config.auth.smscode.authentication.SmsCodeAuthenticationProvider; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.config.annotation.SecurityConfigurerAdapter; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.web.DefaultSecurityFilterChain; import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter; import org.springframework.stereotype.Component; @Component public class SmsCodeSecurityConfig extends SecurityConfigurerAdapter<DefaultSecurityFilterChain, HttpSecurity> { private final MyAuthenticationFailureHandler myAuthenticationFailureHandler; private final MyAuthenticationSuccessHandler myAuthenticationSuccessHandler; private final SmsCodeUserDetailsService smsCodeUserDetailsService; public SmsCodeSecurityConfig(MyAuthenticationFailureHandler myAuthenticationFailureHandler, MyAuthenticationSuccessHandler myAuthenticationSuccessHandler, SmsCodeUserDetailsService smsCodeUserDetailsService) { this.myAuthenticationFailureHandler = myAuthenticationFailureHandler; this.myAuthenticationSuccessHandler = myAuthenticationSuccessHandler; this.smsCodeUserDetailsService = smsCodeUserDetailsService; } @Override public void configure(HttpSecurity httpSecurity) { SmsCodeAuthenticationFilter smsCodeAuthenticationFilter = new SmsCodeAuthenticationFilter() {{ setAuthenticationManager(httpSecurity.getSharedObject(AuthenticationManager.class)); setAuthenticationFailureHandler(myAuthenticationFailureHandler); setAuthenticationSuccessHandler(myAuthenticationSuccessHandler); }}; 
SmsCodeAuthenticationProvider smsCodeAuthenticationProvider = new SmsCodeAuthenticationProvider() {{ setSmsCodeUserDetailsService(smsCodeUserDetailsService); }}; httpSecurity .addFilterAt(smsCodeAuthenticationFilter, UsernamePasswordAuthenticationFilter.class) .authenticationProvider(smsCodeAuthenticationProvider); } }
<gh_stars>1-10 package sb func (t Tokens) Iter() *Proc { proc := IterTokens(t, 0, nil) return &proc } func IterTokens( tokens Tokens, index int, cont Proc, ) Proc { var proc Proc proc = func() (*Token, Proc, error) { if index >= len(tokens) { return nil, cont, nil } token := tokens[index] index++ return &token, proc, nil } return proc }
// Copyright 2014 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "syzygy/agent/asan/shadow_marker.h" #include "base/logging.h" namespace agent { namespace asan { namespace { // Some constants related to the structure of shadow marker values. static const uint8_t kActiveBit = 0x20; static const uint8_t kBlockEndNestedBit = 0x01; static const uint8_t kBlockStartDataMask = 0x7; static const uint8_t kBlockStartNestedBit = 0x08; static const uint8_t kFirstNibble = 0xF0; static const uint8_t kRedzoneBit = 0x80; // ShadowMarker name generator. This maps an enumeration value to a name via // template specialization. template<size_t kIndex> struct ShadowMarkerNameGenerator { static const uintptr_t kName = 0; }; #define SHADOW_MARKER_NAME_GENERATOR_MACRO(name, value) \ template<> struct ShadowMarkerNameGenerator<value> { \ static const char* kName; \ }; \ const char* ShadowMarkerNameGenerator<value>::kName = \ _STRINGIZE(name); SHADOW_MARKER_GENERATOR(SHADOW_MARKER_NAME_GENERATOR_MACRO) #undef SHADOW_MARKER_NAME_GENERATOR_MACRO } // namespace // This generates an array of shadow marker names, populating valid markers // with their names as defined by the template specialization above. Invalid // markers map to NULL as defined by the base template. 
// Binary-tree expansion macros: ITERATE_256(F) expands to F(0) F(1) ... F(255).
#define ITERATE_2(F, base) F(base) F(base + 1)
#define ITERATE_4(F, base) ITERATE_2(F, base) ITERATE_2(F, base + 2)
#define ITERATE_8(F, base) ITERATE_4(F, base) ITERATE_4(F, base + 4)
#define ITERATE_16(F, base) ITERATE_8(F, base) ITERATE_8(F, base + 8)
#define ITERATE_32(F, base) ITERATE_16(F, base) ITERATE_16(F, base + 16)
#define ITERATE_64(F, base) ITERATE_32(F, base) ITERATE_32(F, base + 32)
#define ITERATE_128(F, base) ITERATE_64(F, base) ITERATE_64(F, base + 64)
#define ITERATE_256(F) ITERATE_128(F, 0) ITERATE_128(F, 128)

// The reinterpret_cast lets the unspecialized kName (a uintptr_t 0) and the
// specialized kName (a const char*) share one array slot type.
#define GET_SHADOW_MARKER_STRING_PTR(index)  \
    reinterpret_cast<const char*>(ShadowMarkerNameGenerator<index>::kName),

const char* kShadowMarkerNames[256] = {
  ITERATE_256(GET_SHADOW_MARKER_STRING_PTR)
};

#undef GET_SHADOW_MARKER_STRING_PTR
#undef ITERATE_256
#undef ITERATE_128
#undef ITERATE_64
#undef ITERATE_32
#undef ITERATE_16
#undef ITERATE_8
#undef ITERATE_4
#undef ITERATE_2

// A marker is a redzone iff its top bit is set.
bool ShadowMarkerHelper::IsRedzone(ShadowMarkerValue marker) {
  return (marker.value & kRedzoneBit) == kRedzoneBit;
}

// Active blocks: padding/freed markers of live blocks, or active start/end.
bool ShadowMarkerHelper::IsActiveBlock(ShadowMarkerValue marker) {
  return marker.value == kHeapLeftPaddingMarker ||
      marker.value == kHeapRightPaddingMarker ||
      marker.value == kHeapFreedMarker ||
      IsActiveBlockStart(marker) ||
      IsActiveBlockEnd(marker);
}

// Historic counterparts of the active-block markers above.
bool ShadowMarkerHelper::IsHistoricBlock(ShadowMarkerValue marker) {
  return marker.value == kHeapHistoricLeftPaddingMarker ||
      marker.value == kHeapHistoricRightPaddingMarker ||
      marker.value == kHeapHistoricFreedMarker ||
      IsHistoricBlockStart(marker) ||
      IsHistoricBlockEnd(marker);
}

bool ShadowMarkerHelper::IsBlock(ShadowMarkerValue marker) {
  return IsActiveBlock(marker) || IsHistoricBlock(marker);
}

// Block-start markers are identified by their first (high) nibble; the low
// nibble carries the nested bit and 3 bits of start data.
bool ShadowMarkerHelper::IsActiveBlockStart(ShadowMarkerValue marker) {
  return (marker.value & kFirstNibble) == kHeapBlockStartMarker0;
}

bool ShadowMarkerHelper::IsHistoricBlockStart(ShadowMarkerValue marker) {
  return (marker.value & kFirstNibble) == kHeapHistoricBlockStartMarker0;
}

bool ShadowMarkerHelper::IsBlockStart(ShadowMarkerValue marker) {
  // Masking out the active bit makes this match both the active and the
  // historic block-start marker families.
  static const uint8_t kMask = kFirstNibble ^ kActiveBit;
  return (marker.value & kMask) == kHeapHistoricBlockStartMarker0;
}

bool ShadowMarkerHelper::IsNestedBlockStart(ShadowMarkerValue marker) {
  if (!IsBlockStart(marker))
    return false;
  return (marker.value & kBlockStartNestedBit) == kBlockStartNestedBit;
}

// Returns the 3 bits of data stored in a block-start marker.
uint8_t ShadowMarkerHelper::GetBlockStartData(ShadowMarkerValue marker) {
  return marker.value & kBlockStartDataMask;
}

// Block-end markers differ only in the nested bit, which is ignored here.
bool ShadowMarkerHelper::IsActiveBlockEnd(ShadowMarkerValue marker) {
  return (marker.value & ~kBlockEndNestedBit) == kHeapBlockEndMarker;
}

bool ShadowMarkerHelper::IsHistoricBlockEnd(ShadowMarkerValue marker) {
  return (marker.value & ~kBlockEndNestedBit) == kHeapHistoricBlockEndMarker;
}

bool ShadowMarkerHelper::IsBlockEnd(ShadowMarkerValue marker) {
  // Block end markers have arbitrary values for the active bit
  // and the block end nested bit.
  static const uint8_t kMask =
      static_cast<uint8_t>(~(kActiveBit | kBlockEndNestedBit));
  return (marker.value & kMask) == kHeapHistoricBlockEndMarker;
}

bool ShadowMarkerHelper::IsNestedBlockEnd(ShadowMarkerValue marker) {
  if (!IsBlockEnd(marker))
    return false;
  return (marker.value & kBlockEndNestedBit) == kBlockEndNestedBit;
}

// Left redzones are left padding or block-start markers; right redzones are
// right padding or block-end markers. Each comes in historic/active/either
// flavors, the "either" variants masking out the active bit.
bool ShadowMarkerHelper::IsHistoricLeftRedzone(ShadowMarkerValue marker) {
  return marker.value == kHeapHistoricLeftPaddingMarker ||
      IsHistoricBlockStart(marker);
}

bool ShadowMarkerHelper::IsActiveLeftRedzone(ShadowMarkerValue marker) {
  return marker.value == kHeapLeftPaddingMarker ||
      IsActiveBlockStart(marker);
}

bool ShadowMarkerHelper::IsLeftRedzone(ShadowMarkerValue marker) {
  return (marker.value & ~kActiveBit) == kHeapHistoricLeftPaddingMarker ||
      IsBlockStart(marker);
}

bool ShadowMarkerHelper::IsHistoricRightRedzone(ShadowMarkerValue marker) {
  return marker.value == kHeapHistoricRightPaddingMarker ||
      IsHistoricBlockEnd(marker);
}

bool ShadowMarkerHelper::IsActiveRightRedzone(ShadowMarkerValue marker) {
  return marker.value == kHeapRightPaddingMarker ||
      IsActiveBlockEnd(marker);
}

bool ShadowMarkerHelper::IsRightRedzone(ShadowMarkerValue marker) {
  return (marker.value & ~kActiveBit) == kHeapHistoricRightPaddingMarker ||
      IsBlockEnd(marker);
}

// Converts an active block marker to its historic equivalent by clearing
// the active bit.
ShadowMarker ShadowMarkerHelper::ToHistoric(ShadowMarkerValue marker) {
  DCHECK(IsActiveBlock(marker));
  return static_cast<ShadowMarker>(marker.value & ~kActiveBit);
}

// Builds a block-start marker from its components. |data| must fit in the
// 3-bit data mask.
ShadowMarker ShadowMarkerHelper::BuildBlockStart(bool active, bool nested,
                                                 uint8_t data) {
  DCHECK_EQ(0, data & ~kBlockStartDataMask);
  uint8_t marker = kHeapHistoricBlockStartMarker0;
  if (active)
    marker |= kActiveBit;
  if (nested)
    marker |= kBlockStartNestedBit;
  marker |= data;
  return static_cast<ShadowMarker>(marker);
}

// Builds a block-end marker from its components.
ShadowMarker ShadowMarkerHelper::BuildBlockEnd(bool active, bool nested) {
  uint8_t marker = kHeapHistoricBlockEndMarker;
  if (active)
    marker |= kActiveBit;
  if (nested)
    marker |= kBlockEndNestedBit;
  return static_cast<ShadowMarker>(marker);
}

}  // namespace asan
}  // namespace agent
<filename>src/test/java/com/owenfeehan/pathpatternfinder/trim/ConstantStringHelper.java package com.owenfeehan.pathpatternfinder.trim; /*- * #%L * path-pattern-finder * %% * Copyright (C) 2019 - 2021 <NAME> * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * #L% */ import static org.junit.jupiter.api.Assertions.assertEquals; import com.owenfeehan.pathpatternfinder.Pattern; import com.owenfeehan.pathpatternfinder.patternelements.resolved.ResolvedPatternElementFactory; import com.owenfeehan.pathpatternfinder.patternelements.unresolved.UnresolvedPatternElementFactory; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.apache.commons.io.IOCase; /** * Applies the test for trimming constant strings. * * @author <NAME> */ class ConstantStringHelper { private ConstantStringHelper() { // Static access only } /** * Applies the test, and checks if expectations are met. 
* * @param source the strings that will be trimmed * @param ioCase whether do we test in a case-sensitive way * @param constantValue the constant-value in the expected pattern, if any pattern expected. * @param requirePeriod if true, a constant string will only be trimmed if it includes at least * one period. */ public static void applyTest( List<String> source, IOCase ioCase, Optional<String> constantValue, boolean requirePeriod) { UnresolvedPatternElementFactory factory = new UnresolvedPatternElementFactory(ioCase); TrimConstantString op = new TrimConstantString(factory, requirePeriod); Optional<Pattern> expected = constantValue.map(value -> expectedPattern(value, source, factory)); assertEquals(expected, op.trim(source)); } private static Pattern expectedPattern( String constantValue, List<String> source, UnresolvedPatternElementFactory factory) { List<String> sourceSubstringed = applySubstringToList(source, constantValue.length()); return factory.createUnresolvedString( ResolvedPatternElementFactory.constant(constantValue), sourceSubstringed, false); } /** Applies a substring to each element in a list. */ private static List<String> applySubstringToList( List<String> source, int firstNumberItemsToRemove) { return source.stream() .map(path -> path.substring(firstNumberItemsToRemove)) .collect(Collectors.toList()); } }
/**
 * Checks that the supplied record identifier matches this record's SID.
 *
 * @param id the alleged SID
 *
 * @exception RecordFormatException if validation fails
 */
protected void validateSid(final short id)
        throws RecordFormatException {
    // Guard-clause form: accept the matching id, otherwise reject.
    if (id == sid) {
        return;
    }
    throw new RecordFormatException("NOT An SST RECORD");
}
<reponame>lukas-vaiciunas/haterminator #include "ImagesUI.h" #include <allegro5/allegro_primitives.h> #include "Mouse.h" #include "Keyboard.h" #include "Constants.h" #include "Painter.h" #include "ImageData.h" #include "Image.h" ImagesUI::ImagesUI(Key toggleKey) : toggleKey_(toggleKey), numCols_(0), numRows_(0), isActive_(false) {} ImagesUI::ImagesUI(Key toggleKey, const char *sheetPath) : ImagesUI(toggleKey) { this->load(sheetPath); } void ImagesUI::load(const char *sheetPath) { ImageData &imageData = ImageData::instance(); imageData.clear(); ALLEGRO_BITMAP *sheet = al_load_bitmap(sheetPath); unsigned int sheetWidth = al_get_bitmap_width(sheet); unsigned int sheetHeight = al_get_bitmap_height(sheet); unsigned int tileSize = Constants::tileSize; numCols_ = sheetWidth / tileSize; numRows_ = sheetHeight / tileSize; imageIds_ = std::vector<std::vector<unsigned int>>(numRows_, std::vector<unsigned int>(numCols_, 0)); for (unsigned int row = 0; row < numRows_; row++) { for (unsigned int col = 0; col < numCols_; col++) { unsigned int id = this->spatialHash_(col, row) + 1; imageIds_.at(row).at(col) = id; imageData.put( id, new Image(sheet, col * tileSize, row * tileSize, tileSize, tileSize)); } } al_destroy_bitmap(sheet); sheet = nullptr; } void ImagesUI::updateOnMousePress(const Mouse &mouse, Painter &painter) { if (!isActive_) return; if (mouse.isPressed(Mouse::Button::M1)) { unsigned int col = mouse.x() / Constants::tileSize; unsigned int row = mouse.y() / Constants::tileSize; if(this->isInBounds_(col, row)) painter.setImageId(imageIds_.at(row).at(col)); } } void ImagesUI::updateOnKeyPress(const Keyboard &keyboard) { if (keyboard.isPressed(toggleKey_)) isActive_ = !isActive_; } void ImagesUI::render(const Mouse &mouse) const { if (!isActive_) return; unsigned int tileSize = Constants::tileSize; unsigned int mouseCol = mouse.x() / tileSize; unsigned int mouseRow = mouse.y() / tileSize; for (unsigned int row = 0; row < numRows_; row++) for (unsigned int col = 0; col < 
numCols_; col++) ImageData::instance() .get(imageIds_.at(row).at(col)) .render(col * tileSize, row * tileSize); if (this->isInBounds_(mouseCol, mouseRow)) { float indicatorX = mouseCol * tileSize; float indicatorY = mouseRow * tileSize; al_draw_rectangle( indicatorX, indicatorY, indicatorX + tileSize, indicatorY + tileSize, al_map_rgb(255, 0, 255), 2.0f); } } bool ImagesUI::isActive() const { return isActive_; } unsigned int ImagesUI::spatialHash_(unsigned int col, unsigned int row) const { return col + row * numCols_; } bool ImagesUI::isInBounds_(unsigned int col, unsigned int row) const { return col >= 0 && col < numCols_ && row >= 0 && row < numRows_; }
def split_cards(cards):
    """Simulate the two-player greedy card game.

    Players alternate turns; on each turn the current player takes the left
    end card only when it is strictly greater than the right end card,
    otherwise the right end card (ties go right, matching the original
    behavior).

    Args:
        cards: list of card values laid out in a row (may be empty).

    Returns:
        Tuple ``(first_total, second_total)`` with the sums collected by the
        player who moves first and the player who moves second.
    """
    totals = [0, 0]
    lo, hi = 0, len(cards) - 1
    turn = 0  # 0 = first player, 1 = second player
    # Explicit termination condition instead of the original bare
    # `except:` that used IndexError for control flow (and would have
    # silently swallowed any other error too).
    while lo <= hi:
        if cards[lo] > cards[hi]:
            totals[turn] += cards[lo]
            lo += 1
        else:
            totals[turn] += cards[hi]
            hi -= 1
        turn ^= 1
    return totals[0], totals[1]


def main():
    """Read the card row from stdin and print both players' totals."""
    input()  # declared card count -- redundant; the list length is authoritative
    cards = list(map(int, input().split()))
    first, second = split_cards(cards)
    print(first, second)


# Guard so importing this module does not block on stdin.
if __name__ == "__main__":
    main()
def wp_is_in_front_of_ego_veh(pose, waypoint): _, _, ego_yaw = get_euler_angle(pose) ego_X = pose.position.x ego_Y = pose.position.y waypoint_X = waypoint.pose.pose.position.x waypoint_Y = waypoint.pose.pose.position.y shift_x = waypoint_X - ego_X shift_y = waypoint_Y - ego_Y return (shift_x * cos(0 - ego_yaw) - shift_y * sin(0 - ego_yaw)) > 0
/** * A mongodb based repository for guest models, i.e. a local version * of guest data. * * @author dschmitz */ @Named @ApplicationScoped public class GuestModelRepository { private MongoClient mongoClient; public GuestModelRepository() { } @Inject public GuestModelRepository(MongoClient mongoClient) { this.mongoClient = mongoClient; } public void storeGuestModel(@NotNull GuestModel guestModel) { MongoCollection<Document> coll = getGuestModelCollection(); if (null != coll.find(eq("publicId", guestModel.getPublicId())).limit(1).first()) { throw new RuntimeException("Guest with " + guestModel.getPublicId() + " already exists!"); } // obviously crap, but mongojack does not seem to support Mongo3? Document doc = new Document("name", guestModel.getName()) .append("publicId", guestModel.getPublicId()); coll.insertOne(doc); } public Optional<GuestModel> findByPublicId(@NotNull String publicId) { MongoCollection<Document> coll = getGuestModelCollection(); Document doc = coll.find(eq("publicId", publicId)).first(); if (null == doc) { return empty(); } return Optional.of(createGuestModelFromDocument(doc)); } protected GuestModel createGuestModelFromDocument(Document doc) { return new GuestModel(doc.getString("publicId"), doc.getString("name")); } protected MongoCollection<Document> getGuestModelCollection() { return getBookingDb().getCollection("guestmodel"); } protected MongoDatabase getBookingDb() { return mongoClient.getDatabase("booking"); } public List<GuestModel> findAllGuests() { MongoCollection<Document> guestModelCollection = getGuestModelCollection(); List<GuestModel> result = new ArrayList<>(); guestModelCollection.find().forEach((Consumer<Document>) document -> { result.add(createGuestModelFromDocument(document)); }); return result; } }
package commonapi import ( "encoding/json" "net/url" "github.com/pinpt/agent/integrations/pkg/mutate" "github.com/pinpt/agent/pkg/requests" "github.com/pinpt/integration-sdk/work" ) func AddComment(qc QueryContext, issueID, body string) (_ *work.IssueComment, rerr error) { if qc.IsOnPremise { return addCommentOnPremise(qc, issueID, body) } return addCommentCloud(qc, issueID, body) } // AddComment adds a comment to issueID // currently adding body as simple unformatted text // to support formatting need to use Atlassian Document Format // haven't found a way to pass text with atlassian tags, such as {code} // https://developer.atlassian.com/cloud/jira/platform/apis/document/structure/ func addCommentCloud(qc QueryContext, issueID, body string) (_ *work.IssueComment, rerr error) { qc.Logger.Info("adding comment (cloud)", "issue", issueID, "body", body) content := []map[string]interface{}{ { "type": "paragraph", "content": []map[string]interface{}{ { "type": "text", "text": body, }, }, }, } type Body struct { Type string `json:"type"` Version int `json:"version"` Content interface{} `json:"content"` } reqObj := struct { Body Body `json:"body"` }{ Body: Body{ Type: "doc", Version: 1, Content: content, }, } params := url.Values{} params.Add("expand", IssueCommentsExpandParam) var res CommentResponse req := requests.Request{} req.Method = "POST" req.URL = qc.Req.URL("issue/" + issueID + "/comment") req.Query = params var err error req.Body, err = json.Marshal(reqObj) if err != nil { rerr = err return } _, err = qc.Req.JSON(req, &res) if err != nil { rerr = err return } return ConvertComment(qc, res, issueID, nil) } func addCommentOnPremise(qc QueryContext, issueID, body string) (_ *work.IssueComment, rerr error) { qc.Logger.Info("adding comment (on_premise)", "issue", issueID, "body", body) reqObj := struct { Body string `json:"body"` }{ Body: body, } params := url.Values{} params.Add("expand", IssueCommentsExpandParam) var res CommentResponse req := requests.Request{} 
req.Method = "POST" req.URL = qc.Req.URL("issue/" + issueID + "/comment") req.Query = params var err error req.Body, err = json.Marshal(reqObj) if err != nil { rerr = err return } _, err = qc.Req.JSON(req, &res) if err != nil { rerr = err return } return ConvertComment(qc, res, issueID, nil) } func mutJSONReq(qc QueryContext, method string, uri string, body interface{}, res interface{}) error { req := requests.Request{} req.Method = method req.URL = qc.Req.URL(uri) var err error req.Body, err = json.Marshal(body) if err != nil { return err } _, err = qc.Req.JSON(req, &res) if err != nil { return err } return nil } func EditTitle(qc QueryContext, issueID, title string) error { qc.Logger.Info("editing issue title", "issue", issueID, "title", title) reqObj := struct { Fields struct { Summary string `json:"summary"` } `json:"fields"` }{} reqObj.Fields.Summary = title return mutJSONReq(qc, "PUT", "issue/"+issueID, reqObj, nil) } func EditPriority(qc QueryContext, issueID, priorityID string) error { qc.Logger.Info("editing issue priority", "issue", issueID, "priority_id", priorityID) reqObj := struct { Fields struct { Priority struct { ID string `json:"id"` } `json:"priority"` } `json:"fields"` }{} reqObj.Fields.Priority.ID = priorityID return mutJSONReq(qc, "PUT", "issue/"+issueID, reqObj, nil) } type issueTransition struct { ID string `json:"id"` To struct { ID string `json:"id"` Name string `json:"name"` } `json:"to"` Fields map[string]Field `json:"fields"` } type Field struct { Key string `json:"key"` Name string `json:"name"` Required bool `json:"required"` AllowedValues []AllowedValue `json:"allowedValues"` } type AllowedValue struct { ID string `json:"id"` Name string `json:"name"` } func getIssueTransitions(qc QueryContext, issueID string) (res []issueTransition, rerr error) { var obj struct { Transitions []issueTransition `json:"transitions"` } params := url.Values{} params.Add("expand", "transitions.fields") req := requests.Request{} req.Method = "GET" req.URL 
= qc.Req.URL("issue/" + issueID + "/transitions") req.Query = params _, err := qc.Req.JSON(req, &obj) if err != nil { rerr = err return } return obj.Transitions, err } func GetIssueTransitions(qc QueryContext, issueID string) (res []mutate.IssueTransition, rerr error) { res0, err := getIssueTransitions(qc, issueID) if err != nil { rerr = err return } for _, iss0 := range res0 { iss := mutate.IssueTransition{} iss.ID = iss0.ID iss.Name = iss0.To.Name for _, f0 := range iss0.Fields { if f0.Key != "resolution" { if f0.Required { qc.Logger.Warn("transition has a required field that is not resolution, we don't support that yet, transition will happen anyway, but field will not by filled", "k", f0.Key, "n", f0.Name) } continue } f := mutate.IssueTransitionField{} f.ID = f0.Key f.Name = f0.Name f.Required = f0.Required for _, av0 := range f0.AllowedValues { av := mutate.AllowedValue{} av.ID = av0.Name // jira uses name when setting value, not id, pass name as id here av.Name = av0.Name f.AllowedValues = append(f.AllowedValues, av) } iss.Fields = append(iss.Fields, f) } res = append(res, iss) } return } type transitionFieldValue struct { Name string `json:"name"` } func EditStatus(qc QueryContext, issueID, transitionID string, fieldValues map[string]string) error { qc.Logger.Info("editing issue status", "issue", issueID) reqObj := struct { Transition struct { ID string `json:"id"` } `json:"transition"` Fields map[string]transitionFieldValue `json:"fields"` }{} reqObj.Transition.ID = transitionID m := map[string]transitionFieldValue{} for k, v := range fieldValues { m[k] = transitionFieldValue{Name: v} } reqObj.Fields = m qc.Logger.Info("seting obj", "v", reqObj) return mutJSONReq(qc, "POST", "issue/"+issueID+"/transitions", reqObj, nil) } func AssignUser(qc QueryContext, issueID, accountID string) error { if qc.IsOnPremise { return assignUserOnPremise(qc, issueID, accountID) } return assignUserCloud(qc, issueID, accountID) } func assignUserCloud(qc QueryContext, issueID, 
accountID string) error { qc.Logger.Info("change issue assignee (cloud)", "issue", issueID, "account_id", accountID) reqObj := struct { AccountID string `json:"accountId,omitempty"` }{} reqObj.AccountID = accountID return mutJSONReq(qc, "PUT", "issue/"+issueID+"/assignee", reqObj, nil) } func assignUserOnPremise(qc QueryContext, issueID, accountKey string) error { qc.Logger.Info("change issue assignee (on_premise)", "issue", issueID, "account_key", accountKey) name := "" if accountKey != "" { var err error name, err = getUsernameByKey(qc, accountKey) if err != nil { return err } } reqObj := struct { Name string `json:"name,omitempty"` }{} reqObj.Name = name return mutJSONReq(qc, "PUT", "issue/"+issueID+"/assignee", reqObj, nil) } func getUsernameByKey(qc QueryContext, key string) (username string, rerr error) { q := url.Values{} q.Set("key", key) var res struct { Name string `json:"name"` } err := qc.Req.Get("user", q, &res) if err != nil { rerr = err return } return res.Name, nil }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include <algorithm> #include <cstddef> #include <utility> #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging.h" using std::size_t; namespace arrow { static std::vector<std::string> UnorderedMapKeys( const std::unordered_map<std::string, std::string>& map) { std::vector<std::string> keys; keys.reserve(map.size()); for (const auto& pair : map) { keys.push_back(pair.first); } return keys; } static std::vector<std::string> UnorderedMapValues( const std::unordered_map<std::string, std::string>& map) { std::vector<std::string> values; values.reserve(map.size()); for (const auto& pair : map) { values.push_back(pair.second); } return values; } KeyValueMetadata::KeyValueMetadata() : keys_(), values_() {} KeyValueMetadata::KeyValueMetadata( const std::unordered_map<std::string, std::string>& map) : keys_(UnorderedMapKeys(map)), values_(UnorderedMapValues(map)) { DCHECK_EQ(keys_.size(), values_.size()); } KeyValueMetadata::KeyValueMetadata(const std::vector<std::string>& keys, const std::vector<std::string>& values) : keys_(keys), values_(values) { DCHECK_EQ(keys.size(), values.size()); } void KeyValueMetadata::ToUnorderedMap( std::unordered_map<std::string, std::string>* 
out) const { DCHECK_NE(out, nullptr); const int64_t n = size(); out->reserve(n); for (int64_t i = 0; i < n; ++i) { out->insert(std::make_pair(key(i), value(i))); } } void KeyValueMetadata::Append(const std::string& key, const std::string& value) { keys_.push_back(key); values_.push_back(value); } void KeyValueMetadata::reserve(int64_t n) { DCHECK_GE(n, 0); const auto m = static_cast<size_t>(n); keys_.reserve(m); values_.reserve(m); } int64_t KeyValueMetadata::size() const { DCHECK_EQ(keys_.size(), values_.size()); return static_cast<int64_t>(keys_.size()); } std::string KeyValueMetadata::key(int64_t i) const { DCHECK_GE(i, 0); DCHECK_LT(static_cast<size_t>(i), keys_.size()); return keys_[i]; } std::string KeyValueMetadata::value(int64_t i) const { DCHECK_GE(i, 0); DCHECK_LT(static_cast<size_t>(i), values_.size()); return values_[i]; } std::shared_ptr<KeyValueMetadata> KeyValueMetadata::Copy() const { return std::make_shared<KeyValueMetadata>(keys_, values_); } bool KeyValueMetadata::Equals(const KeyValueMetadata& other) const { return size() == other.size() && std::equal(keys_.cbegin(), keys_.cend(), other.keys_.cbegin()) && std::equal(values_.cbegin(), values_.cend(), other.values_.cbegin()); } } // namespace arrow
<reponame>xasync/lupin<gh_stars>0 __title__ = 'damson' __version__ = '0.6.0' __description__ = 'A simple python validation library.' __author__ = 'xasync' __author_email__ = '<EMAIL>' __license__ = 'Apache 2.0' __copyright__ = 'Copyright Since 2019 xasync.com'
<gh_stars>1-10 import type { Settings } from "@google-cloud/firestore"; import type { LogLevel } from "~/utils"; export interface FirestoreDataOptions { /** * Provide a list of paths to import/export. Documents not belonging * to the specified paths are ignored. If none are provided, all paths * are explored for documents to import/export. */ paths?: string[]; /** * Provide a list of patterns (regexes) which filter which * documents to import/export. If more than one pattern is * provided, a document's path must match at least one pattern * to be imported/exported. */ patterns?: RegExp[]; /** * Limit the subcollection depth to import/export. A document * in a root collection has a depth of 0. Subcollections from * a document in a root collection has a depth of 1, and so on. * * If not provided, all subcollections are imported/exported. */ depth?: number; /** * Specify the number of worker threads to use. Defaults to * the number of logical CPUs available as reported by the * [Node.js API](https://nodejs.org/api/os.html#oscpus) when * exporting, and the number of chunks to read when importing. * * When importing and exporting programatically, it is recommended * to keep this value consistent. Also ensure that this number * is not too much higher the actual cores available on your * machine as it probably won't be of much a benefit. */ workers?: number; /** * Specify the logging level, or provide a custom list of log * levels that should be logged. */ logLevel?: "silent" | "info" | "debug" | "verbose" | LogLevel[]; } export interface FirestoreConnectionOptions { /** * The Firebase project to use. */ project: string; /** * The path to the service account credentials for connecting * to Firestore. * * Use either `keyfile`, `credentials` or `emulator`. Has the * lowest precedence. */ keyfile?: string; /** * Service account credentials for connecting to Firestore. * * Use either `keyfile`, `credentials` or `emulator`. Takes * precendence over `keyfile`. 
*/ credentials?: Settings["credentials"]; /** * Instead of connecting to a Firestore project, provide the * host of the local Firestore emulator to connect to. * * Use either `keyfile`, `credentials` or `emulator`. Takes * precendence over `keyfile` and `credentials`. */ emulator?: string; }
/* * This method should be called within the constructor, * so that the configuration of provider related setting can be updated * and kept at IExecutionProvider level. */ void CUDAExecutionProvider::UpdateProviderOptionsInfo() { UnorderedMapStringToString options; options["device_id"] = std::to_string(device_id_); options["cuda_mem_limit"] = std::to_string(cuda_mem_limit_); std::string strategy; if (arena_extend_strategy_ == ArenaExtendStrategy::kNextPowerOfTwo) { strategy = "kNextPowerOfTwo"; } else if (arena_extend_strategy_ == ArenaExtendStrategy::kSameAsRequested) { strategy = "kSameAsRequested"; } else { strategy = "unknown"; } options["arena_extend_strategy"] = strategy; IExecutionProvider::SetProviderOptions(options); }
def recover_security_br(self): if not CONF.SECURITYGROUP.security_bridge_mapping: LOG.warning(_LW("Security bridge mappings not configured.")) raise SystemExit(1) secbr_list = (CONF.SECURITYGROUP.security_bridge_mapping).split(':') secbr_name = secbr_list[0] secbr_phyname = secbr_list[1] self.sec_br = ovs_lib.OVSBridge(secbr_name) if not self.sec_br.bridge_exists(secbr_name): LOG.error(_LE("Security bridge does not exist. Terminating the " "agent!")) raise SystemExit(1) self.phy_ofport = self.sec_br.get_port_ofport(secbr_phyname) if not self.phy_ofport: LOG.error(_LE("Physical bridge patch port not available on " "Security bridge %s. Terminating the " "agent!"), secbr_name) raise SystemExit(1) br_name = self.sec_br.get_bridge_for_iface( ovsvapp_const.SEC_TO_INT_PATCH) if br_name is not None: if br_name != secbr_name: br = ovs_lib.OVSBridge(br_name) br.delete_port(ovsvapp_const.SEC_TO_INT_PATCH) self.sec_br.add_patch_port( ovsvapp_const.SEC_TO_INT_PATCH, ovsvapp_const.INT_TO_SEC_PATCH) patch_sec_int_ofport = self.sec_br.get_port_ofport( ovsvapp_const.SEC_TO_INT_PATCH) self.patch_sec_ofport = self.int_br.get_port_ofport( ovsvapp_const.INT_TO_SEC_PATCH) if int(patch_sec_int_ofport) < 0 or int(self.patch_sec_ofport) < 0: LOG.error(_LE("Failed to find OVS patch port. Cannot have " "Security enabled on this agent. " "Terminating the agent!")) raise SystemExit(1) LOG.info(_LI("Security bridge successfully recovered."))
<gh_stars>0 module Android.Regex.Glob ( globToRegex , isPattern ) where globToRegex :: String -> String globToRegex cs = '^' : _globToRegex cs ++ "$" _globToRegex :: String -> String _globToRegex "" = "" _globToRegex ('*':cs) = ".*" ++ _globToRegex cs _globToRegex ('?':cs) = '.' : _globToRegex cs _globToRegex ('[':'!':c:cs) = "[^" ++ c : _charClass cs _globToRegex ('[':c:cs) = '[' : c : _charClass cs _globToRegex ('[':_) = error "unterminated character class" _globToRegex (c:cs) = _escape c ++ _globToRegex cs _escape :: Char -> String _escape c | c `elem` regexChars = '\\' : [c] | otherwise = [c] where regexChars = "\\+()^$.{}]|" _charClass :: String -> String _charClass (']':cs) = ']' : _globToRegex cs _charClass (c:cs) = c : _charClass cs _charClass [] = error "unterminated character class" -- | is glob expression ? isPattern :: String -> Bool isPattern = any (`elem` "[?*")
Análise da concentração mássica de materiais particulados provenientes da combustão de diesel e biodiesel Analysis of the mass concentration of particulate matter from the combustion of diesel and biodiesel O biodiesel tem se apresentado como uma excelente fonte de energia alternativa, podendo substituir o diesel mineral. Esse biocombustivel e considerado menos impactante, pois e uma alternativa renovavel, emite menos poluentes durante sua queima e, alem disso, possui um processo de producao mais limpo, trazendo beneficios ambientais. O objetivo deste estudo foi avaliar as concentracoes massicas de material particulado emitido a partir da combustao de diesel (B0) e biodiesel de soja (B100) sob duas diferentes cargas, 500 W e 1500 W, em um motor ciclo diesel de bancada. A concentracao de material particulado obtida foi menor com o uso de biodiesel, que apresentou concentracoes de 438 mg m-3 e 458 mg m-3 enquanto que para o diesel as concentracoes foram de 837 mg m-3 e 848 mg m-3 para as duas cargas, respectivamente. Os resultados indicaram que a concentracao de material particulado e diretamente proporcional a carga aplicada no motor, ou seja, na carga de 1500 W, obteve-se um aumento na concentracao de material particulado de 1,3% com o uso de diesel e 4,4% com o uso de biodiesel. Esses resultados indicam que o uso do biodiesel de soja contribui para a reducao da poluicao atmosferica. Abstract The biodiesel has been showed as an excellent source of alternative energy, can replace mineral diesel. This biofuel is considered less harmful, because it is a renewable alternative, emits fewer pollutants during its burning and also has a cleaner production process, bringing environmental benefits. The aim of this study was to evaluate the mass concentrations of particulate material emitted from the combustion of diesel (B0) and soybean biodiesel (B100) under different load conditions (500 W and 1500 W) in a cycle diesel engine bench. 
The results indicated that the concentration of particulate material is directly proportional to the load applied to the engine; that is, comparing the 500 W and 1500 W loads, the latter showed an increase in the concentration of particulate material of 1.3% using diesel and 4.4% using biodiesel. Moreover, between the two fuels, the concentration of particulate material was lower with the use of biodiesel, which showed concentrations of 438 mg m-3 and 458 mg m-3, while for diesel the concentrations were 837 mg m-3 and 848 mg m-3. These results indicate that the use of soybean biodiesel contributes to the reduction of atmospheric pollution.
/** * com.lordjoe.Utilities.PropertyDumper * Written largely to test Runtime.exec this dumpe the System properties * @author Steve Lewis */ public class PropertyDumper { public static NameValue[] getSystemProperties() { Properties Props = System.getProperties(); return (Util.mapToNameValues(Props)); } public static void dumpSystemProperties() { dumpSystemProperties(System.out); } public static void dumpSystemProperties(PrintStream out) { NameValue[] values = getSystemProperties(); for (int i = 0; i < values.length; i++) { out.print(values[i].m_Name); out.print("="); out.println(values[i].m_Value); } } public static void dumpSystemProperties(String[] items) { dumpSystemProperties(items, System.out); } public static void dumpSystemProperties(String[] items, PrintStream out) { for (int i = 0; i < items.length; i++) { out.print(items[i]); out.print("="); out.println(System.getProperty(items[i])); } } public static void main(String[] args) { for (int i = 0; i < 20; i++) { dumpSystemProperties(); ThreadUtilities.waitFor(2000); } } }
H, N = map(int, input().split()) dp = [[0] + [1e20] * (H) for _ in range(N)] for i in range(N): A, B = map(int, (input().split())) for j in range(1, H + 1): dp[i][j] = min(dp[i - 1][j], dp[i][max(j - A, 0)] + B) print(dp[-1][-1])
#pragma once #include "common.h" TEmethod tesNULL[] = { { 0, NULL } }; TEmethod tesBITMAP[] = { { (VT_I4 << TE_VT) + offsetof(BITMAP, bmType), "bmType" }, { (VT_I4 << TE_VT) + offsetof(BITMAP, bmWidth), "bmWidth" }, { (VT_I4 << TE_VT) + offsetof(BITMAP, bmHeight), "bmHeight" }, { (VT_I4 << TE_VT) + offsetof(BITMAP, bmWidthBytes), "bmWidthBytes" }, { (VT_UI2 << TE_VT) + offsetof(BITMAP, bmPlanes), "bmPlanes" }, { (VT_UI2 << TE_VT) + offsetof(BITMAP, bmBitsPixel), "bmBitsPixel" }, { (VT_PTR << TE_VT) + offsetof(BITMAP, bmBits), "bmBits" }, { 0, NULL } }; TEmethod tesCHOOSECOLOR[] = { { (VT_I4 << TE_VT) + offsetof(CHOOSECOLOR, lStructSize), "lStructSize" }, { (VT_PTR << TE_VT) + offsetof(CHOOSECOLOR, hwndOwner), "hwndOwner" }, { (VT_PTR << TE_VT) + offsetof(CHOOSECOLOR, hInstance), "hInstance" }, { (VT_I4 << TE_VT) + offsetof(CHOOSECOLOR, rgbResult), "rgbResult" }, { (VT_PTR << TE_VT) + offsetof(CHOOSECOLOR, lpCustColors), "lpCustColors" }, { (VT_I4 << TE_VT) + offsetof(CHOOSECOLOR, Flags), "Flags" }, { (VT_PTR << TE_VT) + offsetof(CHOOSECOLOR, lCustData), "lCustData" }, { (VT_PTR << TE_VT) + offsetof(CHOOSECOLOR, lpfnHook), "lpfnHook" }, { (VT_BSTR << TE_VT) + offsetof(CHOOSECOLOR, lpTemplateName), "lpTemplateName" }, { 0, NULL } }; TEmethod tesCHOOSEFONT[] = { { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, lStructSize), "lStructSize" }, { (VT_PTR << TE_VT) + offsetof(CHOOSEFONT, hwndOwner), "hwndOwner" }, { (VT_PTR << TE_VT) + offsetof(CHOOSEFONT, hDC), "hDC" }, { (VT_PTR << TE_VT) + offsetof(CHOOSEFONT, lpLogFont), "lpLogFont" }, { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, iPointSize), "iPointSize" }, { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, Flags), "Flags" }, { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, rgbColors), "rgbColors" }, { (VT_PTR << TE_VT) + offsetof(CHOOSEFONT, lCustData), "lCustData" }, { (VT_PTR << TE_VT) + offsetof(CHOOSEFONT, lpfnHook), "lpfnHook" }, { (VT_BSTR << TE_VT) + offsetof(CHOOSEFONT, lpTemplateName), "lpTemplateName" }, { (VT_PTR << TE_VT) + 
offsetof(CHOOSEFONT, hInstance), "hInstance" }, { (VT_BSTR << TE_VT) + offsetof(CHOOSEFONT, lpszStyle), "lpszStyle" }, { (VT_UI2 << TE_VT) + offsetof(CHOOSEFONT, nFontType), "nFontType" }, { (VT_UI2 << TE_VT) + offsetof(CHOOSEFONT, ___MISSING_ALIGNMENT__), "___MISSING_ALIGNMENT__" }, { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, nSizeMin), "nSizeMin" }, { (VT_I4 << TE_VT) + offsetof(CHOOSEFONT, nSizeMax), "nSizeMax" }, { 0, NULL } }; TEmethod tesCOPYDATASTRUCT[] = { { (VT_PTR << TE_VT) + offsetof(COPYDATASTRUCT, dwData), "dwData" }, { (VT_I4 << TE_VT) + offsetof(COPYDATASTRUCT, cbData), "cbData" }, { (VT_PTR << TE_VT) + offsetof(COPYDATASTRUCT, lpData), "lpData" }, { 0, NULL } }; TEmethod tesDIBSECTION[] = { { (VT_PTR << TE_VT) + offsetof(DIBSECTION, dsBm), "dsBm" }, { (VT_PTR << TE_VT) + offsetof(DIBSECTION, dsBmih), "dsBmih" }, { (VT_I4 << TE_VT) + offsetof(DIBSECTION, dsBitfields), "dsBitfields0" }, { (VT_I4 << TE_VT) + offsetof(DIBSECTION, dsBitfields) + sizeof(DWORD), "dsBitfields1" }, { (VT_I4 << TE_VT) + offsetof(DIBSECTION, dsBitfields) + sizeof(DWORD) * 2, "dsBitfields2" }, { (VT_PTR << TE_VT) + offsetof(DIBSECTION, dshSection), "dshSection" }, { (VT_I4 << TE_VT) + offsetof(DIBSECTION, dsOffset), "dsOffset" }, { 0, NULL } }; TEmethod tesEXCEPINFO[] = { { (VT_UI2 << TE_VT) + offsetof(EXCEPINFO, wCode), "wCode" }, { (VT_UI2 << TE_VT) + offsetof(EXCEPINFO, wReserved), "wReserved" }, { (VT_BSTR << TE_VT) + offsetof(EXCEPINFO, bstrSource), "bstrSource" }, { (VT_BSTR << TE_VT) + offsetof(EXCEPINFO, bstrDescription), "bstrDescription" }, { (VT_BSTR << TE_VT) + offsetof(EXCEPINFO, bstrHelpFile), "bstrHelpFile" }, { (VT_I4 << TE_VT) + offsetof(EXCEPINFO, dwHelpContext), "dwHelpContext" }, { (VT_PTR << TE_VT) + offsetof(EXCEPINFO, pvReserved), "pvReserved" }, { (VT_PTR << TE_VT) + offsetof(EXCEPINFO, pfnDeferredFillIn), "pfnDeferredFillIn" }, { (VT_I4 << TE_VT) + offsetof(EXCEPINFO, scode), "scode" }, { 0, NULL } }; TEmethod tesFINDREPLACE[] = { { (VT_I4 << TE_VT) + 
offsetof(FINDREPLACE, lStructSize), "lStructSize" }, { (VT_PTR << TE_VT) + offsetof(FINDREPLACE, hwndOwner), "hwndOwner" }, { (VT_PTR << TE_VT) + offsetof(FINDREPLACE, hInstance), "hInstance" }, { (VT_I4 << TE_VT) + offsetof(FINDREPLACE, Flags), "Flags" }, { (VT_BSTR << TE_VT) + offsetof(FINDREPLACE, lpstrFindWhat), "lpstrFindWhat" }, { (VT_BSTR << TE_VT) + offsetof(FINDREPLACE, lpstrReplaceWith), "lpstrReplaceWith" }, { (VT_UI2 << TE_VT) + offsetof(FINDREPLACE, wFindWhatLen), "wFindWhatLen" }, { (VT_UI2 << TE_VT) + offsetof(FINDREPLACE, wReplaceWithLen), "wReplaceWithLen" }, { (VT_PTR << TE_VT) + offsetof(FINDREPLACE, lCustData), "lCustData" }, { (VT_PTR << TE_VT) + offsetof(FINDREPLACE, lpfnHook), "lpfnHook" }, { (VT_BSTR << TE_VT) + offsetof(FINDREPLACE, lpTemplateName), "lpTemplateName" }, { 0, NULL } }; TEmethod tesFOLDERSETTINGS[] = { { (VT_I4 << TE_VT) + offsetof(FOLDERSETTINGS, ViewMode), "ViewMode" }, { (VT_I4 << TE_VT) + offsetof(FOLDERSETTINGS, fFlags), "fFlags" }, { (VT_I4 << TE_VT) + (SB_Options - 1) * 4, "Options" }, { (VT_I4 << TE_VT) + (SB_ViewFlags - 1) * 4, "ViewFlags" }, { (VT_I4 << TE_VT) + (SB_IconSize - 1) * 4, "ImageSize" }, { 0, NULL } }; TEmethod tesHDITEM[] = { { (VT_I4 << TE_VT) + offsetof(HDITEM, mask), "mask" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, cxy), "cxy" }, { (VT_BSTR << TE_VT) + offsetof(HDITEM, pszText), "pszText" }, { (VT_PTR << TE_VT) + offsetof(HDITEM, hbm), "hbm" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, cchTextMax), "cchTextMax" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, fmt), "fmt" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, lParam), "lParam" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, iImage), "iImage" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, iOrder), "iOrder" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, type), "type" }, { (VT_PTR << TE_VT) + offsetof(HDITEM, pvFilter), "pvFilter" }, { (VT_I4 << TE_VT) + offsetof(HDITEM, state), "state" }, { 0, NULL } }; TEmethod tesICONINFO[] = { { (VT_BOOL << TE_VT) + offsetof(ICONINFO, fIcon), 
"fIcon" }, { (VT_I4 << TE_VT) + offsetof(ICONINFO, xHotspot), "xHotspot" }, { (VT_I4 << TE_VT) + offsetof(ICONINFO, yHotspot), "yHotspot" }, { (VT_PTR << TE_VT) + offsetof(ICONINFO, hbmMask), "hbmMask" }, { (VT_PTR << TE_VT) + offsetof(ICONINFO, hbmColor), "hbmColor" }, { 0, NULL } }; TEmethod tesICONMETRICS[] = { { (VT_I4 << TE_VT) + offsetof(ICONMETRICS, iHorzSpacing), "iHorzSpacing" }, { (VT_I4 << TE_VT) + offsetof(ICONMETRICS, iVertSpacing), "iVertSpacing" }, { (VT_I4 << TE_VT) + offsetof(ICONMETRICS, iTitleWrap), "iTitleWrap" }, { (VT_PTR << TE_VT) + offsetof(ICONMETRICS, lfFont), "lfFont" }, { 0, NULL } }; TEmethod tesKEYBDINPUT[] = { { (VT_I4 << TE_VT), "type" }, { (VT_I4 << TE_VT) + offsetof(KEYBDINPUT, wVk), "wVk" }, { (VT_I4 << TE_VT) + offsetof(KEYBDINPUT, wScan), "wScan" }, { (VT_I4 << TE_VT) + offsetof(KEYBDINPUT, dwFlags), "dwFlags" }, { (VT_I4 << TE_VT) + offsetof(KEYBDINPUT, time), "time" }, { (VT_PTR << TE_VT) + offsetof(KEYBDINPUT, dwExtraInfo), "dwExtraInfo" }, { 0, NULL } }; TEmethod tesLOGFONT[] = { { (VT_I4 << TE_VT) + offsetof(LOGFONT, lfHeight), "lfHeight" }, { (VT_I4 << TE_VT) + offsetof(LOGFONT, lfWidth), "lfWidth" }, { (VT_I4 << TE_VT) + offsetof(LOGFONT, lfEscapement), "lfEscapement" }, { (VT_I4 << TE_VT) + offsetof(LOGFONT, lfOrientation), "lfOrientation" }, { (VT_I4 << TE_VT) + offsetof(LOGFONT, lfWeight), "lfWeight" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfItalic), "lfItalic" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfUnderline), "lfUnderline" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfStrikeOut), "lfStrikeOut" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfCharSet), "lfCharSet" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfOutPrecision), "lfOutPrecision" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfClipPrecision), "lfClipPrecision" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfQuality), "lfQuality" }, { (VT_UI1 << TE_VT) + offsetof(LOGFONT, lfPitchAndFamily), "lfPitchAndFamily" }, { (VT_LPWSTR << TE_VT) + offsetof(LOGFONT, 
lfFaceName), "lfFaceName" }, { 0, NULL } }; TEmethod tesLVBKIMAGE[] = { { (VT_I4 << TE_VT) + offsetof(LVBKIMAGE, ulFlags), "ulFlags" }, { (VT_PTR << TE_VT) + offsetof(LVBKIMAGE, hbm), "hbm" }, { (VT_BSTR << TE_VT) + offsetof(LVBKIMAGE, pszImage), "pszImage" }, { (VT_I4 << TE_VT) + offsetof(LVBKIMAGE, cchImageMax), "cchImageMax" }, { (VT_I4 << TE_VT) + offsetof(LVBKIMAGE, xOffsetPercent), "xOffsetPercent" }, { (VT_I4 << TE_VT) + offsetof(LVBKIMAGE, yOffsetPercent), "yOffsetPercent" }, { 0, NULL } }; TEmethod tesLVFINDINFO[] = { { (VT_I4 << TE_VT) + offsetof(LVFINDINFO, flags), "flags" }, { (VT_BSTR << TE_VT) + offsetof(LVFINDINFO, psz), "psz" }, { (VT_I4 << TE_VT) + offsetof(LVFINDINFO, lParam), "lParam" }, { (VT_CY << TE_VT) + offsetof(LVFINDINFO, pt), "pt" }, { (VT_I4 << TE_VT) + offsetof(LVFINDINFO, vkDirection), "vkDirection" }, { 0, NULL } }; TEmethod tesLVGROUP[] = { { (VT_I4 << TE_VT) + offsetof(LVGROUP, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, mask), "mask" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszHeader), "pszHeader" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchHeader), "cchHeader" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszFooter), "pszFooter" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchFooter), "cchFooter" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, iGroupId), "iGroupId" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, stateMask), "stateMask" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, state), "state" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, uAlign), "uAlign" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszSubtitle), "pszSubtitle" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchSubtitle), "cchSubtitle" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszTask), "pszTask" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchTask), "cchTask" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszDescriptionTop), "pszDescriptionTop" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchDescriptionTop), "cchDescriptionTop" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, 
pszDescriptionBottom), "pszDescriptionBottom" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchDescriptionBottom), "cchDescriptionBottom" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, iTitleImage), "iTitleImage" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, iExtendedImage), "iExtendedImage" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, iFirstItem), "iFirstItem" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cItems), "cItems" }, { (VT_BSTR << TE_VT) + offsetof(LVGROUP, pszSubsetTitle), "pszSubsetTitle" }, { (VT_I4 << TE_VT) + offsetof(LVGROUP, cchSubsetTitle), "cchSubsetTitle" }, { 0, NULL } }; TEmethod tesLVHITTESTINFO[] = { { (VT_CY << TE_VT) + offsetof(LVHITTESTINFO, pt), "pt" }, { (VT_I4 << TE_VT) + offsetof(LVHITTESTINFO, flags), "flags" }, { (VT_I4 << TE_VT) + offsetof(LVHITTESTINFO, iItem), "iItem" }, { (VT_I4 << TE_VT) + offsetof(LVHITTESTINFO, iSubItem ), "iSubItem" }, { (VT_I4 << TE_VT) + offsetof(LVHITTESTINFO, iGroup), "iGroup" }, { 0, NULL } }; TEmethod tesLVITEM[] = { { (VT_I4 << TE_VT) + offsetof(LVITEM, mask), "mask" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iItem), "iItem" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iSubItem), "iSubItem" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, state), "state" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, stateMask), "stateMask" }, { (VT_BSTR << TE_VT) + offsetof(LVITEM, pszText), "pszText" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, cchTextMax), "cchTextMax" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iImage), "iImage" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, lParam), "lParam" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iIndent), "iIndent" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iGroupId), "iGroupId" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, cColumns), "cColumns" }, { (VT_PTR << TE_VT) + offsetof(LVITEM, puColumns), "puColumns" }, { (VT_PTR << TE_VT) + offsetof(LVITEM, piColFmt), "piColFmt" }, { (VT_I4 << TE_VT) + offsetof(LVITEM, iGroup), "iGroup" }, { 0, NULL } }; TEmethod tesMENUITEMINFO[] = { { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, cbSize), 
"cbSize" }, { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, fMask), "fMask" }, { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, fType), "fType" }, { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, fState), "fState" }, { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, wID), "wID" }, { (VT_PTR << TE_VT) + offsetof(MENUITEMINFO, hSubMenu), "hSubMenu" }, { (VT_PTR << TE_VT) + offsetof(MENUITEMINFO, hbmpChecked), "hbmpChecked" }, { (VT_PTR << TE_VT) + offsetof(MENUITEMINFO, hbmpUnchecked), "hbmpUnchecked" }, { (VT_PTR << TE_VT) + offsetof(MENUITEMINFO, dwItemData), "dwItemData" }, { (VT_BSTR << TE_VT) + offsetof(MENUITEMINFO, dwTypeData), "dwTypeData" }, { (VT_I4 << TE_VT) + offsetof(MENUITEMINFO, cch), "cch" }, { (VT_PTR << TE_VT) + offsetof(MENUITEMINFO, hbmpItem), "hbmpItem" }, { 0, NULL } }; TEmethod tesMONITORINFOEX[] = { { (VT_I4 << TE_VT), "cbSize" }, { (VT_CARRAY << TE_VT) + offsetof(MONITORINFOEX, rcMonitor), "rcMonitor" }, { (VT_CARRAY << TE_VT) + offsetof(MONITORINFOEX, rcWork), "rcWork" }, { (VT_I4 << TE_VT) + offsetof(MONITORINFOEX, dwFlags), "dwFlags" }, { (VT_LPWSTR << TE_VT) + offsetof(MONITORINFOEX, szDevice), "szDevice" }, { 0, NULL } }; TEmethod tesMOUSEINPUT[] = { { (VT_I4 << TE_VT), "type" }, { (VT_I4 << TE_VT) + offsetof(MOUSEINPUT, dx), "dx" }, { (VT_I4 << TE_VT) + offsetof(MOUSEINPUT, dy), "dy" }, { (VT_I4 << TE_VT) + offsetof(MOUSEINPUT, mouseData), "mouseData" }, { (VT_I4 << TE_VT) + offsetof(MOUSEINPUT, dwFlags), "dwFlags" }, { (VT_I4 << TE_VT) + offsetof(MOUSEINPUT, time), "time" }, { (VT_PTR << TE_VT) + offsetof(MOUSEINPUT, dwExtraInfo), "dwExtraInfo" }, { 0, NULL } }; TEmethod tesMSG[] = { { (VT_PTR << TE_VT) + offsetof(MSG, hwnd), "hwnd" }, { (VT_I4 << TE_VT) + offsetof(MSG, message), "message" }, { (VT_PTR << TE_VT) + offsetof(MSG, wParam), "wParam" }, { (VT_PTR << TE_VT) + offsetof(MSG, lParam), "lParam" }, { (VT_I4 << TE_VT) + offsetof(MSG, time), "time" }, { (VT_CY << TE_VT) + offsetof(MSG, pt), "pt" }, { 0, NULL } }; TEmethod tesNONCLIENTMETRICS[] = { { 
(VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iBorderWidth), "iBorderWidth" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iScrollWidth), "iScrollWidth" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iScrollHeight), "iScrollHeight" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iCaptionWidth), "iCaptionWidth" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iCaptionHeight), "iCaptionHeight" }, { (VT_PTR << TE_VT) + offsetof(NONCLIENTMETRICS, lfCaptionFont), "lfCaptionFont" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iSmCaptionWidth), "iSmCaptionWidth" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iSmCaptionHeight), "iSmCaptionHeight" }, { (VT_PTR << TE_VT) + offsetof(NONCLIENTMETRICS, lfSmCaptionFont), "lfSmCaptionFont" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iMenuWidth), "iMenuWidth" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iMenuHeight), "iMenuHeight" }, { (VT_PTR << TE_VT) + offsetof(NONCLIENTMETRICS, lfMenuFont), "lfMenuFont" }, { (VT_PTR << TE_VT) + offsetof(NONCLIENTMETRICS, lfStatusFont), "lfStatusFont" }, { (VT_PTR << TE_VT) + offsetof(NONCLIENTMETRICS, lfMessageFont), "lfMessageFont" }, { (VT_I4 << TE_VT) + offsetof(NONCLIENTMETRICS, iPaddedBorderWidth), "iPaddedBorderWidth" }, { 0, NULL } };
/* NOTIFYICONDATA field map (a duplicated "dwState" entry was removed). */
TEmethod tesNOTIFYICONDATA[] = { { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, cbSize), "cbSize" }, { (VT_PTR << TE_VT) + offsetof(NOTIFYICONDATA, hWnd), "hWnd" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, uID), "uID" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, uFlags), "uFlags" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, uCallbackMessage), "uCallbackMessage" }, { (VT_PTR << TE_VT) + offsetof(NOTIFYICONDATA, hIcon), "hIcon" }, { (VT_LPWSTR << TE_VT) + offsetof(NOTIFYICONDATA, szTip), "szTip" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, dwState), "dwState" }, { (VT_I4 <<
TE_VT) + offsetof(NOTIFYICONDATA, dwStateMask), "dwStateMask" }, { (VT_LPWSTR << TE_VT) + offsetof(NOTIFYICONDATA, szInfo), "szInfo" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, uTimeout), "uTimeout" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, uVersion), "uVersion" }, { (VT_LPWSTR << TE_VT) + offsetof(NOTIFYICONDATA, szInfoTitle), "szInfoTitle" }, { (VT_I4 << TE_VT) + offsetof(NOTIFYICONDATA, dwInfoFlags), "dwInfoFlags" }, { (VT_PTR << TE_VT) + offsetof(NOTIFYICONDATA, guidItem), "guidItem" }, { (VT_PTR << TE_VT) + offsetof(NOTIFYICONDATA, hBalloonIcon), "hBalloonIcon" }, { 0, NULL } }; TEmethod tesNMCUSTOMDRAW[] = { { (VT_PTR << TE_VT) + offsetof(NMCUSTOMDRAW, hdr), "hdr" }, { (VT_I4 << TE_VT) + offsetof(NMCUSTOMDRAW, dwDrawStage), "dwDrawStage" }, { (VT_PTR << TE_VT) + offsetof(NMCUSTOMDRAW, hdc), "hdc" }, { (VT_CARRAY << TE_VT) + offsetof(NMCUSTOMDRAW, rc), "rc" }, { (VT_PTR << TE_VT) + offsetof(NMCUSTOMDRAW, dwItemSpec), "dwItemSpec" }, { (VT_I4 << TE_VT) + offsetof(NMCUSTOMDRAW, uItemState), "uItemState" }, { (VT_PTR << TE_VT) + offsetof(NMCUSTOMDRAW, lItemlParam), "lItemlParam" }, { 0, NULL } }; TEmethod tesNMLVCUSTOMDRAW[] = { { (VT_PTR << TE_VT) + offsetof(NMLVCUSTOMDRAW, nmcd), "nmcd" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, clrText), "clrText" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, clrTextBk), "clrTextBk" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, iSubItem), "iSubItem" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, dwItemType), "dwItemType" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, clrFace), "clrFace" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, iIconEffect), "iIconEffect" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, iIconPhase), "iIconPhase" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, iPartId), "iPartId" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, iStateId), "iStateId" }, { (VT_CARRAY << TE_VT) + offsetof(NMLVCUSTOMDRAW, rcText), "rcText" }, { (VT_I4 << TE_VT) + offsetof(NMLVCUSTOMDRAW, uAlign), 
"uAlign" }, { 0, NULL } }; TEmethod tesNMTVCUSTOMDRAW[] = { { (VT_PTR << TE_VT) + offsetof(NMTVCUSTOMDRAW, nmcd), "nmcd" }, { (VT_I4 << TE_VT) + offsetof(NMTVCUSTOMDRAW, clrText), "clrText" }, { (VT_I4 << TE_VT) + offsetof(NMTVCUSTOMDRAW, clrTextBk), "clrTextBk" }, { (VT_I4 << TE_VT) + offsetof(NMTVCUSTOMDRAW, iLevel), "iLevel" }, { 0, NULL } }; TEmethod tesNMHDR[] = { { (VT_PTR << TE_VT) + offsetof(NMHDR, hwndFrom), "hwndFrom" }, { (VT_I4 << TE_VT) + offsetof(NMHDR, idFrom), "idFrom" }, { (VT_I4 << TE_VT) + offsetof(NMHDR, code), "code" }, { 0, NULL } }; TEmethod tesOSVERSIONINFOEX[] = { { (VT_I4 << TE_VT) + offsetof(OSVERSIONINFOEX, dwOSVersionInfoSize), "dwOSVersionInfoSize" }, { (VT_I4 << TE_VT) + offsetof(OSVERSIONINFOEX, dwMajorVersion), "dwMajorVersion" }, { (VT_I4 << TE_VT) + offsetof(OSVERSIONINFOEX, dwMinorVersion), "dwMinorVersion" }, { (VT_I4 << TE_VT) + offsetof(OSVERSIONINFOEX, dwBuildNumber), "dwBuildNumber" }, { (VT_I4 << TE_VT) + offsetof(OSVERSIONINFOEX, dwPlatformId), "dwPlatformId" }, { (VT_LPWSTR << TE_VT) + offsetof(OSVERSIONINFOEX, szCSDVersion), "szCSDVersion" }, { (VT_UI2 << TE_VT) + offsetof(OSVERSIONINFOEX, wServicePackMajor), "wServicePackMajor" }, { (VT_UI2 << TE_VT) + offsetof(OSVERSIONINFOEX, wServicePackMinor), "wServicePackMinor" }, { (VT_UI2 << TE_VT) + offsetof(OSVERSIONINFOEX, wSuiteMask), "wSuiteMask" }, { (VT_UI1 << TE_VT) + offsetof(OSVERSIONINFOEX, wProductType), "wProductType" }, { (VT_UI1 << TE_VT) + offsetof(OSVERSIONINFOEX, wReserved), "wReserved" }, { 0, NULL } }; TEmethod tesPAINTSTRUCT[] = { { (VT_PTR << TE_VT) + offsetof(PAINTSTRUCT, hdc), "hdc" }, { (VT_BOOL << TE_VT) + offsetof(PAINTSTRUCT, fErase), "fErase" }, { (VT_CARRAY << TE_VT) + offsetof(PAINTSTRUCT, rcPaint), "rcPaint" }, { (VT_BOOL << TE_VT) + offsetof(PAINTSTRUCT, fRestore), "fRestore" }, { (VT_BOOL << TE_VT) + offsetof(PAINTSTRUCT, fIncUpdate), "fIncUpdate" }, { (VT_UI1 << TE_VT) + offsetof(PAINTSTRUCT, rgbReserved), "rgbReserved" }, { 0, NULL } }; 
TEmethod tesPOINT[] = { { (VT_I4 << TE_VT) + offsetof(POINT, x), "x" }, { (VT_I4 << TE_VT) + offsetof(POINT, y), "y" }, { 0, NULL } }; TEmethod tesRECT[] = { { (VT_I4 << TE_VT) + offsetof(RECT, left), "left" }, { (VT_I4 << TE_VT) + offsetof(RECT, top), "top" }, { (VT_I4 << TE_VT) + offsetof(RECT, right), "right" }, { (VT_I4 << TE_VT) + offsetof(RECT, bottom), "bottom" }, { 0, NULL } }; TEmethod tesSHELLEXECUTEINFO[] = { { (VT_I4 << TE_VT) + offsetof(SHELLEXECUTEINFO, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(SHELLEXECUTEINFO, fMask), "fMask" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hwnd), "hwnd" }, { (VT_BSTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpVerb), "lpVerb" }, { (VT_BSTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpFile), "lpFile" }, { (VT_BSTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpParameters), "lpParameters" }, { (VT_BSTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpDirectory), "lpDirectory" }, { (VT_I4 << TE_VT) + offsetof(SHELLEXECUTEINFO, nShow), "nShow" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hInstApp), "hInstApp" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpIDList), "lpIDList" }, { (VT_BSTR << TE_VT) + offsetof(SHELLEXECUTEINFO, lpClass), "lpClass" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hkeyClass), "hkeyClass" }, { (VT_I4 << TE_VT) + offsetof(SHELLEXECUTEINFO, dwHotKey), "dwHotKey" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hIcon), "hIcon" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hMonitor), "hMonitor" }, { (VT_PTR << TE_VT) + offsetof(SHELLEXECUTEINFO, hProcess), "hProcess" }, { 0, NULL } }; TEmethod tesSHFILEINFO[] = { { (VT_PTR << TE_VT) + offsetof(SHFILEINFO, hIcon), "hIcon" }, { (VT_I4 << TE_VT) + offsetof(SHFILEINFO, iIcon), "iIcon" }, { (VT_I4 << TE_VT) + offsetof(SHFILEINFO, dwAttributes), "dwAttributes" }, { (VT_LPWSTR << TE_VT) + offsetof(SHFILEINFO, szDisplayName), "szDisplayName" }, { (VT_LPWSTR << TE_VT) + offsetof(SHFILEINFO, szTypeName), "szTypeName" }, { 0, NULL } }; 
TEmethod tesSHFILEOPSTRUCT[] = { { (VT_PTR << TE_VT) + offsetof(SHFILEOPSTRUCT, hwnd), "hwnd" }, { (VT_I4 << TE_VT) + offsetof(SHFILEOPSTRUCT, wFunc), "wFunc" }, { (VT_BSTR << TE_VT) + offsetof(SHFILEOPSTRUCT, pFrom), "pFrom" }, { (VT_BSTR << TE_VT) + offsetof(SHFILEOPSTRUCT, pTo), "pTo" }, { (VT_UI2 << TE_VT) + offsetof(SHFILEOPSTRUCT, fFlags), "fFlags" }, { (VT_BOOL << TE_VT) + offsetof(SHFILEOPSTRUCT, fAnyOperationsAborted), "fAnyOperationsAborted" }, { (VT_PTR << TE_VT) + offsetof(SHFILEOPSTRUCT, hNameMappings), "hNameMappings" }, { (VT_BSTR << TE_VT) + offsetof(SHFILEOPSTRUCT, lpszProgressTitle), "lpszProgressTitle" }, { 0, NULL } };
TEmethod tesSIZE[] = { { (VT_I4 << TE_VT) + offsetof(SIZE, cx), "cx" }, { (VT_I4 << TE_VT) + offsetof(SIZE, cy), "cy" }, { 0, NULL } };
TEmethod tesTCHITTESTINFO[] = { { (VT_CY << TE_VT) + offsetof(TCHITTESTINFO, pt), "pt" }, { (VT_I4 << TE_VT) + offsetof(TCHITTESTINFO, flags), "flags" }, { 0, NULL } };
TEmethod tesTCITEM[] = { { (VT_I4 << TE_VT) + offsetof(TCITEM, mask), "mask" }, { (VT_I4 << TE_VT) + offsetof(TCITEM, dwState), "dwState" }, { (VT_I4 << TE_VT) + offsetof(TCITEM, dwStateMask), "dwStateMask" }, { (VT_BSTR << TE_VT) + offsetof(TCITEM, pszText), "pszText" }, { (VT_I4 << TE_VT) + offsetof(TCITEM, cchTextMax), "cchTextMax" }, { (VT_I4 << TE_VT) + offsetof(TCITEM, iImage), "iImage" }, { (VT_PTR << TE_VT) + offsetof(TCITEM, lParam), "lParam" }, { 0, NULL } };
/* TOOLINFO field map. The lpszText/lParam/lpReserved entries previously all
   used offsetof(TOOLINFO, uId) (copy-paste error), so those three fields were
   read/written at the wrong offset; now mapped to their own members. */
TEmethod tesTOOLINFO[] = { { (VT_I4 << TE_VT) + offsetof(TOOLINFO, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(TOOLINFO, uFlags), "uFlags" }, { (VT_PTR << TE_VT) + offsetof(TOOLINFO, hwnd), "hwnd" }, { (VT_PTR << TE_VT) + offsetof(TOOLINFO, uId), "uId" }, { (VT_CARRAY << TE_VT) + offsetof(TOOLINFO, rect), "rect" }, { (VT_PTR << TE_VT) + offsetof(TOOLINFO, hinst), "hinst" }, { (VT_BSTR << TE_VT) + offsetof(TOOLINFO, lpszText), "lpszText" }, { (VT_PTR << TE_VT) + offsetof(TOOLINFO, lParam), "lParam" }, { (VT_PTR << TE_VT) + offsetof(TOOLINFO, lpReserved), "lpReserved" }, { 0,
NULL } }; TEmethod tesTVHITTESTINFO[] = { { (VT_CY << TE_VT) + offsetof(TVHITTESTINFO, pt), "pt" }, { (VT_I4 << TE_VT) + offsetof(TVHITTESTINFO, flags), "flags" }, { (VT_PTR << TE_VT) + offsetof(TVHITTESTINFO, hItem), "hItem" }, { 0, NULL } }; TEmethod tesTVITEM[] = { { (VT_I4 << TE_VT) + offsetof(TVITEM, mask), "mask" }, { (VT_PTR << TE_VT) + offsetof(TVITEM, hItem), "hItem" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, state), "state" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, stateMask), "stateMask" }, { (VT_BSTR << TE_VT) + offsetof(TVITEM, pszText), "pszText" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, cchTextMax), "cchTextMax" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, iImage), "iImage" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, iSelectedImage), "iSelectedImage" }, { (VT_I4 << TE_VT) + offsetof(TVITEM, cChildren), "cChildren" }, { (VT_PTR << TE_VT) + offsetof(TVITEM, lParam), "lParam" }, { 0, NULL } }; TEmethod tesWIN32_FIND_DATA[] = { { (VT_I4 << TE_VT) + offsetof(WIN32_FIND_DATA, dwFileAttributes), "dwFileAttributes" }, { (VT_FILETIME << TE_VT) + offsetof(WIN32_FIND_DATA, ftCreationTime), "ftCreationTime" }, { (VT_FILETIME << TE_VT) + offsetof(WIN32_FIND_DATA, ftLastAccessTime), "ftLastAccessTime" }, { (VT_FILETIME << TE_VT) + offsetof(WIN32_FIND_DATA, ftLastWriteTime), "ftLastWriteTime" }, { (VT_I4 << TE_VT) + offsetof(WIN32_FIND_DATA, nFileSizeHigh), "nFileSizeHigh" }, { (VT_I4 << TE_VT) + offsetof(WIN32_FIND_DATA, nFileSizeLow), "nFileSizeLow" }, { (VT_I4 << TE_VT) + offsetof(WIN32_FIND_DATA, dwReserved0), "dwReserved0" }, { (VT_I4 << TE_VT) + offsetof(WIN32_FIND_DATA, dwReserved1), "dwReserved1" }, { (VT_LPWSTR << TE_VT) + offsetof(WIN32_FIND_DATA, cFileName), "cFileName" }, { (VT_LPWSTR << TE_VT) + offsetof(WIN32_FIND_DATA, cAlternateFileName), "cAlternateFileName" }, { 0, NULL } }; TEmethod tesDRAWITEMSTRUCT[] = { { (VT_I4 << TE_VT) + offsetof(DRAWITEMSTRUCT, CtlType), "CtlType" }, { (VT_I4 << TE_VT) + offsetof(DRAWITEMSTRUCT, CtlID), "CtlID" }, { (VT_I4 << TE_VT) 
+ offsetof(DRAWITEMSTRUCT, itemID), "itemID" }, { (VT_I4 << TE_VT) + offsetof(DRAWITEMSTRUCT, itemAction), "itemAction" }, { (VT_I4 << TE_VT) + offsetof(DRAWITEMSTRUCT, itemState), "itemState" }, { (VT_PTR << TE_VT) + offsetof(DRAWITEMSTRUCT, hwndItem), "hwndItem" }, { (VT_PTR << TE_VT) + offsetof(DRAWITEMSTRUCT, hDC), "hDC" }, { (VT_CARRAY << TE_VT) + offsetof(DRAWITEMSTRUCT, rcItem), "rcItem" }, { (VT_PTR << TE_VT) + offsetof(DRAWITEMSTRUCT, itemData), "itemData" }, { 0, NULL } }; TEmethod tesMEASUREITEMSTRUCT[] = { { (VT_I4 << TE_VT) + offsetof(MEASUREITEMSTRUCT, CtlType), "CtlType" }, { (VT_I4 << TE_VT) + offsetof(MEASUREITEMSTRUCT, CtlID), "CtlID" }, { (VT_I4 << TE_VT) + offsetof(MEASUREITEMSTRUCT, itemID), "itemID" }, { (VT_I4 << TE_VT) + offsetof(MEASUREITEMSTRUCT, itemWidth), "itemWidth" }, { (VT_I4 << TE_VT) + offsetof(MEASUREITEMSTRUCT, itemHeight), "itemHeight" }, { (VT_PTR << TE_VT) + offsetof(MEASUREITEMSTRUCT, itemData), "itemData" }, { 0, NULL } }; TEmethod tesMENUINFO[] = { { (VT_I4 << TE_VT) + offsetof(MENUINFO, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(MENUINFO, fMask), "fMask" }, { (VT_I4 << TE_VT) + offsetof(MENUINFO, dwStyle), "dwStyle" }, { (VT_I4 << TE_VT) + offsetof(MENUINFO, cyMax), "cyMax" }, { (VT_PTR << TE_VT) + offsetof(MENUINFO, hbrBack), "hbrBack" }, { (VT_I4 << TE_VT) + offsetof(MENUINFO, dwContextHelpID), "dwContextHelpID" }, { (VT_PTR << TE_VT) + offsetof(MENUINFO, dwMenuData), "dwMenuData" }, { 0, NULL } }; TEmethod tesGUITHREADINFO[] = { { (VT_I4 << TE_VT) + offsetof(GUITHREADINFO, cbSize), "cbSize" }, { (VT_I4 << TE_VT) + offsetof(GUITHREADINFO, flags), "flags" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, hwndActive), "hwndActive" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, hwndFocus), "hwndFocus" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, hwndCapture), "hwndCapture" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, hwndMenuOwner), "hwndMenuOwner" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, 
hwndMoveSize), "hwndMoveSize" }, { (VT_PTR << TE_VT) + offsetof(GUITHREADINFO, hwndCaret), "hwndCaret" }, { (VT_CARRAY << TE_VT) + offsetof(GUITHREADINFO, rcCaret), "rcCaret" }, { 0, NULL } };
/* Master table mapping struct names to their size and field-map table.
   (The "rcCaret" entry above previously reused offsetof(GUITHREADINFO,
   hwndCaret); it now points at the rcCaret member itself.) */
TEStruct pTEStructs[] = { { sizeof(BITMAP), "BITMAP", tesBITMAP }, { sizeof(BSTR), "BSTR", tesNULL }, { sizeof(BYTE), "BYTE", tesNULL }, { sizeof(char), "char", tesNULL }, { sizeof(CHOOSECOLOR), "CHOOSECOLOR", tesCHOOSECOLOR }, { sizeof(CHOOSEFONT), "CHOOSEFONT", tesCHOOSEFONT }, { sizeof(COPYDATASTRUCT), "COPYDATASTRUCT", tesCOPYDATASTRUCT }, { sizeof(DIBSECTION), "DIBSECTION", tesDIBSECTION }, { sizeof(DRAWITEMSTRUCT), "DRAWITEMSTRUCT", tesDRAWITEMSTRUCT }, { sizeof(DWORD), "DWORD", tesNULL }, { sizeof(EXCEPINFO), "EXCEPINFO", tesEXCEPINFO }, { sizeof(FINDREPLACE), "FINDREPLACE", tesFINDREPLACE }, { sizeof(FOLDERSETTINGS), "FOLDERSETTINGS", tesFOLDERSETTINGS }, { sizeof(GUID), "GUID", tesNULL }, { sizeof(GUITHREADINFO), "GUITHREADINFO", tesGUITHREADINFO }, { sizeof(HANDLE), "HANDLE", tesNULL }, { sizeof(HDITEM), "HDITEM", tesHDITEM }, { sizeof(ICONINFO), "ICONINFO", tesICONINFO }, { sizeof(ICONMETRICS), "ICONMETRICS", tesICONMETRICS }, { sizeof(int), "int", tesNULL }, { sizeof(KEYBDINPUT) + sizeof(DWORD), "KEYBDINPUT", tesKEYBDINPUT }, { 256, "KEYSTATE", tesNULL }, { sizeof(LOGFONT), "LOGFONT", tesLOGFONT }, { sizeof(LPWSTR), "LPWSTR", tesNULL }, { sizeof(LVBKIMAGE), "LVBKIMAGE", tesLVBKIMAGE }, { sizeof(LVFINDINFO), "LVFINDINFO", tesLVFINDINFO }, { sizeof(LVGROUP), "LVGROUP", tesLVGROUP }, { sizeof(LVHITTESTINFO), "LVHITTESTINFO", tesLVHITTESTINFO }, { sizeof(LVITEM), "LVITEM", tesLVITEM }, { sizeof(MEASUREITEMSTRUCT), "MEASUREITEMSTRUCT", tesMEASUREITEMSTRUCT }, { sizeof(MENUINFO), "MENUINFO", tesMENUINFO }, { sizeof(MENUITEMINFO), "MENUITEMINFO", tesMENUITEMINFO }, { sizeof(MONITORINFOEX), "MONITORINFOEX", tesMONITORINFOEX }, { sizeof(MOUSEINPUT) + sizeof(DWORD), "MOUSEINPUT", tesMOUSEINPUT }, { sizeof(MSG), "MSG", tesMSG }, { sizeof(NMCUSTOMDRAW), "NMCUSTOMDRAW",
tesNMCUSTOMDRAW }, { sizeof(NMLVCUSTOMDRAW), "NMLVCUSTOMDRAW", tesNMLVCUSTOMDRAW }, { sizeof(NMTVCUSTOMDRAW), "NMTVCUSTOMDRAW", tesNMTVCUSTOMDRAW }, { sizeof(NMHDR), "NMHDR", tesNMHDR }, { sizeof(NONCLIENTMETRICS), "NONCLIENTMETRICS", tesNONCLIENTMETRICS }, { sizeof(NOTIFYICONDATA), "NOTIFYICONDATA", tesNOTIFYICONDATA }, { sizeof(OSVERSIONINFO), "OSVERSIONINFO", tesOSVERSIONINFOEX }, { sizeof(OSVERSIONINFOEX), "OSVERSIONINFOEX", tesOSVERSIONINFOEX }, { sizeof(PAINTSTRUCT), "PAINTSTRUCT", tesPAINTSTRUCT }, { sizeof(POINT), "POINT", tesPOINT }, { sizeof(RECT), "RECT", tesRECT }, { sizeof(SHELLEXECUTEINFO), "SHELLEXECUTEINFO", tesSHELLEXECUTEINFO }, { sizeof(SHFILEINFO), "SHFILEINFO", tesSHFILEINFO }, { sizeof(SHFILEOPSTRUCT), "SHFILEOPSTRUCT", tesSHFILEOPSTRUCT }, { sizeof(SIZE), "SIZE", tesSIZE }, { sizeof(TCHITTESTINFO), "TCHITTESTINFO", tesTCHITTESTINFO }, { sizeof(TCITEM), "TCITEM", tesTCITEM }, { sizeof(TOOLINFO), "TOOLINFO", tesTOOLINFO }, { sizeof(TVHITTESTINFO), "TVHITTESTINFO", tesTVHITTESTINFO }, { sizeof(TVITEM), "TVITEM", tesTVITEM }, { sizeof(VARIANT), "VARIANT", tesNULL }, { sizeof(WCHAR), "WCHAR", tesNULL }, { sizeof(WIN32_FIND_DATA), "WIN32_FIND_DATA", tesWIN32_FIND_DATA }, { sizeof(WORD), "WORD", tesNULL }, }; TEmethod methodMem2[] = { { VT_I4 << TE_VT, "int" }, { VT_UI4 << TE_VT, "DWORD" }, { VT_UI1 << TE_VT, "BYTE" }, { VT_UI2 << TE_VT, "WORD" }, { VT_UI2 << TE_VT, "WCHAR" }, { VT_PTR << TE_VT, "HANDLE" }, { VT_PTR << TE_VT, "LPWSTR" }, { 0, NULL } }; TEmethod methodTE[] = { { 1001, "Data" }, { 1002, "hwnd" }, { 1004, "About" }, { TE_METHOD + 1005, "Ctrl" }, { TE_METHOD + 1006, "Ctrls" }, { TE_METHOD + 1008, "ClearEvents" }, { TE_METHOD + 1009, "Reload" }, { TE_METHOD + 1010, "CreateObject" }, { TE_METHOD + 1020, "GetObject" }, { TE_METHOD + 1025, "AddEvent" }, { TE_METHOD + 1026, "RemoveEvent" }, { 1030, "WindowsAPI" }, { 1031, "WindowsAPI0" }, { 1131, "CommonDialog" }, { 1132, "WICBitmap" }, { 1132, "GdiplusBitmap" }, { 1137, "ProgressDialog" }, 
{ 1138, "DateTimeFormat" }, { 1139, "HiddenFilter" }, // { 1140, "Background" },,//Deprecated // { 1150, "ThumbnailProvider" },//Deprecated { 1160, "DragIcon" }, { 1180, "ExplorerBrowserFilter" }, { TE_METHOD + 1133, "FolderItems" }, { TE_METHOD + 1134, "Object" }, { TE_METHOD + 1135, "Array" }, { TE_METHOD + 1136, "Collection" }, { TE_METHOD + 1050, "CreateCtrl" }, { TE_METHOD + 1040, "CtrlFromPoint" }, { TE_METHOD + 1060, "MainMenu" }, { TE_METHOD + 1070, "CtrlFromWindow" }, { TE_METHOD + 1080, "LockUpdate" }, { TE_METHOD + 1090, "UnlockUpdate" }, { TE_METHOD + 1100, "HookDragDrop" },//Deprecated #ifdef _USE_TESTOBJECT { 1200, "TestObj" }, #endif { TE_OFFSET + TE_Type , "Type" }, { TE_OFFSET + TE_Left , "offsetLeft" }, { TE_OFFSET + TE_Top , "offsetTop" }, { TE_OFFSET + TE_Right , "offsetRight" }, { TE_OFFSET + TE_Bottom , "offsetBottom" }, { TE_OFFSET + TE_Tab, "Tab" }, { TE_OFFSET + TE_CmdShow, "CmdShow" }, { TE_OFFSET + TE_Layout, "Layout" }, { TE_OFFSET + TE_NetworkTimeout, "NetworkTimeout" }, { TE_OFFSET + TE_SizeFormat, "SizeFormat" }, { TE_OFFSET + TE_Version, "Version" }, { TE_OFFSET + TE_UseHiddenFilter, "UseHiddenFilter" }, { TE_OFFSET + TE_ColumnEmphasis, "ColumnEmphasis" }, { TE_OFFSET + TE_ViewOrder, "ViewOrder" }, { TE_OFFSET + TE_LibraryFilter, "LibraryFilter" }, { TE_OFFSET + TE_AutoArrange, "AutoArrange" }, { TE_OFFSET + TE_ShowInternet, "ShowInternet" }, { START_OnFunc + TE_Labels, "Labels" }, { START_OnFunc + TE_ColumnsReplace, "ColumnsReplace" }, { START_OnFunc + TE_OnBeforeNavigate, "OnBeforeNavigate" }, { START_OnFunc + TE_OnViewCreated, "OnViewCreated" }, { START_OnFunc + TE_OnKeyMessage, "OnKeyMessage" }, { START_OnFunc + TE_OnMouseMessage, "OnMouseMessage" }, { START_OnFunc + TE_OnCreate, "OnCreate" }, { START_OnFunc + TE_OnDefaultCommand, "OnDefaultCommand" }, { START_OnFunc + TE_OnItemClick, "OnItemClick" }, { START_OnFunc + TE_OnGetPaneState, "OnGetPaneState" }, { START_OnFunc + TE_OnMenuMessage, "OnMenuMessage" }, { START_OnFunc + 
TE_OnSystemMessage, "OnSystemMessage" }, { START_OnFunc + TE_OnShowContextMenu, "OnShowContextMenu" }, { START_OnFunc + TE_OnSelectionChanged, "OnSelectionChanged" }, { START_OnFunc + TE_OnClose, "OnClose" }, { START_OnFunc + TE_OnDragEnter, "OnDragEnter" }, { START_OnFunc + TE_OnDragOver, "OnDragOver" }, { START_OnFunc + TE_OnDrop, "OnDrop" }, { START_OnFunc + TE_OnDragLeave, "OnDragLeave" }, { START_OnFunc + TE_OnAppMessage, "OnAppMessage" }, { START_OnFunc + TE_OnStatusText, "OnStatusText" }, { START_OnFunc + TE_OnToolTip, "OnToolTip" }, { START_OnFunc + TE_OnNewWindow, "OnNewWindow" }, { START_OnFunc + TE_OnWindowRegistered, "OnWindowRegistered" }, { START_OnFunc + TE_OnSelectionChanging, "OnSelectionChanging" }, { START_OnFunc + TE_OnClipboardText, "OnClipboardText" }, { START_OnFunc + TE_OnCommand, "OnCommand" }, { START_OnFunc + TE_OnInvokeCommand, "OnInvokeCommand" }, { START_OnFunc + TE_OnArrange, "OnArrange" }, { START_OnFunc + TE_OnHitTest, "OnHitTest" }, { START_OnFunc + TE_OnVisibleChanged, "OnVisibleChanged" }, { START_OnFunc + TE_OnTranslatePath, "OnTranslatePath" }, { START_OnFunc + TE_OnNavigateComplete, "OnNavigateComplete" }, { START_OnFunc + TE_OnILGetParent, "OnILGetParent" }, { START_OnFunc + TE_OnViewModeChanged, "OnViewModeChanged" }, { START_OnFunc + TE_OnColumnsChanged, "OnColumnsChanged" }, { START_OnFunc + TE_OnItemPrePaint, "OnItemPrePaint" }, { START_OnFunc + TE_OnColumnClick, "OnColumnClick" }, { START_OnFunc + TE_OnBeginDrag, "OnBeginDrag" }, { START_OnFunc + TE_OnBeforeGetData, "OnBeforeGetData" }, { START_OnFunc + TE_OnIconSizeChanged, "OnIconSizeChanged" }, { START_OnFunc + TE_OnFilterChanged, "OnFilterChanged" }, { START_OnFunc + TE_OnBeginLabelEdit, "OnBeginLabelEdit" }, { START_OnFunc + TE_OnEndLabelEdit, "OnEndLabelEdit" }, { START_OnFunc + TE_OnReplacePath, "OnReplacePath" }, { START_OnFunc + TE_OnBeginNavigate, "OnBeginNavigate" }, { START_OnFunc + TE_OnSort, "OnSort" }, { START_OnFunc + TE_OnGetAlt, "OnGetAlt" }, { 
START_OnFunc + TE_OnEndThread, "OnEndThread" }, { START_OnFunc + TE_OnItemPostPaint, "OnItemPostPaint" }, { START_OnFunc + TE_OnHandleIcon, "OnHandleIcon" }, { START_OnFunc + TE_OnSorting, "OnSorting" }, { START_OnFunc + TE_OnSetName, "OnSetName" }, { START_OnFunc + TE_OnIncludeItem, "OnIncludeItem" }, { START_OnFunc + TE_OnContentsChanged, "OnContentsChanged" }, { START_OnFunc + TE_OnFilterView, "OnFilterView" }, { 0, NULL } }; TEmethod methodSB[] = { { 0x10000001, "Data" }, { 0x10000002, "hwnd" }, { 0x10000003, "Type" }, { 0x10000004, "Navigate" }, { 0x10000007, "Navigate2" }, { 0x10000008, "Index" }, { 0x10000009, "FolderFlags" }, { 0x1000000B, "History" }, { 0x10000010, "CurrentViewMode" }, { 0x10000011, "IconSize" }, { 0x10000012, "Options" }, { 0x10000013, "SizeFormat" }, { 0x10000014, "NameFormat" }, //Deprecated { 0x10000016, "ViewFlags" }, { 0x10000017, "Id" }, { 0x10000018, "FilterView" }, { 0x10000020, "FolderItem" }, { 0x10000021, "TreeView" }, { 0x10000024, "Parent" }, { 0x10000031, "Close" }, { 0x10000032, "Title" }, { 0x10000033, "Suspend" }, { 0x10000040, "Items" }, { 0x10000041, "SelectedItems" }, { 0x10000050, "ShellFolderView" }, { 0x10000058, "Droptarget" }, { 0x10000059, "Columns" }, // { 0x1000005A, "Searches" }, { 0x1000005B, "MapColumnToSCID" }, { 0x10000102, "hwndList" }, { 0x10000103, "hwndView" }, { 0x10000104, "SortColumn" }, { 0x10000105, "GroupBy" }, { 0x10000106, "Focus" }, { 0x10000107, "HitTest" }, { 0x10000108, "hwndAlt" }, { 0x10000110, "ItemCount" }, { 0x10000111, "Item" }, { 0x10000206, "Refresh" }, { 0x10000207, "ViewMenu" }, { 0x10000208, "TranslateAccelerator" }, { 0x10000209, "GetItemPosition" }, { 0x1000020A, "SelectAndPositionItem" }, { 0x10000280, "SelectItem" }, { 0x10000281, "FocusedItem" }, { 0x10000282, "GetFocusedItem" }, { 0x10000283, "GetItemRect" }, { 0x10000300, "Notify" }, { 0x10000400, "NavigateComplete" }, { 0x10000501, "AddItem" }, { 0x10000502, "RemoveItem" }, { 0x10000503, "AddItems" }, { 0x10000504, 
"RemoveAll" }, { 0x10000505, "SessionId" }, { START_OnFunc + SB_TotalFileSize, "TotalFileSize" }, { START_OnFunc + SB_ColumnsReplace, "ColumnsReplace" }, { START_OnFunc + SB_OnIncludeObject, "OnIncludeObject" }, { START_OnFunc + SB_AltSelectedItems, "AltSelectedItems" }, { START_OnFunc + SB_VirtualName, "VirtualName" }, //Deprecated { 0, NULL } }; TEmethod methodWB[] = { { 0x10000001, "Data" }, { 0x10000002, "hwnd" }, { 0x10000003, "Type" }, { 0x10000004, "TranslateAccelerator" }, { 0x10000005, "Application" }, { 0x10000006, "Document" }, { 0x10000007, "Window" }, { 0x10000008, "Focus" }, // { 0x10000009, "Close" }, { 0, NULL } }; TEmethod methodTC[] = { { 1, "Data" }, { 2, "hwnd" }, { 3, "Type" }, { 6, "HitTest" }, { 7, "Move" }, { 8, "Selected" }, { 9, "Close" }, { 10, "SelectedIndex" }, { 11, "Visible" }, { 12, "Id" }, { 13, "LockUpdate" }, { 14, "UnlockUpdate" }, { DISPID_NEWENUM, "_NewEnum" }, { DISPID_TE_ITEM, "Item" }, { DISPID_TE_COUNT, "Count" }, { DISPID_TE_COUNT, "length" }, { TE_OFFSET + TE_Left, "Left" }, { TE_OFFSET + TE_Top, "Top" }, { TE_OFFSET + TE_Width, "Width" }, { TE_OFFSET + TE_Height, "Height" }, { TE_OFFSET + TC_Flags, "Style" }, { TE_OFFSET + TC_Align, "Align" }, { TE_OFFSET + TC_TabWidth, "TabWidth" }, { TE_OFFSET + TC_TabHeight, "TabHeight" }, { 0, NULL } }; TEmethod methodFIs[] = { { 2, "Application" }, { 3, "Parent" }, { 8, "AddItem" }, { 9, "hDrop" }, { 10, "GetData" }, { 11, "SetData" }, { DISPID_NEWENUM, "_NewEnum" }, { DISPID_TE_ITEM, "Item" }, { DISPID_TE_COUNT, "Count" }, { DISPID_TE_COUNT, "length" }, { DISPID_TE_INDEX, "Index" }, // { 0x10000001, "lEvent" }, { 0x10000001, "dwEffect" }, { 0x10000002, "pdwEffect" }, { 0x10000003, "Data" }, { 0x10000004, "UseText" }, { 0, NULL } }; TEmethod methodDT[] = { { 1, "DragEnter" }, { 2, "DragOver" }, { 3, "Drop" }, { 4, "DragLeave" }, { 5, "Type" }, { 6, "FolderItem" }, { 0, NULL } }; TEmethod methodTV[] = { { 0x10000001, "Data" }, { 0x10000002, "Type" }, { 0x10000003, "hwnd" }, { 
0x10000004, "Close" }, { 0x10000005, "hwndTree" }, { 0x10000007, "FolderView" }, { 0x10000008, "Align" }, { 0x10000009, "Visible" }, { 0x10000106, "Focus" }, { 0x10000107, "HitTest" }, { 0x10000206, "Refresh" }, { 0x10000283, "GetItemRect" }, { 0x10000300, "Notify" }, { TE_OFFSET + SB_TreeWidth, "Width" }, { TE_OFFSET + SB_TreeFlags, "Style" }, { TE_OFFSET + SB_EnumFlags, "EnumFlags" }, { TE_OFFSET + SB_RootStyle, "RootStyle" }, { 0x20000000, "SelectedItem" }, { 0x20000001, "SelectedItems" }, { 0x20000002, "Root" }, { 0x20000003, "SetRoot" }, { 0x20000004, "Expand" }, { 0x20000005, "Columns" }, { 0x20000006, "CountViewTypes" }, { 0x20000007, "Depth" }, { 0x20000008, "EnumOptions" }, { 0x20000009, "Export" }, { 0x2000000a, "Flags" }, { 0x2000000b, "Import" }, { 0x2000000c, "Mode" }, { 0x2000000d, "ResetSort" }, { 0x2000000e, "SetViewType" }, { 0x2000000f, "Synchronize" }, { 0x20000010, "TVFlags" }, { 0, NULL } }; TEmethod methodFI[] = { { 1, "Name" }, { 2, "Path" }, { 3, "Alt" }, // { 4, "FocusedItem" }, { 5, "Unavailable" }, { 6, "Enum" }, { 9, "_BLOB" }, //To be necessary { 9, "FolderItem" }, { 0, NULL } }; TEmethod methodMem[] = { { 1, "P" }, { 4, "Read" }, { 5, "Write" }, { 6, "Size" }, { 7, "Free" }, { 8, "Clone" }, { 9, "_BLOB" }, { DISPID_NEWENUM, "_NewEnum" }, { DISPID_TE_ITEM, "Item" }, { DISPID_TE_COUNT, "Count" }, { DISPID_TE_COUNT, "length" }, { 0, NULL } }; TEmethod methodCM[] = { { 1, "QueryContextMenu" }, { 2, "InvokeCommand" }, { 3, "Items" }, { 4, "GetCommandString" }, { 5, "FolderView" }, { 6, "HandleMenuMsg" }, { 10, "hmenu" }, { 11, "indexMenu" }, { 12, "idCmdFirst" }, { 13, "idCmdLast" }, { 14, "uFlags" }, { 0, NULL } }; TEmethod methodCD[] = { { 40, "ShowOpen" }, { 41, "ShowSave" }, // { 42, "ShowFolder" }, { 10, "FileName" }, { 13, "Filter" }, { 20, "InitDir" }, { 21, "DefExt" }, { 22, "Title" }, { 30, "MaxFileSize" }, { 31, "Flags" }, { 32, "FilterIndex" }, { 31, "FlagsEx" }, { 0, NULL } }; TEmethod methodGB[] = { { 1, "FromHBITMAP" }, { 2, 
"FromHICON" }, { 3, "FromResource" }, { 4, "FromFile" }, { 5, "FromStream" }, { 6, "FromArchive" }, { 7, "FromItem" },//Deprecated { 8, "FromClipboard" }, { 9, "FromSource" }, { 90, "Create" }, { 99, "Free" }, { 100, "Save" }, { 101, "Base64" }, { 102, "DataURI" }, { 103, "GetStream" }, { 110, "GetWidth" }, { 111, "GetHeight" }, { 112, "GetPixel" }, { 113, "SetPixel" }, { 114, "GetPixelFormat" }, { 115, "FillRect" }, { 120, "GetThumbnailImage" }, { 130, "RotateFlip" }, { 140, "GetFrameCount" }, { 150, "Frame" }, { 160, "GetMetadata" }, { 161, "GetFrameMetadata" }, { 210, "GetHBITMAP" }, { 211, "GetHICON" }, { 212, "DrawEx" }, { 900, "GetCodecInfo" }, { START_OnFunc + WIC_OnGetAlt, "OnGetAlt" }, { 0, NULL } }; TEmethod methodPD[] = { { 0x60010001, "HasUserCancelled" }, { 0x60010002, "SetCancelMsg" }, { 0x60010003, "SetLine" }, { 0x60010004, "SetProgress" }, { 0x60010005, "SetTitle" }, { 0x60010006, "StartProgressDialog" }, { 0x60010007, "StopProgressDialog" }, { 0x60010008, "Timer" }, { 0x60010009, "SetAnimation" }, { 0, NULL } }; TEmethod methodCO[] = { { 0x60010001, "Free" }, { 0, NULL } };
def create(self):
    """Return the SQL statements to create this base type.

    The sequence is: a shell CREATE TYPE, the input/output support
    functions, any optional support functions (from the module-level
    OPT_FUNCS list), the full CREATE TYPE with its option clauses, and
    finally a COMMENT statement when a description is present.
    """
    stmts = ["CREATE TYPE %s" % self.qualname()]
    stmts.append(self.dep_funcs['input'].create(basetype=True))
    stmts.append(self.dep_funcs['output'].create(basetype=True))
    clauses = []
    for name in OPT_FUNCS:
        if name in self.dep_funcs:
            stmts.append(self.dep_funcs[name].create(basetype=True))
            clauses.append("%s = %s" % (
                name.upper(), self.dep_funcs[name].qualname()))
    # Optional scalar attributes, emitted in the same fixed order as the
    # original hasattr chain.
    for attr, fmt in (('internallength', "INTERNALLENGTH = %s"),
                      ('alignment', "ALIGNMENT = %s"),
                      ('storage', "STORAGE = %s"),
                      ('delimiter', "DELIMITER = '%s'"),
                      ('category', "CATEGORY = '%s'")):
        if hasattr(self, attr):
            clauses.append(fmt % getattr(self, attr))
    if hasattr(self, 'preferred'):
        # NOTE(review): emitted whenever the attribute exists, regardless of
        # its value -- presumably it is only ever set when true; confirm.
        clauses.append("PREFERRED = TRUE")
    sep = ',\n ' if clauses else ''
    stmts.append("CREATE TYPE %s (\n INPUT = %s,"
                 "\n OUTPUT = %s%s%s)" % (
                     self.qualname(), self.input, self.output,
                     sep, ',\n '.join(clauses)))
    if hasattr(self, 'description'):
        stmts.append(self.comment())
    return stmts
def markAsView(self, episode):
    """Mark the given episode as viewed and refresh the UI counters.

    Looks the episode up in self.map (cell-index tuple -> episode); when
    found, sets the corresponding table-cell widget status to 1 (viewed),
    records the play on the episode, and refreshes the count and footer.

    Returns True if the episode was found, False otherwise.
    """
    # BUGFIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; .items() behaves identically for iteration on both.
    for i, e in self.map.items():
        if e == episode:
            try:
                self.episodes.cellWidget(*i).setStatus(1)
            except AttributeError:
                # cellWidget may return None for a cell without a widget;
                # marking as played must still proceed.
                pass
            e.userPlayed()
            self.refreshCount()
            self.refreshFooter()
            return True
    return False
package report import ( "os" "github.com/giantswarm/microerror" "github.com/spf13/cobra" "github.com/giantswarm/resource-police/internal/env" ) const ( flagDryRun = "dryrun" flagSlackWebhookEndpoint = "slack.webhook.endpoint" flagCortexEndpoint = "cortex.endpoint.url" flagCortexUsername = "cortex.username" flagCortexPassword = "<PASSWORD>" ) type flag struct { DryRun bool SlackWebhookEndpoint string CortexEndpoint string CortexUsername string CortexPassword string } func (f *flag) Init(cmd *cobra.Command) { cmd.Flags().BoolVar(&f.DryRun, flagDryRun, false, "Set this to print the report to STDOUT and avoid sending it to Slack") cmd.Flags().StringVar(&f.SlackWebhookEndpoint, flagSlackWebhookEndpoint, os.Getenv(env.SlackWebhookEndpoint), "Slack Webhook endpoint for posting messages into channel") cmd.Flags().StringVar(&f.CortexEndpoint, flagCortexEndpoint, "https://prometheus-us-central1.grafana.net/api/prom", "Cortex endpoint URL") cmd.Flags().StringVar(&f.CortexUsername, flagCortexUsername, os.Getenv(env.CortexUserName), "Cortex user ID") cmd.Flags().StringVar(&f.CortexPassword, flagCortexPassword, os.Getenv(env.CortexPassword), "Cortex API token") } func (f *flag) Validate() error { if f.SlackWebhookEndpoint == "" { return microerror.Maskf(invalidFlagError, "--%s or %s environment variable must not be empty", flagSlackWebhookEndpoint, env.SlackWebhookEndpoint) } if f.CortexEndpoint == "" { return microerror.Maskf(invalidFlagError, "--%s must not be empty", flagCortexEndpoint) } if f.CortexUsername == "" { return microerror.Maskf(invalidFlagError, "--%s or %s environment variable must not be empty", flagCortexUsername, env.CortexUserName) } if f.CortexPassword == "" { return microerror.Maskf(invalidFlagError, "--%s or %s environment variable must not be empty", flagCortexPassword, env.CortexPassword) } return nil }
def initialize_patch_indices(self):
    """Recompute the flat list of patch indices for every image.

    Each entry of self.patch_indices is [image_index, *spatial_index].
    For images whose padding boundary is still unset, it is derived from
    the overflow reported by calc_patch_indices.
    """
    self.patch_indices = []
    for image_idx, image in enumerate(self.data):
        indices, overflow = self.calc_patch_indices(
            self.unpadded_data_spatial_shape[image_idx],
            self.patch_shape,
            overlap=self.patch_overlap,
            randomize_offset=self.randomize_patch_offsets)
        # Prepend the image index as an extra leading column.
        image_column = np.full(shape=(indices.shape[0], 1), fill_value=image_idx)
        indices = np.append(image_column, indices, axis=1)
        self.patch_indices.extend(indices.tolist())
        if self.padding_boundary[image_idx] is None:
            # Pad symmetrically by the overflow on both sides of each axis.
            self.padding_boundary[image_idx] = np.stack([overflow, overflow], axis=1)
#include <fstream> #include <iostream> #include <string.h> #include <string> #include "core.h" #include "utils.h" #include "parse.h" #include "compile.h" // This is a compiler that reads some cpp file and outputs x86_64 assembly // It would be nice to bootrap it to itself, so limiting the included libraries // might be a good idea... void help() { std::cout << "Help: I need all possible arguments to make sense.\n"; } int main(int argc, char ** argv) { std::string filePath; std::string outFile = "out.s"; // get the command line args // Will attempt to mimic Gnu-Gcc inputs so Makefiles can be easilly modified for(int i = 1; i < argc; i ++) { if(!strcmp(argv[i], "help")) { help(); } else if(!strcmp(argv[i], "-I")) { // include folder as a source of header files. } else if(!strcmp(argv[i], "-o")) { // set output fileName i ++; outFile = argv[i]; } else if(!strcmp(argv[i], "-Wall")) { //enable base warnings } else if(!strcmp(argv[i], "-E")) { // build preprocessor output only, // send to stdout } else if(!strcmp(argv[i], "-S")) { // build Assembly output (Currently the default) } else if(!strcmp(argv[i], "-C")) { // Produce pure machine code without linking // use for making libraries? 
} else if(!strcmp(argv[i], "-save-temps")) { // save all intermediate files (preprocessed, assembled, object) } else if(!strcmp(argv[i], "-l")) { // lowercase L, link to shared libs } else if(!strcmp(argv[i], "-fPIC")) { // Produce position independent code, best for obj files // that are intended to be shared libs } else if(!strcmp(argv[i], "-V")) { // verbose output while compiling } else if(!strcmp(argv[i], "--version")) { // send version to stdout and exit without further processing // std::cout << compilerName << " " << compilerVersion << '\n' << compilerDescription << '\n'; std::cout << "Version: "<< COREVERSION << std::endl; exit(0); } else { // include argv[i] as filePath to code // issue error if not a *.cpp or *.h file filePath = argv[i]; } } compiler c; c.load(filePath.c_str()); c.compile(); // std::string test = "3+4+5+var+ vari + variable"; // std::cout << c.expression("", test); std::cout << c.getError().toString() << std::endl; c.write(outFile); }
<reponame>benjaminapetersen/tanzu-framework<gh_stars>10-100
// Copyright 2022 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package version

import (
	"fmt"
	"strings"

	fuzz "github.com/google/gofuzz"
	"k8s.io/apimachinery/pkg/util/rand"
)

// Fuzz fuzzes the passed version.
//
// It generates a random "major.minor.patch" core, optionally followed by
// "+<build-metadata>" made of 0-2 dash-joined "<word>.<digit>" segments,
// parses the result and copies it into v.
//
// NOTE: the sequence of rand.* calls below determines the generated value;
// do not reorder them.
func Fuzz(v *Version, _ fuzz.Continue) {
	major := rand.Intn(3)
	minor := rand.Intn(50)
	patch := rand.Intn(30)

	var ss []string
	// 0-2 build-metadata segments, each "<3-9 random chars>.<1-4>".
	bmCount := rand.Intn(3)
	for i := 0; i < bmCount; i++ {
		ss = append(ss, fmt.Sprintf("%s.%v", rand.String(rand.IntnRange(3, 10)), rand.IntnRange(1, 5)))
	}
	buildMeta := strings.Join(ss, "-")

	// When buildMeta is empty the trailing "+" must be stripped again to
	// keep the string a valid semantic version.
	vString := strings.TrimSuffix(fmt.Sprintf("%v.%v.%v+%s", major, minor, patch, buildMeta), "+")
	vNew, _ := ParseSemantic(vString)
	*v = *vNew
}
<gh_stars>1-10
import {Component, OnInit} from '@angular/core';
import {FormBuilder, FormGroup, Validators} from '@angular/forms';
import {StaffService} from '../staff.service';
import {Help} from '../../../../../utils/Help';
import {ActivatedRoute, ParamMap} from '@angular/router';
import {switchMap} from 'rxjs/operators';
import {of} from 'rxjs';
import {UploadFile} from 'ng-zorro-antd';
import {Staff} from '../staff';

/**
 * Create/edit form for a staff member.
 *
 * When the route carries an `id` query parameter the existing staff record
 * is loaded for editing; otherwise a fresh, empty Staff is edited.
 */
@Component({
  selector: 'app-staff-edit',
  templateUrl: './staff-edit.component.html',
  styleUrls: ['./staff-edit.component.scss']
})
export class StaffEditComponent implements OnInit {
  // Reactive form backing the template; built in ngOnInit.
  validateForm: FormGroup;
  // The staff record being edited (bound to the form in the template).
  obj: Staff = new Staff();

  constructor(
    private formBuilder: FormBuilder,
    private staffService: StaffService,
    private route: ActivatedRoute,
    public help: Help) {
  }

  ngOnInit() {
    // Load the record for the `id` query parameter, or fall back to a new
    // Staff when no id is present or the lookup fails.
    this.route.queryParamMap.pipe(
      switchMap((params: ParamMap) => {
        if (params.get('id')) {
          return this.staffService.getById(params.get('id'));
        } else {
          return of(new Staff());
        }
      })
    ).subscribe(d => {
      if (d.success) {
        this.obj = d.data;
      } else {
        this.obj = new Staff();
      }
    });
    // Field-level validation rules; `null` validators mean "optional".
    this.validateForm = this.formBuilder.group({
      id: [null, null],
      serialNo: [null, [Validators.required]],
      name: [null, [Validators.required]],
      genderId: [null, [Validators.required]],
      mobile: [null, [Validators.required]],
      email: [null, [Validators.required]],
      avatar: [null, null],
      departmentId: [null, [Validators.required]],
      birthday: [null, [Validators.required]],
      academicId: [null, null],
      degreeId: [null, null],
      positionId: [null, null],
      titleId: [null, null],
      typeId: [null, null],
      identifyTypeId: [null, null],
      identifyNo: [null, null],
      status: [null, [Validators.required]],
      birthProvinceId: [null, null],
      birthCityId: [null, null],
      policy: [null, null],
      nationId: [null, null],
      joinDate: [null, null],
      remark: [null, null],
    });
  }

  /**
   * Avatar upload callback: toggles the loading indicator and, on success,
   * stores the uploaded file's server path on the edited record.
   */
  handleChange(info: { file: UploadFile }): void {
    switch (info.file.status) {
      case 'uploading':
        this.help.isLoading = true;
        break;
      case 'done':
        this.help.isLoading = false;
        // tslint:disable-next-line:no-non-null-assertion
        this.help.getBase64(info.file!.originFileObj!, (img: string) => {
          this.obj.avatar = info.file.response.data.fullFilePath;
        });
        break;
      case 'error':
        this.help.showMessage('error', '网络错误!');
        this.help.isLoading = false;
        break;
    }
  }

  /**
   * Persist the edited record (create or update) and navigate back on
   * success. Dates are normalized to 'yyyy-MM-dd' before sending.
   */
  submitForm() {
    this.help.isLoading = true;
    this.obj.birthday = this.help.fmtDate(this.obj.birthday, 'yyyy-MM-dd');
    this.obj.joinDate = this.help.fmtDate(this.obj.joinDate, 'yyyy-MM-dd');
    this.staffService.saveOrUpdateData(this.obj).subscribe(res => {
      this.help.isLoading = false;
      if (res.success) {
        this.help.showMessage('success', res.message);
        this.help.back();
      }
    });
  }
}
<reponame>mudphone/HaskellBook
-- Chapter 10 exercises: list comprehensions and folds.
module Ten where

stops = "pbtdkg"
vowels = "aeiou"

-- All stop-vowel-stop three-letter combinations.
stopVowelStop :: [Char] -> [Char] -> [[Char]]
stopVowelStop stops vowels = [[x, y, z] | x <- stops, y <- vowels, z <- stops]

-- As above, but excluding combinations that start with 'p'.
noPs :: [Char] -> [Char] -> [[Char]]
noPs stops vowels = [[x, y, z] | x <- stops, y <- vowels, z <- stops, x /= 'p']

-- Only the combinations that start with 'p'.
onlyPs :: [Char] -> [Char] -> [[Char]]
onlyPs stops vowels = [[x, y, z] | x <- stops, y <- vowels, z <- stops, x == 'p']

nouns = ["house", "cane", "sugar", "wave"]
verbs = ["blow", "swim", "argue", "fart"]

-- All noun-verb-noun "sentences", space separated.
nounVerbNoun :: [[Char]] -> [[Char]] -> [[Char]]
nounVerbNoun nouns verbs = [x ++ " " ++ y ++ " " ++ z | x <- nouns, y <- verbs, z <- nouns]

-- Average word length of a sentence (total letters / word count).
avgLetters :: Fractional a => [Char] -> a
avgLetters sentence = numLetters / numWords
  where wordList = words sentence
        numWords = realToFrac (length wordList)
        numLetters = realToFrac (sum (map length wordList))

-- The following reimplement standard functions via foldr, as exercises.

myOr :: [Bool] -> Bool
myOr = foldr (\a b -> if a then True else b) False

myOr' :: [Bool] -> Bool
myOr' = foldr (||) False

myAny :: (a -> Bool) -> [a] -> Bool
myAny p = foldr (\a b -> if p a then True else b) False

myAny' :: (a -> Bool) -> [a] -> Bool
myAny' p = foldr (\a b -> (p a) || b) False

myElem :: Eq a => a -> [a] -> Bool
myElem e = foldr (\a b -> a == e || b) False

-- NOTE: appending at each step makes this O(n^2); kept as the exercise wrote it.
myReverse :: [a] -> [a]
myReverse = foldr (\a b -> b ++ [a]) []

myMap :: (a -> b) -> [a] -> [b]
myMap f = foldr (\a b -> [f a] ++ b) []

myFilter :: (a -> Bool) -> [a] -> [a]
myFilter p = foldr (\a b -> if p a then [a] ++ b else b) []

-- Flatten one level of nesting.
squish :: [[a]] -> [a]
squish = foldr (\a b -> a ++ b) []

-- Map then flatten (concatMap).
squishMap :: (a -> [b]) -> [a] -> [b]
squishMap f = foldr (\a b -> f a ++ b) []

squishAgain :: [[a]] -> [a]
squishAgain = squishMap id

-- Maximum/minimum by a comparison function; partial on empty lists (foldr1).
myMaximumBy :: (a -> a -> Ordering) -> [a] -> a
myMaximumBy compFn = foldr1 (\a b -> if (compFn a b) == GT then a else b)

myMinimumBy :: (a -> a -> Ordering) -> [a] -> a
myMinimumBy compFn = foldr1 (\a b -> if (compFn a b) == LT then a else b)
The Eredivisie outfit have announced on their website that the prolific striker has signed a five-year contract with the 2009 Bundesliga champions.

Wolfsburg have completed the signing of Bas Dost from Heerenveen for an undisclosed fee. The Eredivisie outfit have announced on their official website that the striker underwent a medical in Germany on Friday, before inking a five-year deal with the Wolves. The 2009 Bundesliga champions had long been linked with a move for the 23-year-old, and head coach Felix Magath has now got the attacking reinforcements he was looking for. Dost is a product of the Emmen youth academy, and also wore the jersey of Heracles before joining Heerenveen in the summer of 2010. He netted an impressive 38 goals in all competitions in the 2011-12 campaign, and was crowned Eredivisie top scorer with 32 strikes. The former Netherlands Under-21 striker had also attracted the interest of clubs such as Borussia Monchengladbach, West Ham United, Aston Villa and Everton.
/**
 * Visibility levels for a resource.
 *
 * NOTE(review): semantics inferred from the member names — confirm against
 * the consumers: PUBLIC = visible to everyone, SELF = visible only to the
 * owner, MEMBERS = visible to members.
 */
export enum Privacy {
  PUBLIC = "PUBLIC",
  SELF = "SELF",
  MEMBERS = "MEMBERS",
}
<filename>server/src/api/items/index.ts<gh_stars>0
import Router from 'koa-router';
import * as itemsCtrl from './items';
import { isAdminIn } from '../../libs/utils';

// Collection-level routes (/items). All routes require an admin session
// via the isAdminIn middleware.
const items = new Router();

items.post('/', isAdminIn, itemsCtrl.addItem);
items.get('/', isAdminIn, itemsCtrl.listItems);

// Item-level routes, mounted under /items/:id below.
const item = new Router();

item.get('/', isAdminIn, itemsCtrl.readItem);
item.delete('/', isAdminIn, itemsCtrl.removeItem);
item.patch('/', isAdminIn, itemsCtrl.updateItem);

// getById runs before every /:id route — presumably resolving the id to an
// item on the context; confirm in ./items.
items.use('/:id', itemsCtrl.getById, item.routes());

export default items;
/**
 * Holds an immutable reference to another object (or to {@code null}).
 * Once constructed, the referent can never be changed.
 *
 * @author Brian Stansberry
 *
 * @param <T> the type of the referenced object
 */
public class ImmutableReference<T> {

    /** The referenced object; may be {@code null}. Assigned once, never changed. */
    private final T value;

    /**
     * Create a new ImmutableReference.
     *
     * @param referent the object to refer to, or <code>null</code>
     */
    public ImmutableReference(T referent) {
        this.value = referent;
    }

    /**
     * Gets the wrapped object, if there is one.
     *
     * @return the object passed to the constructor, or <code>null</code> if
     *         <code>null</code> was passed to the constructor
     */
    public T get() {
        return value;
    }
}
/*===========================================================================
 *
 * Class CObContRecord Method - CObCntoSubrecord* AddItem (FormID, Count);
 *
 * Appends a new CNTO (contained item) subrecord with the given form ID and
 * count. Returns the new subrecord, or NULL if allocation or the cast to
 * CObCntoSubrecord fails.
 *
 *=========================================================================*/
CObCntoSubrecord* CObContRecord::AddItem (const obformid_t FormID, const word Count)
{
	CObSubrecord* pGeneric = AddNewSubrecord(OB_NAME_CNTO);
	if (pGeneric == NULL) return (NULL);

	CObCntoSubrecord* pCnto = ObCastClass(CObCntoSubrecord, pGeneric);
	if (pCnto == NULL) return (NULL);

	pCnto->InitializeNew();
	pCnto->SetCount(Count);
	pCnto->SetFormID(FormID);
	return (pCnto);
}
//---------------------------------------------------------------------------- // used to help speed input pin connection times. We return a partially // specified media type - only the main type is specified. If we return // anything BUT a major type, some codecs written improperly will crash //---------------------------------------------------------------------------- HRESULT CSampleSenderInPin::GetMediaType( int iPosition, CMediaType * pMediaType ) { if (iPosition < 0) { return E_INVALIDARG; } if (iPosition > 0) { return VFW_S_NO_MORE_ITEMS; } *pMediaType = CMediaType( ); pMediaType->SetType( ((CSampleSender*)m_pFilter)->m_mtAccept.Type( ) ); return S_OK; }
<filename>experimental/HarbourMaster/harbour-master/dao/src/main/java/org/solent/com504/project/impl/dao/repository/GPSRepository.java
package org.solent.com504.project.impl.dao.repository;

import org.solent.com504.project.model.dto.GPS;
import java.util.UUID;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

// Where we would add custom SQL/JPQL queries (via @Query/@Param) if needed;
// for now the inherited JpaRepository CRUD methods are sufficient.
//
// NOTE(review): the UUID, Query and Param imports are currently unused, and
// the id type here is Integer — confirm that matches the @Id field of GPS.
@Repository
public interface GPSRepository extends JpaRepository<GPS, Integer> {

}
If you remember hearing the words “Hamburger Mary’s” and “opening soon” back in April, you’re not alone. When we last checked in this spring, the decade-and-a-half vacancy of the former Patio Café (531 Castro St.) was only a head chef and a reactivated liquor license away from ending. However, in case you forgot to flip your calendar last week, it’s now September—and if you haven’t ventured near the 500 block of Castro Street recently, 531 is still shuttered. The trouble, says property owner Les Natali, is that he can't find managers for the restaurant. “About a dozen applicants” have applied for the two open managerial positions—roughly one applicant per month since we initially reported last August that Hamburger Mary’s was looking to make those hires. The job posting first went up last August. | Photo: Steven Bracco/Hoodline “Most [applications] have been from people who live outside the city or whose job experience has been outside the Bay Area,” wrote Natali in an email. “We would prefer to hire applicants who live in San Francisco and whose experience has been in here.” Once Natali and his team are able to make those hires, he told us that Hamburger Mary’s is “good to go" with permitting and licensing and "will be ready to open." So, if you know anybody who has local experience as a restaurant or kitchen manager, for the love of Peaches Christ, tell them to send their resume to [email protected] so that this saga can end, and the Castro community can breathe fresh life into this once vibrant space. Here are the official job postings: General Manager: Must have minimum 3 years experience as restaurant manager in high-volume restaurant in San Francisco or Bay Area. Must have experience in hiring, training and supervising restaurant staff, front of house and back of house, be a team leader, and provide excellent guest service. Must be available to work nights and weekends. Hourly pay or salary commensurate with experience. 
Bonus based on performance and profitability. Working Chef/Kitchen Manager: Must have minimum 3 years experience as kitchen manager in a high-volume restaurant in San Francisco or Bay Area. Must have experience in opening a restaurant kitchen, training and supervising kitchen staff, create menu specials, perform inventory control, work well under stress, attention to detail, quality food preparation and plating. Must be available to work nights and weekends. Hourly pay or salary commensurate with experience. Bonus based on performance and profitability. Opportunity for advancement. You can email resumes and contact information to [email protected].
<gh_stars>1-10
package com.doggogram.backendsvc.util.responses;

import lombok.AllArgsConstructor;
import lombok.Data;

import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;

/**
 * Payload returned to clients when a request fails. Serialized as the
 * "error" root element in XML; Lombok generates getters/setters/equals/
 * hashCode (@Data) and the all-args constructor.
 */
@Data
@AllArgsConstructor
@XmlRootElement (name = "error")
public class ErrorResponse {
    // Name/type of the exception that caused the failure.
    private String exception;
    // Human-readable error message.
    private String message;
    // Optional list of finer-grained detail messages.
    private List<String> details;
}
#pragma once #include "Borders/Border.h" #include "Graphics/NineSlice.h" namespace OpenGLGUI { class NineSliceBorder : public Border { private: NineSlice& nineSlice; public: NineSliceBorder(NineSlice& border); ~NineSliceBorder(); }; }
// GetEnforcementStatus checks if a text string passes AutoMod settings func (s *HelixService) GetEnforcementStatus(broadcaster_id, userID, messageText string) (bool, error) { m := manyMessages{ []message{ { MessageID: random(1, 1000), MessageText: messageText, UserID: userID, }, }, } b := new(bytes.Buffer) json.NewEncoder(b).Encode(m) req, err := http.NewRequest("POST", fmt.Sprintf("https://api.twitch.tv/helix/moderation/enforcements/status?broadcaster_id=%s", broadcaster_id), b) if err != nil { log.Println(err.Error()) return false, err } req.Header.Set("Client-Id", s.config.OAuth2.ClientID) req.Header.Set("Content-Type", "application/json") accessToken, err := s.getAccessToken(broadcaster_id) if err != nil { return false, err } req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", accessToken)) res, err := s.client.Do(req) if res != nil { defer res.Body.Close() } if err != nil { log.Println(err.Error()) return false, err } bodyBytes, err := ioutil.ReadAll(res.Body) if err != nil { log.Println(err.Error()) return false, err } err = json.Unmarshal(bodyBytes, &m) if err != nil { log.Println(err.Error()) return false, err } return m.Messages[0].IsPermitted, nil }
def background_to_binding_transition_prior(self, p_binding_site, p_alt_background):
    """Build the prior transition matrix from the background states.

    Each background state keeps most of its mass on itself, spreads
    p_alt_background evenly over the other background mosaics, and sends
    p_binding_site split equally to the first binding state of the forward
    and reverse-complement motifs. Rows for non-background states are left
    at zero.

    NOTE(review): with multiple mosaics only half of p_binding_site is
    subtracted from the self-transition while the full p_binding_site is
    distributed — confirm the intended row normalization with the authors.
    """
    num_states = self.N()
    num_bg = self.num_background_mosaics
    result = numpy.zeros((num_states, num_states))
    if num_bg > 1:
        p_stay = 1.0 - p_binding_site / 2 - p_alt_background / (num_bg - 1)
    else:
        p_stay = 1.0 - p_binding_site
    for src in self.background_states:
        for dst in self.background_states:
            # The division below only happens when src != dst, which
            # requires at least two background states, so num_bg > 1.
            result[src, dst] = p_stay if src == dst else p_alt_background / (num_bg - 1)
        result[src, self.kth(0, rev_comp=False)] = p_binding_site / 2
        result[src, self.kth(0, rev_comp=True)] = p_binding_site / 2
    return result
Robust multiple‐fault detection filter A new robust multiple‐fault detection and identification algorithm is determined. Different from other algorithms which explicitly force the geometric structure by using eigenstructure assignment or geometric theory, this algorithm is derived from solving an optimization problem. The output error is divided into several subspaces. For each subspace, the transmission from one fault, denoted the associated target fault, is maximized while the transmission from other faults, denoted the associated nuisance fault, is minimized. Therefore, each projected residual of the robust multiple‐fault detection filter is affected primarily by one fault and minimally by other faults. The transmission from process and sensor noises is also minimized so that the filter is robust with respect to these disturbances. It is shown that, in the limit where the weighting on each associated nuisance fault transmission goes to infinity, the filter recovers the geometric structure of the restricted diagonal detection filter of which the Beard–Jones detection filter and unknown input observer are special cases. Filter designs can be obtained for both time‐invariant and time‐varying systems. Copyright © 2002 John Wiley & Sons, Ltd.
#!/usr/bin/env python3
from sys import stdin, stdout


# Sentinel meaning "no count found in the search range"; larger than any
# reachable quotient for the given bounds.
_NOT_FOUND = 10 ** 9


def ri():
    """Read one line of whitespace-separated integers from stdin."""
    return map(int, stdin.readline().split())


def solve(k, r):
    """Return the minimum number of shovels (price k) to buy so the total
    can be paid exactly: either the total ends the purchase with an r coin
    (total = 10*i + r) or uses only 10-unit coins (total = 10*i, i >= 1).
    """
    with_r_coin = _NOT_FOUND
    for i in range(10 ** 5):
        if (10 * i + r) % k == 0:
            with_r_coin = (10 * i + r) // k
            break
    tens_only = _NOT_FOUND
    for i in range(1, 10 ** 5):
        if (10 * i) % k == 0:
            tens_only = (10 * i) // k
            break
    return min(with_r_coin, tens_only)


def main():
    k, r = ri()
    print(solve(k, r))


# IMPROVEMENT: logic extracted into solve() and guarded with __main__ so the
# module can be imported (e.g. for testing) without blocking on stdin;
# behavior when run as a script is unchanged.
if __name__ == "__main__":
    main()
Three-dimensional numerical analysis of wet cooling tower A mathematical model for water evaporation and water droplet movement is established to describe the air-water interaction in a natural draft wet cooling tower (NDWCT). The standard $k$–$\varepsilon$ model is used to close the Reynolds-averaged Navier–Stokes equations. The three-dimensional heat and mass transfer process in the NDWCT is simulated to analyze the crosswind effect on wet cooling tower performance. It is found that the heat and mass transfer in the fill zone is seriously affected by crosswind, while the wet cooling tower performance is improved when the crosswind velocity is higher than $5\,\mathrm{m\,s^{-1}}$. Conditions and locations for good cooling performance are pointed out.
The Searching Effectiveness of Social Tagging in Museum Websites This paper explores the search effectiveness of social tagging, which allows the public to freely tag resources — i.e. annotate them with keywords of any kind — as well as to share personal opinions on those resources. Social tagging potentially helps users to organize, manage, and retrieve resources. Efficient retrieval can help users put more of their focus on studying the resources rather than on the retrieval process. This study was an investigation into the relations between social tags and user queries. Our findings were summarized into 4 main points: 1) 85% of the surveyed users agreed that social tags assisted them in searching for resources; 2) Over 40% of user query searches found their resources via the matching of social tags; 3) Social tags matched over 70% of user queries; 4) 14% of the social tags for a resource did not appear in the context of the resource. The experimental results demonstrated that social tags can improve users' efficiency in searching for resources.
#pragma once

#include <cstdlib>  // aligned_alloc / posix_memalign / free / size_t

// 64-byte-aligned allocation helpers per platform.
//
// NOTE: ALIGN/MALLOC64/FREE64 are preprocessor macros, so they are NOT
// scoped by this namespace; it is kept for source compatibility.
namespace memory {
#if defined(_MSC_VER)
#define ALIGN(x) __declspec(align(x))
// BUGFIX: the MSVC CRT function is _aligned_malloc (single leading
// underscore); the previous "__aligned_malloc" spelling does not exist.
// The matching FREE64 below already used _aligned_free correctly.
#define MALLOC64(x) _aligned_malloc(x, 64)
#define FREE64(x) _aligned_free(x)
#elif defined(__APPLE__)
#define ALIGN(x) __attribute__((aligned(x)))
// posix_memalign is the portable 64-byte-aligned allocator on macOS;
// allocation failure leaves `pointer` unspecified, mirroring the other
// branches' "check the result yourself" contract.
inline void *MALLOC64(size_t x) {
  void *pointer;
  posix_memalign(&pointer, 64, x);
  return pointer;
}
#define FREE64(x) free(x)
#elif defined(__linux__)
#define ALIGN(x) __attribute__((aligned(x)))
#define MALLOC64(x) aligned_alloc(64, x)
#define FREE64(x) free(x)
#endif
}  // namespace memory
package cz.muni.ics.oidc.server.claims.sources; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.google.common.net.UrlEscapers; import cz.muni.ics.oidc.server.claims.ClaimSource; import cz.muni.ics.oidc.server.claims.ClaimSourceInitContext; import cz.muni.ics.oidc.server.claims.ClaimSourceProduceContext; /** * This source converts groupNames to AARC format and joins them with eduPersonEntitlement * * @author <NAME> <EMAIL> */ public class JoinGroupNamesAndEduPersonEntitlementSource extends ClaimSource { private String groupNames; private String eduPersonEntitlement; private String prefix; private String authority; public JoinGroupNamesAndEduPersonEntitlementSource(ClaimSourceInitContext ctx) { super(ctx); groupNames = ctx.getProperty("groupNames", null); eduPersonEntitlement = ctx.getProperty("eduPersonEntitlement", null); prefix = ctx.getProperty("prefix", null); authority = ctx.getProperty("authority", null); } @Override public JsonNode produceValue(ClaimSourceProduceContext pctx) { JsonNode groupNamesJson = pctx.getRichUser().getJson(groupNames); JsonNode eduPersonEntitlementJson = pctx.getRichUser().getJson(eduPersonEntitlement); JsonNodeFactory factory = JsonNodeFactory.instance; ArrayNode result = new ArrayNode(factory); if (groupNamesJson != null) { ArrayNode groupNamesArrayNode = (ArrayNode) groupNamesJson; for (int i = 0; i < groupNamesArrayNode.size(); i++) { String value = groupNamesArrayNode.get(i).textValue(); value = prefix + UrlEscapers.urlPathSegmentEscaper().escape(value) + "#" + authority; result.add(value); } } if (eduPersonEntitlementJson != null) { ArrayNode eduPersonEntitlementArrayNode = (ArrayNode) eduPersonEntitlementJson; result.addAll(eduPersonEntitlementArrayNode); } return result; } }
/**
 * Displays a protocol description to a user attempting to grant blessings via NFC.
 */
public class NfcBlesserActivity extends PreferenceActivity {
    // Static instruction shown to the blesser while waiting for the NFC beam.
    private static final String PROTOCOL_MESSAGE = "WAIT FOR BLESSEE TO BEAM BLESSINGS!";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Build a one-item preference screen whose only (disabled, i.e.
        // non-interactive) entry carries the protocol instruction text.
        PreferenceScreen prefScreen = this.getPreferenceManager().createPreferenceScreen(this);
        Preference messagePref = new Preference(this);
        messagePref.setSummary(PROTOCOL_MESSAGE);
        messagePref.setEnabled(false);
        prefScreen.addPreference(messagePref);
        setPreferenceScreen(prefScreen);
    }
}
/**
 * Represents the RuneScape Bestiary API.
 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs">Bestiary APIs</a>
 */
public final class Bestiary {

	/**
	 * A {@link TypeToken} which represents a {@link Map} of {@link String}s to {@link Integer}s.
	 */
	private static final Type TYPE_TOKEN = new TypeToken<Map<String, Integer>>() {
	}.getType();

	/**
	 * The URL to the Bestiary web-service.
	 */
	private static final String BESTIARY_URL = HttpClient.WEB_SERVICES_URL + "/m=itemdb_rs/bestiary";

	/**
	 * The format of the URL to fetch a {@link Beast}.
	 */
	private static final String BEAST_DATA_URL_FORMAT = BESTIARY_URL + "/beastData.json?beastid=%d";

	/**
	 * The format of the URL to search for a {@link Beast}.
	 */
	private static final String BEAST_SEARCH_URL_FORMAT = BESTIARY_URL + "/beastSearch.json?term=%s";

	/**
	 * The format of the URL to search for a bestiary name.
	 */
	private static final String BESTIARY_NAMES_URL_FORMAT = BESTIARY_URL + "/bestiaryNames.json?letter=%c";

	/**
	 * The URL to fetch the list of area names.
	 */
	private static final String AREA_NAMES_URL = BESTIARY_URL + "/areaNames.json";

	/**
	 * The format of the URL to the search for {@link Beast}s in an area.
	 */
	private static final String AREA_BEASTS_URL_FORMAT = BESTIARY_URL + "/areaBeasts.json?identifier=%s";

	/**
	 * The URL to fetch the list of Slayer category names.
	 */
	private static final String SLAYER_CATEGORY_NAMES_URL = BESTIARY_URL + "/slayerCatNames.json";

	/**
	 * The format of the URL to search for a {@link Beast} in a given Slayer category.
	 */
	private static final String SLAYER_BEASTS_URL_FORMAT = BESTIARY_URL + "/slayerBeasts.json?identifier=%d";

	/**
	 * The URL to fetch the list of weakness names.
	 */
	private static final String WEAKNESS_NAMES_URL = BESTIARY_URL + "/weaknessNames.json";

	/**
	 * The format of the URL to search for a {@link Beast} weak to a given weakness.
	 */
	private static final String WEAKNESS_BEASTS_URL_FORMAT = BESTIARY_URL + "/weaknessBeasts.json?identifier=%d";

	/**
	 * The format of the URL to search for a {@link Beast} in a combat level range.
	 */
	private static final String LEVEL_GROUP_URL_FORMAT = BESTIARY_URL + "/levelGroup.json?identifier=%d-%d";

	/**
	 * The {@link Pattern} that is replaced with '+' symbols when parsing a beast's name.
	 */
	private static final Pattern NAME_SPACER = Pattern.compile(" ");

	/**
	 * Converts an array of {@link SearchResult} to an {@link ImmutableMap} of {@link Integer}s to {@link String}s.
	 * Results without a label are silently dropped.
	 * NOTE(review): {@code ImmutableMap.Builder#build} throws on duplicate
	 * ids; this assumes the web-service never returns duplicates — confirm.
	 * @param results The array of {@link SearchResult}s.
	 * @return An {@link ImmutableMap} of {@link Integer}s to {@link String}s.
	 */
	private static ImmutableMap<Integer, String> resultsToImmutableMap(SearchResult... results) {
		if (results == null) {
			return ImmutableMap.of();
		}

		ImmutableMap.Builder<Integer, String> builder = ImmutableMap.builder();
		for (SearchResult result : results) {
			result.getLabel().ifPresent(label -> builder.put(result.getValue(), label));
		}
		return builder.build();
	}

	/**
	 * The web-services {@link Client}.
	 */
	private final Client client;

	/**
	 * Creates a new {@link Bestiary}.
	 * @param client The web-services {@link Client}.
	 */
	public Bestiary(Client client) {
		this.client = Preconditions.checkNotNull(client);
	}

	/**
	 * Creates a new {@link Search} that will use results from this {@link Bestiary}.
	 * @return The {@link Search}.
	 */
	public Search search() {
		return new Search(this);
	}

	/**
	 * Gets the a {@link Beast} by its id.
	 * @param beastId The id of the {@link Beast}.
	 * @return An {@link Optional} containing the {@link Beast}, or {@link Optional#empty()} if no {@link Beast} of that id was found.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#Beast_Data">Beast Data</a>
	 */
	public Optional<Beast> beastData(int beastId) throws IOException {
		String url = String.format(BEAST_DATA_URL_FORMAT, beastId);
		return client.fromJson(url, Beast.class);
	}

	/**
	 * Searches for a {@link Beast}'s id by a set of terms.
	 * Terms are joined with '+' as required by the web-service query format.
	 * @param terms The terms to search by.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#Searching_Names">Searching Names</a>
	 */
	public ImmutableMap<Integer, String> searchByTerms(String... terms) throws IOException {
		Preconditions.checkNotNull(terms);
		StringJoiner joiner = new StringJoiner("+");
		for (String term : terms) {
			joiner.add(term);
		}

		String url = String.format(BEAST_SEARCH_URL_FORMAT, joiner.toString());
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}

	/**
	 * Searches for a {@link Beast} by the first letter in it's name.
	 * @param letter The letter to search by.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#Beasts_A_to_Z">Beasts A to Z</a>
	 */
	public ImmutableMap<Integer, String> searchByFirstLetter(char letter) throws IOException {
		String url = String.format(BESTIARY_NAMES_URL_FORMAT, letter);
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}

	/**
	 * Gets an {@link ImmutableList} of area names.
	 * @return An {@link ImmutableList} of area names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#areaNames">Beasts by Area - areaNames</a>
	 */
	public ImmutableList<String> areaNames() throws IOException {
		Optional<String[]> optional = client.fromJson(AREA_NAMES_URL, String[].class);
		return optional.map(ImmutableList::copyOf).orElse(ImmutableList.of());
	}

	/**
	 * Searches for the {@link Beast}s in a given area.
	 * Spaces in the area name are replaced with '+' for the query string.
	 * @param area The name of the area to search for.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#areaBeasts">Beasts by Area - areaBeasts</a>
	 */
	public ImmutableMap<Integer, String> beastsInArea(String area) throws IOException {
		Preconditions.checkNotNull(area);
		String url = String.format(AREA_BEASTS_URL_FORMAT, NAME_SPACER.matcher(area).replaceAll("+"));
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}

	/**
	 * Gets an {@link ImmutableMap} of Slayer category names to their corresponding ids.
	 * @return An {@link ImmutableMap} of Slayer category names to their corresponding ids.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#slayerCatNames">Beasts by Slayer Category - slayerCatNames</a>
	 */
	public ImmutableMap<String, Integer> slayerCategories() throws IOException {
		Optional<Map<String, Integer>> optional = client.fromJson(SLAYER_CATEGORY_NAMES_URL, TYPE_TOKEN);
		return optional.map(ImmutableMap::copyOf).orElse(ImmutableMap.of());
	}

	/**
	 * Searches for the {@link Beast}s in a given Slayer category.
	 * @param categoryId The id of the Slayer category.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#slayerBeasts">Beasts by Slayer Category - slayerBeasts</a>
	 */
	public ImmutableMap<Integer, String> beastsInSlayerCategory(int categoryId) throws IOException {
		String url = String.format(SLAYER_BEASTS_URL_FORMAT, categoryId);
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}

	/**
	 * Searches for the {@link Beast}s in a given Slayer category.
	 * Resolves the category name via {@link #slayerCategories()} first; an
	 * unknown name yields an empty map.
	 * @param categoryName The name of the Slayer category.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#slayerBeasts">Beasts by Slayer Category - slayerBeasts</a>
	 */
	public ImmutableMap<Integer, String> beastsInSlayerCategory(String categoryName) throws IOException {
		Preconditions.checkNotNull(categoryName);
		ImmutableMap<String, Integer> categories = slayerCategories();
		return categories.containsKey(categoryName) ? beastsInSlayerCategory(categories.get(categoryName)) : ImmutableMap.of();
	}

	/**
	 * Gets an {@link ImmutableMap} of weakness category names to their corresponding ids.
	 * @return An {@link ImmutableMap} of weakness category names to their corresponding ids.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#weaknessNames">Beasts by Weakness - weaknessNames</a>
	 */
	public ImmutableMap<String, Integer> weaknesses() throws IOException {
		Optional<Map<String, Integer>> optional = client.fromJson(WEAKNESS_NAMES_URL, TYPE_TOKEN);
		return optional.map(ImmutableMap::copyOf).orElse(ImmutableMap.of());
	}

	/**
	 * Searches for the {@link Beast}s that are weak to a specific weakness.
	 * @param weaknessId The id of the weakness.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#weaknessBeasts">Beasts by Weakness - weaknessBeasts</a>
	 */
	public ImmutableMap<Integer, String> beastsWeakTo(int weaknessId) throws IOException {
		String url = String.format(WEAKNESS_BEASTS_URL_FORMAT, weaknessId);
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}

	/**
	 * Searches for the {@link Beast}s that are weak to a specific weakness.
	 * Resolves the weakness name via {@link #weaknesses()} first; an unknown
	 * name yields an empty map.
	 * @param weaknessName The name of the weakness.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#weaknessBeasts">Beasts by Weakness - weaknessBeasts</a>
	 */
	public ImmutableMap<Integer, String> beastsWeakTo(String weaknessName) throws IOException {
		Preconditions.checkNotNull(weaknessName);
		ImmutableMap<String, Integer> weaknesses = weaknesses();
		return weaknesses.containsKey(weaknessName) ? beastsWeakTo(weaknesses.get(weaknessName)) : ImmutableMap.of();
	}

	/**
	 * Searches for the {@link Beast}s that have a combat level between the lower and upper bound inclusively.
	 * Note the bounds must satisfy upperBound &gt; lowerBound (strict).
	 * @param lowerBound The lowest combat level.
	 * @param upperBound The highest combat level.
	 * @return An {@link ImmutableMap} of {@link Beast} ids to {@link Beast} names.
	 * @throws IOException If an I/O error occurs.
	 * @see <a href="http://services.runescape.com/m=rswiki/en/Bestiary_APIs#Beasts_by_Level">Beasts by Level</a>
	 */
	public ImmutableMap<Integer, String> beastsInLevelGroup(int lowerBound, int upperBound) throws IOException {
		Preconditions.checkArgument(upperBound > lowerBound, "The upper combat level bound must be higher than the lower combat level bound.");
		String url = String.format(LEVEL_GROUP_URL_FORMAT, lowerBound, upperBound);
		return resultsToImmutableMap(client.fromJson(url, SearchResult[].class).orElse(null));
	}
}
def api_update_secret(token, session_secret_key, secret_id, data, data_nonce): method = 'POST' endpoint = '/secret/' data = json.dumps({ 'secret_id': secret_id, 'data': data, 'data_nonce': data_nonce }) return api_request(method, endpoint, data=data, token=token, session_secret_key=session_secret_key)
Destabilization of NaBH4 by Transition Metal Fluorides With the goal of improving performance of a hydrogen-rich storage medium, the influence of a collection of first and second period transition metal fluorides on the destabilization of NaBH4 is studied on samples produced by ball milling NaBH4 with 2 mol% of a metal fluoride additive. The effects obtained by increasing additive amount and changing oxidation state are also evaluated for NbF5, CeF3, and CeF4. The as-milled products are characterized by in-house powder X-ray diffraction, while the hydrogen release and decomposition are monitored by temperature programmed desorption with residual gas analysis, differential scanning calorimetry, and thermogravimetry. The screening of samples containing 2 mol% of additive shows that distinctive groups of transition metal fluorides affect the ball milling process differently depending on their enthalpy of formation, melting point, or their ability to react at the temperatures achieved during ball milling. This leads to the formation of NaBF4 in the case of TiF4, MnF3, VF4, CdF2, NbF5, AgF, and CeF3 and the presence of the metal in CrF3, CuF2, and AgF. There is no linear correlation between the position of the transition metal in the periodic table and the observed behavior. The thermal behavior of the products after milling is given by the remaining NaBH4, fluoride, and the formation of intermediate metastable compounds. A noticeable decrease of the decomposition temperature is seen for the majority of the products, with the exceptions of the samples containing YF3, AgF, and CeF3. The largest decrease of the decomposition temperature is observed for NbF5. When comparing increasing amounts of the same additive, the largest decrease of the decomposition temperature is observed for 10 mol% of NbF5. Higher amounts of additive result in the loss of the NaBH4 thermal signal and ultimately the loss of the crystalline borohydride. 
When comparing additives with the same transition metal and different oxidation states, the most efficient additive is found to be the one with a higher oxidation state. Furthermore, among all the samples studied, higher oxidation state metal fluorides are found to be the most destabilizing agents for NaBH4. Overall, the present study shows that there is no single parameter affecting the destabilization of NaBH4 by transition metal fluorides. Instead, parameters such as the transition metal electronegativity and oxidation state or the enthalpy of formation of the fluoride and its melting point are competing to influence the destabilization. In particular, it is found that the combination of a high metal oxidation state and a low fluoride melting point will enhance destabilization. This is observed for MnF3, NbF5, NiF2, and CuF2, which lead to high gas releases from the decomposition of NaBH4 at the lowest decomposition temperatures. Introduction As hydrogen becomes one of the important alternative energy carriers for renewable energy sources, the discussion about its safe and efficient storage gains momentum. The challenge is not only to achieve small compact systems with high gravimetric and volumetric hydrogen densities fulfilling the necessary safety requirements, but for a competitive practical use, the hydrogen needs to be efficiently absorbed and desorbed. These goals can be reached by utilizing storage media that have intrinsically high hydrogen densities such as pressurized cylinders and cryogenic liquid hydrogen systems, as well as by solid-state hydrogen containing materials. The latter method has the additional advantages of safety and high volumetric density . Among solid-storage materials, first and second group borohydrides (LiBH 4 , NaBH 4 , Ca(BH 4 ) 2 , and Mg(BH 4 ) 2 ) have been for two decades very attractive candidates because of their gravimetric densities of the order of 10 to 20 wt% H 2 . 
NaBH 4 , which has a high gravimetric capacity of 10.6 wt% and a decomposition temperature of about 535 • C , has gone from being a favorite solid-storage material in the early 2000s to being rejected by the U.S. Department of Energy (DoE) for on-board applications, to then again being described as a fuel for the future due to its large yields of hydrogen release by hydrolysis and thermal decomposition that can be readily used in aqueous solutions in some types of fuel cells such as proton exchange membrane fuel cells (PEMFCs) or direct boron hydride fuel cells (DBFCs) . The present work focuses on reducing the thermal desorption temperature of NaBH 4 below 535 • C by adding small amounts of transition metal fluorides (TMFs). This is an extension of two previous works: one with transition metal chlorides (TMCs) that showed the formation of NaBH 4 -chloride substituted phases and another with selected TMFs, where no substitution was found . An extensive literature review including the last four decades reveals a limited amount of work concerning the effect of fluorides on borohydrides. The first report of a borohydride being ball-milled with a variety of fluorides corresponds to Zhang et al. . In this work, selected chlorides were found to form new borohydrides easier than their corresponding fluorides. Al-Kukhun et al. and Zhang et al. also found that the addition of selected fluorides (NbF 5 and CaF 2 and ZnF 2 and TiF 3 , respectively) to MgBH 4 had a positive effect on the hydrogen release and the kinetics of the borohydride. Furthermore, Minella et al. investigated the sorption properties and reversibility of the Ti(IV) and Nb(V) doped-(CaBH 4 ) 2 -MgH 2 system. Adding NbF 5 resulted in a system with enhanced reversibility by slightly suppressing the formation of CaB 12 H 12 . Likewise, Zhou et al. used CeF 3 as a catalyst on LiBH 4 nanoconfined on activated carbon. 
They found a considerable decrease of the onset temperature of hydrogen release and a substantial increase in the dehydrogenation capacity. The fluoride substitution of LiBH 4 by Richter et al. was one of the most significant destabilization effects of fluorides on borohydrides observed up to date. The first study involving NaBH 4 and fluorides did not occur until early 2013 when Rude et al. reported fluorine substitution on NaBH 4 while investigating different NaBH 4 -NaBF 4 mixtures. The fluorine-substituted phases were found to decompose into more stable compounds, while the NaBH 4 -NaBF 4 composite itself presented considerably lower decomposition temperature. Chong et al. found that the addition of LaF 3 to NaBH 4 promoted hydrogen sorption better than LaH. The literature about the co-addition of more than one transition metal fluoride to a borohydride is rare. Recently, Huang et al. found that adding ScF 3 and YF to a NaBH 4 -containing system resulted in a three step hydrogen desorbing system with enhanced reversibility when using the two fluorides simultaneously. The results were partially confirmed by Zhao et al. on the reversibility of 3NaBH 4 /ScF 3 and by Huang et al. on the reversible hydrogen sorption behavior of 3NaBH 4 -(x) YF 3 -(1 − x) GdF 3 . A recent review by Jain et al. summarized the catalytic effect on lightweight hydrogen storage materials of a variety of compounds, including TiF 3 , TiF 4 , CeF 4 , NbF 5 , ZrF 4 , ternary K-TM-F fluorides (TM: Ti, Zr, Ni, Fe), NaMgF 3 , and NaF. Additionally, Mao et al. concluded that using metal fluorides as additives is a promising direction for improving the sorption kinetics of NaBH 4 by lowering the energy barriers. These authors stated that both Ti and F have a positive effect. However, the physical, chemical, or thermodynamic parameters of the halide responsible for the increase in the decomposition rate of NaBH 4 /borohydrides/hydrides have so far not been identified. 
It is generally suggested that the oxidation state of the metal element that forms the halide plays the most important role. This is justified by the influence of different catalysts observed for chemical compounds that exist in only one oxidation state when comparing to chemical compounds with multivalent metals . Similar discussions have been carried out about the influence of the oxidation state of the metal for catalyzed MgH 2 . All these studies showed that some metal fluorides induce strong effects on the destabilization of particular borohydrides. However, there is still a large number of fluorides whose effect on borohydrides has not been reported that might be beneficial for the hydrogen storage community. Continuing with this line of investigation, the main focus of the present work is to study the destabilization effects of available transition metal fluorides (TMFs) from the first and second periods of the periodic table on NaBH 4 . These effects might occur through the formation of new compounds, as well as the mechanochemical process itself. The ball-milled products are analyzed by powder X-ray diffraction (PXD) and a variety of thermal methods including differential scanning calorimetry (DSC) and temperature-programmed desorption (TPD). The observed behavior is discussed in terms of the transition metal (TM) electronic structure and the position in the periodic table, as well as the ability of the fluoride to react during milling and form new compounds. Variations due to the additive amount and oxidation state are also discussed. In particular, NbF 5 was chosen as one of the additives based on previous results by Luo et al. showing an increase of solubility during ball milling due to its low melting point (90 • C). On the other hand, increasing the oxidation state of a metal has been shown to lead to compounds with a lower melting point and, therefore, higher solubility during the ball milling process . 
This is tested by using CeF 3 and CeF 4 as additives to NaBH 4 . Table 1 summarizes the PXD data obtained for all the samples after ball milling and analyzed by DIFFRAC plus EVA in terms of the wt% content of the different compounds in the mixture. The table also contains the calculated wt% of the original mixtures for comparison. The same data are included in Appendix A as PXD plots ( Figure A1). The data showed that all the samples still contained crystalline NaBH 4 in large amounts after milling, although the exact composition of the products varied depending on the TM fluoride. Moreover, the lack of a shift in the Bragg peaks corresponding to the remaining NaBH 4 indicated that there was no substitution in the NaBH 4 unit cell despite the presence of crystalline NaBF 4 in some of the samples. This was in agreement with the previously reported formation of NaBF 4 . Ball Milling Effects of the TMFs on NaBH 4 The added fluorides remained as a crystalline phase for ScF 3 , FeF 3 , CrF 3 , NiF 2 , CoF 3 , CuF 2 , VF 4 , ZnF 2 , CdF 2 , YF 3 , and AgF. With the exception of AgF and VF 4 , these were all fluorides with melting points above 800 • C. On the other hand, TiF 4 , MnF 3 , NbF 5 , ZrF 4 , CeF 4 , and CeF 3 did not appear as crystalline phases in the PXD results, and no peaks corresponding to NbF 5 were seen in the 10 and 15 mol% cases either. Except for the ZrF 4 and CeF 4 containing samples, which only showed crystalline NaBH 4 in the PXD pattern, the disappearance of the fluoride in these samples was correlated with the appearance of NaBF 4 and/or metallic TM. The presence of other compounds containing TM and fluorine could not be confirmed with the current PXD data. Table 1. Composition of the samples before and after ball milling. The first 5 columns show the composition of the ball-milled samples as evaluated by EVA. The two last columns are the calculated wt% of the original physical mixture before ball milling. 
Sample NaBH 4 (wt%) TMF (wt%) NaBF 4 (wt%) TM (wt%) Other (wt%) NaBH 4 (wt%) TMF (wt%) A detailed analysis of the PXD patterns suggested a classification of the samples based on the products of the ball milling. First, ScF 3 , FeF 3 , NiF 2 , ZnF 2 , and YF 3 showed no effect on the milling process. For these additives, the original ratio between NaBH 4 and the fluoride was still present in the powder after ball milling. Small changes of the composition appeared for the samples containing CrF 3 , CoF 3 , and CuF 2 . This was seen by the presence of metallic Cr and Cu, respectively, while for CoF 3 , the presence of CoF 2 was likely related to the original fluoride. Stronger changes of the composition were introduced by CdF 2 , CeF 3 , and AgF. In these cases, NaBF 4 was present in the products together with the original NaBH 4 and the fluoride. For the AgF case, metallic Ag and Ag 2 F were also seen in the PXD pattern. TiF 4 , MnF 3 , VF 4 , and NbF 5 (2, 10, and 15 mol%) produced the strongest changes of the composition of the samples after milling. This was mostly seen for TiF 4 , MnF 3 , and VF 4 by a significant amount of NaBF 4 and for VF 4 by the additional metallic V. For the NbF 5 cases, the amount of NaBF 4 produced by milling was smaller than published earlier . On the other hand, two new compounds containing Nb appeared in these samples: NbF 3 and NaNb 1.25 F 6 . The content of these two products increased with NbF 5 content in the mixture. The presence of F − containing compounds in some of the studied cases confirmed the decomposition of the original fluorides and some level of H − substitution in small amounts of NaBH 4 . In contrast, CeF 4 and ZrF 4 containing samples showed only crystalline NaBH 4 after milling, with the exception of a nonsymmetric peak at 25 • for CeF 4 indicating a substituted phase. 
The comparison of the composition of the samples with CeF 3 and CeF 4 after milling showed that both oxidation states led to the disappearance of the fluoride in the crystalline form. Moreover, for CeF 3 , the analysis showed that the fluoride decomposed to form NaBF 4 , while for CeF 4 , there was no crystalline indication of the fluoride dissociating (it could still be there as amorphous). The effect of the oxidation state could also be seen by comparing the reported use of TiF 3 and the current use of TiF 4 . While the published data showed no formation of a new crystalline phase after ball milling for 3 h, the current data showed the formation of up to 18 wt% of NaBF 4 after only 1 h ball milling, when using the higher oxidation state. Overall, the melting point of the fluoride seemed to play a role in the interaction with NaBH 4 during ball milling. This was easily seen in the case of NbF 5 , but also in the general trends observed between samples that contained high melting point TM fluorides such as ScF 3 and NiF 2 , which led to mostly unchanged sample compositions, and those with lower melting points such as TiF 4 and VF 4 , which led to the formation of NaBF 4 . However, the PXD results alone did not establish any correlation between the ability of the fluoride to interact chemically with NaBH 4 and properties such as the enthalpy of formation, the electronic structure, or the oxidation state of the TM. Pure NaBH 4 with Different Calorimetry Methods Pure NaBH 4 samples were analyzed with three different calorimetric methods: TPD, DSC-Netzsch and DSC-Setaram, and TGA ( Figure 1). Each of these techniques accessed useful information and presented experimental limitations that might lead to different decomposition behaviors of the samples. The in-house TPD and the Netzsch DSC (blue and black lines in Figure 1) showed the maximum of the melting point of NaBH 4 to occur at around 503 • C, in agreement with the literature. 
However, the decomposition event happened at higher temperatures with the Netzsch DSC, at about 558 • C, compared to the 534 • C of the TPD curve. The reason for this discrepancy was the fact that the TPD analysis was taking place in a dynamic vacuum, while the Netzsch DSC measured under an Ar flow of 20 mL/min. The Ar flow cooled down the surroundings of the sample, making it more difficult to achieve the necessary temperature to decompose (more heat needed to be applied to decompose the material). Larger differences were observed between these two techniques and the Setaram DSC analysis. On the one hand, the Setaram DSC technique only showed the NaBH 4 melting event at 509 • C. The reason for this was that the measurements were generally carried out in closed stainless steel (SS) crucibles. These were high pressure crucibles without a venting hole, and therefore, it was likely that the desorbed gas/H 2 built pressure inside the crucible and hindered the gas evolution, stopping the decomposition process. On the other hand, the melting point was observed to happen at about 6 • C higher than by the TPD and Netzsch techniques. The reason for this shift was related to both the different experimental environments (vacuum and Ar flows of 15 and 20 mL/min) and the different equilibrium pressure imposed by the closed crucible. The TGA data showed the expected single-step decomposition corresponding to H 2 to maximize beyond 600 • C and with its onset at 505 • C. Table 2 summarizes the data obtained by TPD for all the samples. The most important peaks were the main decomposition and melting events of NaBH 4 . Furthermore, Figure 2 shows the influence of the TM fluoride additive on the NaBH 4 decomposition temperature as measured by TPD and represented as the difference in the temperature between the decomposition peak of the sample with additive and that of pure NaBH 4 . 
Temperature-Programmed Desorption Results As seen in the figure, prominent reductions in the decomposition temperature corresponded to MnF 3 , CuF 2 , NiF 2 , and NbF 5 , with the largest reduction for 10 and 15 mol% NbF 5 , where the decomposition already occurred at 379 • C. The least influence on the decomposition behavior of NaBH 4 was observed for TiF 4 and YF 3 . However, this destabilization performance could not be correlated to a single fluoride property. On the one hand, MnF 3 , CuF 2 , and NiF 2 had a relatively high enthalpy of formation, ∆E f orm , suggesting that less energy was required to mix and react with the borohydride. On the other hand, NbF 5 had the highest metal oxidation state and the lowest fluoride melting point. The latter property had a strong effect during ball milling as it enhanced the effective surface area for reactions between the fluoride and the borohydride to occur. The high oxidation state of the metal then provided an electronic environment with an abundance of available e − to assist in further chemical reactions. By increasing the amount of NbF 5 additive from 2 to 10 mol%, the decomposition temperature of the remaining NaBH 4 decreased from 442 to 379 • C. Larger amounts of fluoride additive led to no change in the decomposition features, indicating that there was an optimal amount of additive of 10 to 15 mol% before NaBH 4 disappears. The influence of different oxidation states was represented by the CeF 3 and CeF 4 cases. For these samples, the TPD data showed that the TM with higher oxidation state resulted in a slightly higher decomposition temperature. The TPD signals corresponding to diborane species (m/z = 26, 27) were found to be two orders of magnitude weaker than those for hydrogen (m/z = 2) for the whole temperature range and for all the investigated samples. No indication of fluoride release was found. 
Overall, the hydrogen release temperature could not be correlated with the Pauling electronegativity of the TM (χρ) as it was reported for selected TM chlorides on a study on NH 3 BH 3 . Closed Crucible DSC-Setaram Discussion As discussed in Section 2.2.1, DSC-Setaram data of pure NaBH 4 showed a strong endothermic event occurring at 509 • C that corresponded with melting. Since the Setaram measurements were done in closed SS crucibles (closed system), the decomposition event at higher temperatures was hindered and not seen. The same effect was expected for the samples containing fluoride additives. In Figure 3, the samples are grouped based on DSC-Setaram measurements. ScF 3 , YF 3 , and CeF 3 showed a single endothermic peak attributed to the melting of NaBH 4 , but occurring at slightly lower temperatures (NaBH 4 509 • C > YF 3 507 • C ≥ CeF 3 507 • C > ScF 3 499 • C). The ScF 3 melting feature was broader and asymmetric compared to the narrower peaks of YF 3 and CeF 3 . From the PXD data in Section 2.1, it was found that the first two samples contained metallic Sc and Y, respectively, but no indications of metallic Ce were observed. Thus, the presence of metallic TM (Sc or Y, respectively) did not explain the different melting profiles. Moreover, the presence of metallic TM did not seem to influence the melting of NaBH 4 as seen by DSC-Setaram. On the other hand, the asymmetry of some melting peaks could be interpreted as the overlapping of the melting of NaBH 4 with other intermediate phases formed during heating, as well as by a small gas release, which was weakened in the SS closed system. The same asymmetry was seen in the DSC-Setaram of MnF 3 and NbF 5 , which still appeared as single peaks, but at much lower temperatures than the melting point of pure NaBH 4 (481.5 • C and 481 • C, respectively). 
These were samples that contained NaBF 4 (Table 1), which crystallized in a different space group than NaBH 4 and seemed to have a prominent effect on the melting point. In the case of MnF 3 , shoulders at both sides of the main peak also indicated the overlapping of events related to the presence of different phases and hindered gas release. The second group of samples (Group 2 in Figure 3) showed a strong decrease of the melting temperature of the ball-milled samples, which now appeared between 473 and 477 • C. In addition, the range of melting temperatures in this group showed a different level of interaction between the TM fluoride additives and NaBH 4 and their role in disturbing the intermolecular forces in the borohydride. The samples also showed similar second features after the melting peak, between 482 • C at the shoulder in TiF 4 and the single peak at 496 • C in ZnF 2 . This second feature occurred at too low temperatures to be associated with NaBH 4 decomposition, which was also hindered by the SS crucibles, avoiding gas release. Therefore, it could only be interpreted as processes occurring on intermediate phases created in the mixture during heating. This was also confirmed by the fact that as the intensity of the melting feature decreased, the stronger the feature at larger temperatures became, indicating that some of the NaBH 4 in the mixture after milling reacted during heating. A different behavior was seen in the DSC-Setaram of the remaining samples, AgF and CrF 3 . Their melting points were not strongly changed from that of NaBH 4 , suggesting a small effect by the presence of NaBF 4 after milling with AgF. On the other hand, features at lower temperatures than their melting peaks indicated the presence of other intermediate phases and processes happening during the low temperature stages of heating. All in all, the majority of the fluorides studied in this work had an influence on the melting temperature of NaBH 4 . 
This was particularly true for all the second period TMFs, but also for MnF 3 and NbF 5 . The least effective in decreasing the melting temperature, as measured by DSC-Setaram, were YF 3 and CeF 3 , and the most effective were MnF 3 , NbF 5 , NiF 2 , and CuF 2 . These latter fluorides were the same fluorides that led to the highest hydrogen releases as observed by TPD ( Figure 2). The DSC-Setaram data also showed a significant difference between the CeF 3 and CeF 4 samples (Group 4 in Figure 3). While the lower oxidation state compound only decreased the melting temperature of NaBH 4 slightly, the higher oxidation state showed a feature at 475 • C that could be assigned to the melting and a broad region of overlapping events between 477 and 500 • C. The comparison between the DSC-Setaram behavior of CeF 4 and the other tetravalent fluorides, VF 4 and ZrF 4 (Figure 3, Group 2) showed that CeF 4 had a smaller effect on the melting point of NaBH 4 . In the case of the trivalent fluorides, MnF 3 , FeF 3 , and CoF 3 showed the strongest decrease of the melting point (close to 475 • C), while CrF 3 and ScF 3 showed a smaller effect and CeF 3 and YF 3 the smallest effect. On the other hand, the increase of NbF 5 content in the mixture had the effect of reducing the intensity of the melting peak, as well as reducing the melting temperature. For the 15 mol% NbF 5 sample, the melting peak in the DSC-Setaram disappeared completely, indicating that the amount of NaBH 4 available was small (Group 5 in Figure 3). This was confirmed by the PXD data, which showed a decrease in the content of NaBH 4 after ball milling of about 40 % or ca. 59 wt% (Table 1). DSC-Netzsch Discussion DSC-Netzsch data complemented the findings by TPD and DSC-Setaram by showing the different calorimetric events during heating in the milled samples as presented in Figure 4. 
Both TPD and DSC-Netzsch showed a double feature corresponding to the melting and subsequent decomposition of remaining NaBH 4 for the samples in the first group. These were both hydrogen release events, with less gas being released during melting in the case of MnF 3 . This was also corroborated by TPD. For ScF 3 and CeF 3 , the melting regions were made of more than one feature in the DSC-Netzsch data. The extra features were not seen by TPD and therefore corresponded to phase transformations without gas release. From this group of samples, NbF 5 was the one with the lowest melting point and the lowest decomposition temperature for the remaining NaBH 4 , in agreement with both TPD and DSC-Setaram results. The second group of samples showed a heterogeneous behavior in DSC-Nezsch (Group 2 in Figure 4). This was consistent with the results by TPD and DSC-Setaram showing the variety of interactions between the different TM fluorides and NaBH 4 . Like in the previous group, the presence of extra features in the melting area mostly indicated phase transformations without gas release, except for ZnF 2 and NiF 2 , which showed a TPD shoulder at lower temperatures than the melting of the NaBH 4 feature. These features corresponded to the growing shoulder observed by DSC-Setaram in Figure 3. The group made of CrF 3 and AgF showed the presence of extra DSC features below the melting temperature of NaBH 4 in both Setaram and Netzsch data. In the case of CrF 3 , the lowest temperature feature corresponded with a gas release shoulder in TPD, while the event in AgF was a phase transformation without gas release. An extra feature at 580 • C for CrF 3 was only seen by DSC-Netzsch. This also corresponded to a phase transformation without gas release. The comparison between CeF 3 and CeF 4 confirmed the results by DSC-Setaram about the lower oxidation state being less efficient to decrease the melting and decomposition temperatures of NaBH 4 (Group 4 in Figure 4). 
Higher oxidation state systems such as CeF 4 packed more F − ions around the Ce cation compared to CeF 3 . This caused a decrease of the melting point from 817 to 650 • C that made the TMF more susceptible to reactions with NaBH 4 during milling. This coincided with the fact that the additive with the highest oxidation state, NbF 5 , resulted in some of the lowest melting and hydrogen desorption temperatures observed. On the other hand, the increase of NbF 5 content in the mixture led to the decrease of the intensity of the melting and decomposition features. For 15 mol% of NbF 5 , the DSC features were lost (Group 5 in Figure 4). Thermogravimetric Analysis The TGA data showed that for most of the samples, significant mass losses did not start until about 470 • C (see Figure A2 in Appendix A). The most notable exceptions were the NbF 5 and NiF 2 samples starting at about 400 • C. Below this temperature, the largest mass loss was seen for NbF 5 , 10 and 15 mol%, with 3.5 and 3.6 wt%, respectively, and for CeF 4 , with 1.4 wt%. All other samples showed mass losses below 1 wt% for the same temperature range. Based on the TPD and DSC results, the mass loss observed below the melting of NaBH 4 was related to intermediate phases formed during the heating process involving NaBH 4 . For the samples containing 2 mol% of TM fluoride, the largest mass evolution between 300 and 600 • C was seen for the YF 3 case (31.3 wt%), while the smallest mass loss was seen for the CoF 3 sample (16.4 wt%) ( Figure 5). These mass losses were larger than the gravimetric capacity of NaBH 4 (10.6 wt%). Thus, the mass loss in this temperature range included gas released during melting and decomposition of NaBH 4 , as well as gas release events related to other phases formed by the reaction of the fluoride and the borohydride. This might include a substantial evaporation of Na . 
The difference between CeF 3 and CeF 4 was a decrease of the mass loss for the higher oxidation state: 30 to 25.3 wt% between 300 and 600 • C. When increasing the amount of NbF 5 from 2 to 15 mol%, the mass loss went from 22.5 to 1.6 wt% in the same temperature range. This indicated that for the higher content of NbF 5 , a larger portion of the hydrogen contained in the mixture with NaBH 4 was released during the ball milling process due to the low melting point of the fluoride. In order to increase the hydrogen yield, the amount of NbF 5 had to be lower than 2 mol%, which would also affect the melting and decomposition temperatures. The TGA data showed that even if NbF 5 was one of the most efficient additives to decrease the melting and decomposition temperatures of NaBH 4 , as seen by TPD and DSC, its usefulness for hydrogen storage was hindered by its reactive behavior and the small yield of hydrogen obtained from the milled mixture. The results also showed that a TM fluoride such as MnF 3 produced a desirable destabilization of NaBH 4 , while still giving high hydrogen yields of 24.2 wt% between 300 and 600 • C (Table 3). Table 3. Mass loss measured by TG in the 100-300 • C and 300-600 • C temperature ranges. The tabulated values correspond to the minima of the measured data in every region. Samples are ordered from larger to smaller losses following Figure 5. Materials and Methods Mixtures containing pure NaBH4 (Sigma Aldrich, 99%) and a commercially available anhydrous transition metal fluoride (TMF, Sigma Aldrich: in 1:0.02 molar ratios (2 mol%)) were ball milled in Ar atmosphere using a Fritzch Pulverisette 7 Planetary Mill (300 rpm) and hardened stainless steel vials and balls ( Table 4). The samples included the complete first period TM and the available YF 3 , ZrF 4 , NbF 5 , AgF, and CdF 2 from the second period, as well as CeF 3 and CeF 4 . The lanthanide metal Ce was chosen due to its light weight. 
In the NbF 5 case, additional molar ratios of 1:0.10 and 1:0.15 (10 and 15 mol%) were also prepared to study the destabilization effect of increasing the amount of additive. The fluoride name was used throughout the text to identify the NaBH 4 + TMF mixture. All the samples were treated equally and were milled for 1 h with a ball-to-powder ratio of 40:1. Both hardened stainless steel vials and balls (10 mm φ) were used for the milling. Sample handling was carried out in MBraun Unilab glove boxes filled with purified argon (<1 ppm O 2 , H 2 O) to avoid contamination. Powder X-ray diffraction (PXD) patterns were collected in transmission mode using CuKα radiation (λ = 1.5418 Å) in a Bruker AXS D8 Advance Diffractometer equipped with a Göbel mirror and a LynxEye TM 1D strip detector. The samples were packed in sealed boron glass capillaries (0.5 and 0.8 mm φ) in Ar atmosphere. These were kept rotating during measurements to decrease preferred directionality effects. Small amounts of pure Si were added to some samples as internal standard (ABCR, APS 1-5 micron, 99.999%) to determine the instrumental off-set. Acquisition of data were restricted to the 2θ = 5-80 • range, with ∆2θ = 0.02 • and 2 s/step scanning rates. Differential scanning calorimetry (DSC) measurements were performed both in a Setaram Sensys DSC and a Netzsch STA 449 F3 Jupiter instrument that also performed simultaneous Thermogravimetric Analysis (TGA). In the Setaram case, 50 mg of sample were put into high pressure stainless steel crucibles that were heated up to 600 • C with an Ar flow of 15 ml/min and a heating rate of 2 • C/min. For the simultaneous TGA and DSC experiments performed in the Netzsch instrument, 3 to 5 mg samples were placed in Al crucibles with pierced lids and heated between 30 and 600 • C, with a heating rate of 2 • C/min under argon gas flow (100 mL/min). 
The different experimental conditions of the DSC experiments were chosen to provide as much complementary information as possible on the effects induced by the TM fluorides on the NaBH 4 . Additional temperature-programmed desorption (TPD) with residual gas analysis (RGA) data were collected from approximately 25 mg of sample with an in-house built setup under vacuum conditions (10 −5 mbar). Heating ramps between RT and 600 • C at a constant heating rate of 2 • C/min were used. RGA data were obtained with a MULTIVISON IP detector system coupled to a PROCESS Eye analysis package from MKS Instruments. Conclusions Transition metal fluorides from the first and second periods of the periodic table milled with NaBH 4 in a 0.02:1 molar ratio exhibited a destabilizing effect that led to the decrease of the melting and the decomposition temperatures of the borohydride below 505 • C and 535 • C, respectively. • In particular, NbF 5 and MnF 3 were very good destabilizers of NaBH 4 , with a 30 • C decrease of its melting temperature and a 50 to 57 • C decrease of its decomposition temperature, while still giving high decomposition gas yields in the 300 and 600 • C region of 24.2 and 22.5 wt%, for 2 mol% of MnF 3 and NbF 5 , respectively, that might include evaporation of Na. • In addition, the strong reactivity of NbF 5 meant that the yield of hydrogen from a mixture with NaBH 4 decreased strongly with increasing fluoride amount (1.6 wt%, for 15 mol% of NbF 5 ), since most of the hydrogen was lost during the ball milling process. • Increasing the additive amount from 2 to 10 and 15 mol% led to the loss of the NaBH 4 and therefore the loss of hydrogen yield during thermal decomposition. • Higher oxidation states of the metal in the fluoride were more efficient in reducing the melting and decomposition temperatures of NaBH 4 . 
This was confirmed by the comparison between CeF 3 and CeF 4 (506 and 502 • C, respectively), but also by the results showing NbF 5 , the TM fluoride with highest oxidation state, being one of the most efficient destabilizers. • An increase of the oxidation state also seemed to lead to a decrease of the gas yield in the 300 and 600 • C region, with 29.9 and 25.3 wt%, for CeF 3 and CeF 4 , respectively). It was found that the destabilizing performance of the studied fluorides depended on a combination of their properties rather than on a single parameter. Higher fluoride melting points required higher energy ball milling conditions than lower melting points to achieve similar chemical interactions with NaBH 4 during ball milling, while smaller enthalpies of formation and higher metal oxidation values enhanced the chemical interaction further during and after the ball milling process. Future studies are envisioned to understand how the different properties act on the most successful fluorides found in this work. Conflicts of Interest: The authors declare no conflict of interest. The funders had no role in the design of the study; in the collection, analyses, or interpretation of data; in the writing of the manuscript; nor in the decision to publish the results. Abbreviations The following abbreviations are used in this manuscript:
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Module      :  GetChar
-- Copyright   :
-- License     :  MIT
--
-- Maintainer  :  <EMAIL>
-- Stability   :
-- Portability :
--
-- | Portable single-character input.  Exports one function, 'GetChar.getChar',
-- whose implementation is selected at compile time via CPP:
--
--   * On Windows (MinGW builds) it binds directly to the C runtime's
--     @getch@ from @conio.h@ through the FFI.
--   * Everywhere else it simply delegates to 'System.IO.getChar'.
--
-----------------------------------------------------------------------------
module GetChar (
    GetChar.getChar
) where

#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
-- Windows: use the console I/O primitive from the C runtime.
-- Marked 'unsafe' because getch is a plain, non-reentrant C call.
foreign import ccall unsafe "conio.h getch" getChar :: IO Char
#else
-- Non-Windows platforms: fall back to the standard library.
import qualified System.IO as SIO

-- | Read one character from standard input (thin wrapper over 'SIO.getChar').
getChar :: IO Char
getChar = SIO.getChar
#endif
/** Return T iff STRATEGY would throw away all transformation. */
// NOTE: machine-translated SubL code (see the @SubL source annotation); the
// structure mirrors the original Lisp and should not be hand-restructured.
@SubL(source = "cycl/inference/harness/inference-tactician.lisp", position = 23179)
public static final SubLObject strategy_throws_away_all_transformationP(SubLObject strategy) {
    {
        final SubLThread thread = SubLProcess.currentSubLThread();
        // True when either:
        //   (a) the strategy's problem store does not allow transformation at
        //       all, or
        //   (b) the dynamic flag $set-aside-non-continuable-implies-throw-away?$
        //       is set, the strategy's inference is no longer continuable, and
        //       the strategy sets aside all transformation anyway.
        return makeBoolean(((NIL == inference_datastructures_problem_store.problem_store_transformation_allowedP(inference_datastructures_strategy.strategy_problem_store(strategy)))
             || ((NIL != $set_aside_non_continuable_implies_throw_awayP$.getDynamicValue(thread))
                 && (NIL == inference_datastructures_inference.inference_continuableP(inference_datastructures_strategy.strategy_inference(strategy)))
                 && (NIL != strategy_sets_aside_all_transformationP(strategy)))));
    }
}
//////////////////////////////////////////////////////////////////////////////// // // // AliFemtoCorrFctnDEtaDPhi - A correlation function that analyzes // // two particle correlations with respect to the azimuthal angle (phi) // // and pseudorapidity (eta) difference // // // //////////////////////////////////////////////////////////////////////////////// #include "AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections.h" #include "AliFemtoModelHiddenInfo.h" //#include "AliFemtoHisto.hh" #include <cstdio> #include <TMath.h> #ifdef __ROOT__ ClassImp(AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections) #endif #define PIH 1.57079632679489656 #define PIT 6.28318530717958623 //____________________________ AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections(const char* title, const int& aPhiBins=20, const int& aEtaBins=20): AliFemtoCorrFctn(), fDPhiDEtaNumerator(0), fDPhiDEtaDenominator(0), fDPhiDEtaHiddenNumerator(0), fDPhiDEtaHiddenDenominator(0), fDPhiDEtaHiddenPrimaryNumerator(0), fDPhiDEtaHiddenPrimaryDenominator(0), fDPhiDEtaHiddenSecWeakNumerator(0), fDPhiDEtaHiddenSecWeakDenominator(0), fDPhiDEtaHiddenSecMatNumerator(0), fDPhiDEtaHiddenSecMatDenominator(0), fDPhiDEtaHiddenPrimaryNumeratorData(0), fDPhiDEtaHiddenPrimaryDenominatorData(0), fDPhiDEtaHiddenSecWeakNumeratorData(0), fDPhiDEtaHiddenSecWeakDenominatorData(0), fDPhiDEtaHiddenSecMatNumeratorData(0), fDPhiDEtaHiddenSecMatDenominatorData(0), fphiL(0), fphiT(0), fEtaBins(0), fPhiBins(0), ftitle(title), fReadHiddenInfo(false) { fphiL = (-(int)(aPhiBins/4)+0.5)*2.*TMath::Pi()/aPhiBins; fphiT = 2*TMath::Pi()+(-(int)(aPhiBins/4)+0.5)*2.*TMath::Pi()/aPhiBins; fEtaBins = aEtaBins; fPhiBins = aPhiBins; // set up numerator char tTitNumD[101] = "NumDPhiDEta"; strncat(tTitNumD,title, 100); fDPhiDEtaNumerator = new TH2D(tTitNumD,title,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitDenD[101] = "DenDPhiDEta"; strncat(tTitDenD,title, 100); fDPhiDEtaDenominator = new 
TH2D(tTitDenD,title,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // to enable error bar calculation... fDPhiDEtaNumerator->Sumw2(); fDPhiDEtaDenominator->Sumw2(); } //____________________________ AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections(const AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections& aCorrFctn) : AliFemtoCorrFctn(), fDPhiDEtaNumerator(0), fDPhiDEtaDenominator(0), fDPhiDEtaHiddenNumerator(0), fDPhiDEtaHiddenDenominator(0), fDPhiDEtaHiddenPrimaryNumerator(0), fDPhiDEtaHiddenPrimaryDenominator(0), fDPhiDEtaHiddenSecWeakNumerator(0), fDPhiDEtaHiddenSecWeakDenominator(0), fDPhiDEtaHiddenSecMatNumerator(0), fDPhiDEtaHiddenSecMatDenominator(0), fDPhiDEtaHiddenPrimaryNumeratorData(0), fDPhiDEtaHiddenPrimaryDenominatorData(0), fDPhiDEtaHiddenSecWeakNumeratorData(0), fDPhiDEtaHiddenSecWeakDenominatorData(0), fDPhiDEtaHiddenSecMatNumeratorData(0), fDPhiDEtaHiddenSecMatDenominatorData(0), fphiL(0), fphiT(0), fEtaBins(0), fPhiBins(0), ftitle(aCorrFctn.ftitle), fReadHiddenInfo(false) { fEtaBins = aCorrFctn.fEtaBins; fPhiBins = aCorrFctn.fPhiBins; // copy constructor if (aCorrFctn.fDPhiDEtaNumerator) fDPhiDEtaNumerator = new TH2D(*aCorrFctn.fDPhiDEtaNumerator); else fDPhiDEtaNumerator = 0; if (aCorrFctn.fDPhiDEtaDenominator) fDPhiDEtaDenominator = new TH2D(*aCorrFctn.fDPhiDEtaDenominator); else fDPhiDEtaDenominator = 0; fReadHiddenInfo = aCorrFctn.fReadHiddenInfo; if(fReadHiddenInfo) { if (aCorrFctn.fDPhiDEtaHiddenNumerator) fDPhiDEtaHiddenNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenNumerator); else fDPhiDEtaHiddenNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenDenominator) fDPhiDEtaHiddenDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenDenominator); else fDPhiDEtaHiddenDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryNumerator) fDPhiDEtaHiddenPrimaryNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryNumerator); else fDPhiDEtaHiddenPrimaryNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryDenominator) 
fDPhiDEtaHiddenPrimaryDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryDenominator); else fDPhiDEtaHiddenPrimaryDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakNumerator) fDPhiDEtaHiddenSecWeakNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakNumerator); else fDPhiDEtaHiddenSecWeakNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakDenominator) fDPhiDEtaHiddenSecWeakDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakDenominator); else fDPhiDEtaHiddenSecWeakDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatNumerator) fDPhiDEtaHiddenSecMatNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatNumerator); else fDPhiDEtaHiddenSecMatNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatDenominator) fDPhiDEtaHiddenSecMatDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatDenominator); else fDPhiDEtaHiddenSecMatDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryNumeratorData) fDPhiDEtaHiddenPrimaryNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryNumeratorData); else fDPhiDEtaHiddenPrimaryNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryDenominatorData) fDPhiDEtaHiddenPrimaryDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryDenominatorData); else fDPhiDEtaHiddenPrimaryDenominatorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakNumeratorData) fDPhiDEtaHiddenSecWeakNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakNumeratorData); else fDPhiDEtaHiddenSecWeakNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakDenominatorData) fDPhiDEtaHiddenSecWeakDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakDenominatorData); else fDPhiDEtaHiddenSecWeakDenominatorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatNumeratorData) fDPhiDEtaHiddenSecMatNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatNumeratorData); else fDPhiDEtaHiddenSecMatNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatDenominatorData) fDPhiDEtaHiddenSecMatDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatDenominatorData); else 
fDPhiDEtaHiddenSecMatDenominatorData = 0; } } //____________________________ AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::~AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections(){ // destructor delete fDPhiDEtaNumerator; delete fDPhiDEtaDenominator; if(fReadHiddenInfo) { delete fDPhiDEtaHiddenNumerator; delete fDPhiDEtaHiddenDenominator; delete fDPhiDEtaHiddenPrimaryNumerator; delete fDPhiDEtaHiddenPrimaryDenominator; delete fDPhiDEtaHiddenSecWeakNumerator; delete fDPhiDEtaHiddenSecWeakDenominator; delete fDPhiDEtaHiddenSecMatNumerator; delete fDPhiDEtaHiddenSecMatDenominator; delete fDPhiDEtaHiddenPrimaryNumeratorData; delete fDPhiDEtaHiddenPrimaryDenominatorData; delete fDPhiDEtaHiddenSecWeakNumeratorData; delete fDPhiDEtaHiddenSecWeakDenominatorData; delete fDPhiDEtaHiddenSecMatNumeratorData; delete fDPhiDEtaHiddenSecMatDenominatorData; } } //_________________________ AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections& AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::operator=(const AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections& aCorrFctn) { // assignment operator if (this == &aCorrFctn) return *this; fEtaBins = aCorrFctn.fEtaBins; fPhiBins = aCorrFctn.fPhiBins; ftitle = aCorrFctn.ftitle; if (aCorrFctn.fDPhiDEtaNumerator) fDPhiDEtaNumerator = new TH2D(*aCorrFctn.fDPhiDEtaNumerator); else fDPhiDEtaNumerator = 0; if (aCorrFctn.fDPhiDEtaDenominator) fDPhiDEtaDenominator = new TH2D(*aCorrFctn.fDPhiDEtaDenominator); else fDPhiDEtaDenominator = 0; fReadHiddenInfo = aCorrFctn.fReadHiddenInfo; if(fReadHiddenInfo) { if (aCorrFctn.fDPhiDEtaHiddenNumerator) fDPhiDEtaHiddenNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenNumerator); else fDPhiDEtaHiddenNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenDenominator) fDPhiDEtaHiddenDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenDenominator); else fDPhiDEtaHiddenDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryNumerator) fDPhiDEtaHiddenPrimaryNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryNumerator); else 
fDPhiDEtaHiddenPrimaryNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryDenominator) fDPhiDEtaHiddenPrimaryDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryDenominator); else fDPhiDEtaHiddenPrimaryDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakNumerator) fDPhiDEtaHiddenSecWeakNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakNumerator); else fDPhiDEtaHiddenSecWeakNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakDenominator) fDPhiDEtaHiddenSecWeakDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakDenominator); else fDPhiDEtaHiddenSecWeakDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatNumerator) fDPhiDEtaHiddenSecMatNumerator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatNumerator); else fDPhiDEtaHiddenSecMatNumerator = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatDenominator) fDPhiDEtaHiddenSecMatDenominator = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatDenominator); else fDPhiDEtaHiddenSecMatDenominator = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryNumeratorData) fDPhiDEtaHiddenPrimaryNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryNumeratorData); else fDPhiDEtaHiddenPrimaryNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenPrimaryDenominatorData) fDPhiDEtaHiddenPrimaryDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenPrimaryDenominatorData); else fDPhiDEtaHiddenPrimaryDenominatorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakNumeratorData) fDPhiDEtaHiddenSecWeakNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakNumeratorData); else fDPhiDEtaHiddenSecWeakNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecWeakDenominatorData) fDPhiDEtaHiddenSecWeakDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecWeakDenominatorData); else fDPhiDEtaHiddenSecWeakDenominatorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatNumeratorData) fDPhiDEtaHiddenSecMatNumeratorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatNumeratorData); else fDPhiDEtaHiddenSecMatNumeratorData = 0; if (aCorrFctn.fDPhiDEtaHiddenSecMatDenominatorData) 
fDPhiDEtaHiddenSecMatDenominatorData = new TH2D(*aCorrFctn.fDPhiDEtaHiddenSecMatDenominatorData); else fDPhiDEtaHiddenSecMatDenominatorData = 0; } return *this; } //_________________________ void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::Finish(){ // here is where we should normalize, fit, etc... // we should NOT Draw() the histos (as I had done it below), // since we want to insulate ourselves from root at this level // of the code. Do it instead at root command line with browser. // mShareNumerator->Draw(); //mShareDenominator->Draw(); //mRatio->Draw(); } //____________________________ AliFemtoString AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::Report(){ // create report string stemp = "TPC Ncls Correlation Function Report:\n"; char ctemp[100]; snprintf(ctemp , 100, "Number of entries in numerator:\t%E\n",fDPhiDEtaNumerator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in denominator:\t%E\n",fDPhiDEtaDenominator->GetEntries()); stemp += ctemp; if(fReadHiddenInfo) { snprintf(ctemp , 100, "Number of entries in hidden numerator:\t%E\n",fDPhiDEtaHiddenNumerator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden denominator:\t%E\n",fDPhiDEtaHiddenDenominator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden primary numerator:\t%E\n",fDPhiDEtaHiddenPrimaryNumerator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden primary denominator:\t%E\n",fDPhiDEtaHiddenPrimaryDenominator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. weak numerator:\t%E\n",fDPhiDEtaHiddenSecWeakNumerator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. weak denominator:\t%E\n",fDPhiDEtaHiddenSecWeakDenominator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. 
material numerator:\t%E\n",fDPhiDEtaHiddenSecMatNumerator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. material denominator:\t%E\n",fDPhiDEtaHiddenSecMatDenominator->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden primary numerator data:\t%E\n",fDPhiDEtaHiddenPrimaryNumeratorData->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden primary denominator data:\t%E\n",fDPhiDEtaHiddenPrimaryDenominatorData->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. weak numerator data:\t%E\n",fDPhiDEtaHiddenSecWeakNumeratorData->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. weak denominator data:\t%E\n",fDPhiDEtaHiddenSecWeakDenominatorData->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. material numerator data:\t%E\n",fDPhiDEtaHiddenSecMatNumeratorData->GetEntries()); stemp += ctemp; snprintf(ctemp , 100, "Number of entries in hidden second. 
material denominator data:\t%E\n",fDPhiDEtaHiddenSecMatDenominatorData->GetEntries()); stemp += ctemp; } // stemp += mCoulombWeight->Report(); AliFemtoString returnThis = stemp; return returnThis; } //____________________________ void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::AddRealPair( AliFemtoPair* pair){ // add real (effect) pair if (fPairCut && !fPairCut->Pass(pair)) { return; } double phi1 = pair->Track1()->FourMomentum().Phi(); double phi2 = pair->Track2()->FourMomentum().Phi(); double eta1 = pair->Track1()->FourMomentum().PseudoRapidity(); double eta2 = pair->Track2()->FourMomentum().PseudoRapidity(); double dphi = phi1 - phi2; while (dphi<fphiL) dphi+=PIT; while (dphi>fphiT) dphi-=PIT; double deta = eta1 - eta2; float weight = 1; if(pair->Track1()->Track()){ if(part1==kPion) weight = pair->Track1()->Track()->CorrectionPion(); else if(part1==kKaon) weight = pair->Track1()->Track()->CorrectionKaon(); else if(part1==kProton) weight = pair->Track1()->Track()->CorrectionProton(); else if(part1==kPionMinus) weight = pair->Track1()->Track()->CorrectionPionMinus(); else if(part1==kKaonMinus) weight = pair->Track1()->Track()->CorrectionKaonMinus(); else if(part1==kProtonMinus) weight = pair->Track1()->Track()->CorrectionProtonMinus(); else if(part1==kAll) weight = pair->Track1()->Track()->CorrectionAll(); } if(pair->Track1()->V0()){ if(part1==kLambda) weight = pair->Track1()->V0()->CorrectionLambda(); if(part1==kLambdaMinus) weight = pair->Track1()->V0()->CorrectionLambdaMinus(); } if(pair->Track2()->Track()){ if(part2==kPion) weight *= pair->Track2()->Track()->CorrectionPion(); else if(part2==kKaon) weight *= pair->Track2()->Track()->CorrectionKaon(); else if(part2==kProton) weight *= pair->Track2()->Track()->CorrectionProton(); else if(part2==kPionMinus) weight *= pair->Track2()->Track()->CorrectionPionMinus(); else if(part2==kKaonMinus) weight *= pair->Track2()->Track()->CorrectionKaonMinus(); else if(part2==kProtonMinus) weight *= 
pair->Track2()->Track()->CorrectionProtonMinus(); else if(part2==kAll) weight *= pair->Track2()->Track()->CorrectionAll(); } if(pair->Track2()->V0()){ if(part2==kLambda) weight *= pair->Track2()->V0()->CorrectionLambda(); if(part2==kLambdaMinus) weight *= pair->Track2()->V0()->CorrectionLambdaMinus(); } fDPhiDEtaNumerator->Fill(dphi, deta, weight); if(fReadHiddenInfo) { AliFemtoModelHiddenInfo* hInfo1 = 0; AliFemtoModelHiddenInfo* hInfo2 = 0; if(pair->Track1()->Track()) { hInfo1 = (AliFemtoModelHiddenInfo*)pair->Track1()->Track()->GetHiddenInfo(); } if(pair->Track1()->V0()) { hInfo1 = (AliFemtoModelHiddenInfo*)pair->Track1()->V0()->GetHiddenInfo(); } if(pair->Track2()->Track()) { hInfo2 = (AliFemtoModelHiddenInfo*)pair->Track2()->Track()->GetHiddenInfo(); } if(pair->Track2()->V0()) { hInfo2 = (AliFemtoModelHiddenInfo*)pair->Track2()->V0()->GetHiddenInfo(); } if(hInfo1 && hInfo2) { AliFemtoThreeVector *v1 = hInfo1->GetTrueMomentum(); AliFemtoThreeVector *v2 = hInfo2->GetTrueMomentum(); double hphi1 = v1->Phi(); double hphi2 = v2->Phi(); double heta1 = v1->PseudoRapidity(); double heta2 = v2->PseudoRapidity(); double dhphi = hphi1 - hphi2; while (dhphi<fphiL) dhphi+=PIT; while (dhphi>fphiT) dhphi-=PIT; double dheta = heta1 - heta2; fDPhiDEtaHiddenNumerator->Fill(dhphi, dheta,weight); if(hInfo1->GetOrigin()==0 && hInfo2->GetOrigin()==0) { fDPhiDEtaHiddenPrimaryNumerator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenPrimaryNumeratorData->Fill(dphi,deta,weight); } else if(hInfo1->GetOrigin()==1 || hInfo2->GetOrigin()==1) { fDPhiDEtaHiddenSecWeakNumerator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenSecWeakNumeratorData->Fill(dphi,deta,weight); } else if(hInfo1->GetOrigin()==2 || hInfo2->GetOrigin()==2) { fDPhiDEtaHiddenSecMatNumerator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenSecMatNumeratorData->Fill(dphi,deta,weight); } } } } //____________________________ void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::AddMixedPair( AliFemtoPair* pair){ // add mixed (background) pair if 
(fPairCut && !fPairCut->Pass(pair)) { return; } double phi1 = pair->Track1()->FourMomentum().Phi(); double phi2 = pair->Track2()->FourMomentum().Phi(); double eta1 = pair->Track1()->FourMomentum().PseudoRapidity(); double eta2 = pair->Track2()->FourMomentum().PseudoRapidity(); double dphi = phi1 - phi2; while (dphi<fphiL) dphi+=PIT; while (dphi>fphiT) dphi-=PIT; double deta = eta1 - eta2; float weight = 1; if(pair->Track1()->Track()){ if(part1==kPion) weight = pair->Track1()->Track()->CorrectionPion(); else if(part1==kKaon) weight = pair->Track1()->Track()->CorrectionKaon(); else if(part1==kProton) weight = pair->Track1()->Track()->CorrectionProton(); else if(part1==kPionMinus) weight = pair->Track1()->Track()->CorrectionPionMinus(); else if(part1==kKaonMinus) weight = pair->Track1()->Track()->CorrectionKaonMinus(); else if(part1==kProtonMinus) weight = pair->Track1()->Track()->CorrectionProtonMinus(); else if(part1==kAll) weight = pair->Track1()->Track()->CorrectionAll(); } if(pair->Track1()->V0()){ if(part1==kLambda) weight = pair->Track1()->V0()->CorrectionLambda(); else if(part1==kLambdaMinus) weight = pair->Track1()->V0()->CorrectionLambdaMinus(); } if(pair->Track2()->Track()){ if(part2==kPion) weight *= pair->Track2()->Track()->CorrectionPion(); else if(part2==kKaon) weight *= pair->Track2()->Track()->CorrectionKaon(); else if(part2==kProton) weight *= pair->Track2()->Track()->CorrectionProton(); else if(part2==kPionMinus) weight *= pair->Track2()->Track()->CorrectionPionMinus(); else if(part2==kKaonMinus) weight *= pair->Track2()->Track()->CorrectionKaonMinus(); else if(part2==kProtonMinus) weight *= pair->Track2()->Track()->CorrectionProtonMinus(); else if(part2==kAll) weight *= pair->Track2()->Track()->CorrectionAll(); } if(pair->Track2()->V0()){ if(part2==kLambda) weight *= pair->Track2()->V0()->CorrectionLambda(); else if(part2==kLambdaMinus) weight *= pair->Track2()->V0()->CorrectionLambdaMinus(); } if(pair->Track2()->Xi()){ if(part2==kXiMinus) weight 
*= pair->Track2()->Xi()->CorrectionXiMinus(); else if(part2==kXiPlus) weight *= pair->Track2()->Xi()->CorrectionXiPlus(); } fDPhiDEtaDenominator->Fill(dphi, deta, weight); if(fReadHiddenInfo) { AliFemtoModelHiddenInfo* hInfo1 = 0; AliFemtoModelHiddenInfo* hInfo2 = 0; if(pair->Track1()->Track()) { hInfo1 = (AliFemtoModelHiddenInfo*)pair->Track1()->Track()->GetHiddenInfo(); } if(pair->Track1()->V0()) { hInfo1 = (AliFemtoModelHiddenInfo*)pair->Track1()->V0()->GetHiddenInfo(); } if(pair->Track2()->Track()) { hInfo2 = (AliFemtoModelHiddenInfo*)pair->Track2()->Track()->GetHiddenInfo(); } if(pair->Track2()->V0()) { hInfo2 = (AliFemtoModelHiddenInfo*)pair->Track2()->V0()->GetHiddenInfo(); } if(hInfo1 && hInfo2) { AliFemtoThreeVector *v1 = hInfo1->GetTrueMomentum(); AliFemtoThreeVector *v2 = hInfo2->GetTrueMomentum(); double hphi1 = v1->Phi(); double hphi2 = v2->Phi(); double heta1 = v1->PseudoRapidity(); double heta2 = v2->PseudoRapidity(); double dhphi = hphi1 - hphi2; while (dhphi<fphiL) dhphi+=PIT; while (dhphi>fphiT) dhphi-=PIT; double dheta = heta1 - heta2; fDPhiDEtaHiddenDenominator->Fill(dhphi, dheta,weight); if(hInfo1->GetOrigin()==0 && hInfo2->GetOrigin()==0) { fDPhiDEtaHiddenPrimaryDenominator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenPrimaryDenominatorData->Fill(dphi,deta,weight); } else if(hInfo1->GetOrigin()==1 || hInfo2->GetOrigin()==1) { fDPhiDEtaHiddenSecWeakDenominator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenSecWeakDenominatorData->Fill(dphi,deta,weight); } else if(hInfo1->GetOrigin()==2 || hInfo2->GetOrigin()==2) { fDPhiDEtaHiddenSecMatDenominator->Fill(dhphi,dheta,weight); fDPhiDEtaHiddenSecMatDenominator->Fill(dphi,deta,weight); } } } } void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::WriteHistos() { // Write out result histograms fDPhiDEtaNumerator->Write(); fDPhiDEtaDenominator->Write(); if(fReadHiddenInfo) { fDPhiDEtaHiddenNumerator->Write(); fDPhiDEtaHiddenDenominator->Write(); fDPhiDEtaHiddenPrimaryNumerator->Write(); 
fDPhiDEtaHiddenPrimaryDenominator->Write(); fDPhiDEtaHiddenSecWeakNumerator->Write(); fDPhiDEtaHiddenSecWeakDenominator->Write(); fDPhiDEtaHiddenSecMatNumerator->Write(); fDPhiDEtaHiddenSecMatDenominator->Write(); fDPhiDEtaHiddenPrimaryNumeratorData->Write(); fDPhiDEtaHiddenPrimaryDenominatorData->Write(); fDPhiDEtaHiddenSecWeakNumeratorData->Write(); fDPhiDEtaHiddenSecWeakDenominatorData->Write(); fDPhiDEtaHiddenSecMatNumeratorData->Write(); fDPhiDEtaHiddenSecMatDenominatorData->Write(); } } TList* AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::GetOutputList() { // Prepare the list of objects to be written to the output TList *tOutputList = new TList(); tOutputList->Add(fDPhiDEtaNumerator); tOutputList->Add(fDPhiDEtaDenominator); if(fReadHiddenInfo) { tOutputList->Add(fDPhiDEtaHiddenNumerator); tOutputList->Add(fDPhiDEtaHiddenDenominator); tOutputList->Add(fDPhiDEtaHiddenPrimaryNumerator); tOutputList->Add(fDPhiDEtaHiddenPrimaryDenominator); tOutputList->Add(fDPhiDEtaHiddenSecWeakNumerator); tOutputList->Add(fDPhiDEtaHiddenSecWeakDenominator); tOutputList->Add(fDPhiDEtaHiddenSecMatNumerator); tOutputList->Add(fDPhiDEtaHiddenSecMatDenominator); tOutputList->Add(fDPhiDEtaHiddenPrimaryNumeratorData); tOutputList->Add(fDPhiDEtaHiddenPrimaryDenominatorData); tOutputList->Add(fDPhiDEtaHiddenSecWeakNumeratorData); tOutputList->Add(fDPhiDEtaHiddenSecWeakDenominatorData); tOutputList->Add(fDPhiDEtaHiddenSecMatNumeratorData); tOutputList->Add(fDPhiDEtaHiddenSecMatDenominatorData); } return tOutputList; } void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::SetParticleTypes(ParticleType partType1, ParticleType partType2) { part1=partType1; part2=partType2; } void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::SetParticle1Type(ParticleType partType) { part1=partType; } void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::SetParticle2Type(ParticleType partType) { part2=partType; } void AliFemtoCorrFctnDEtaDPhiSimpleWithCorrections::SetReadHiddenInfo(bool read) { fReadHiddenInfo 
= read; int aEtaBins = fEtaBins; int aPhiBins = fPhiBins; // set up numerator char tTitHNumD[101] = "NumDPhiDEtaHidden"; strncat(tTitHNumD,ftitle, 100); fDPhiDEtaHiddenNumerator = new TH2D(tTitHNumD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHDenD[101] = "DenDPhiDEtaHidden"; strncat(tTitHDenD,ftitle, 100); fDPhiDEtaHiddenDenominator = new TH2D(tTitHDenD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHPrimNumD[101] = "NumDPhiDEtaHiddenPrimary"; strncat(tTitHPrimNumD,ftitle, 100); fDPhiDEtaHiddenPrimaryNumerator = new TH2D(tTitHPrimNumD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHPrimDenD[101] = "DenDPhiDEtaHiddenPrimary"; strncat(tTitHPrimDenD,ftitle, 100); fDPhiDEtaHiddenPrimaryDenominator = new TH2D(tTitHPrimDenD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHSWNumD[101] = "NumDPhiDEtaHiddenSecWeak"; strncat(tTitHSWNumD,ftitle, 100); fDPhiDEtaHiddenSecWeakNumerator = new TH2D(tTitHSWNumD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHSWDenD[101] = "DenDPhiDEtaHiddenSecWeak"; strncat(tTitHSWDenD,ftitle, 100); fDPhiDEtaHiddenSecWeakDenominator = new TH2D(tTitHSWDenD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHSMNumD[101] = "NumDPhiDEtaHiddenSecMat"; strncat(tTitHSMNumD,ftitle, 100); fDPhiDEtaHiddenSecMatNumerator = new TH2D(tTitHSMNumD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHSMDenD[101] = "DenDPhiDEtaHiddenSecMat"; strncat(tTitHSMDenD,ftitle, 100); fDPhiDEtaHiddenSecMatDenominator = new TH2D(tTitHSMDenD,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHPrimNumDData[101] = "NumDPhiDEtaHiddenPrimaryData"; strncat(tTitHPrimNumDData,ftitle, 100); fDPhiDEtaHiddenPrimaryNumeratorData = new TH2D(tTitHPrimNumDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHPrimDenDData[101] 
= "DenDPhiDEtaHiddenPrimaryData"; strncat(tTitHPrimDenDData,ftitle, 100); fDPhiDEtaHiddenPrimaryDenominatorData = new TH2D(tTitHPrimDenDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHSWNumDData[101] = "NumDPhiDEtaHiddenSecWeakData"; strncat(tTitHSWNumDData,ftitle, 100); fDPhiDEtaHiddenSecWeakNumeratorData = new TH2D(tTitHSWNumDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHSWDenDData[101] = "DenDPhiDEtaHiddenSecWeakData"; strncat(tTitHSWDenDData,ftitle, 100); fDPhiDEtaHiddenSecWeakDenominatorData = new TH2D(tTitHSWDenDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up numerator char tTitHSMNumDData[101] = "NumDPhiDEtaHiddenSecMatData"; strncat(tTitHSMNumDData,ftitle, 100); fDPhiDEtaHiddenSecMatNumeratorData = new TH2D(tTitHSMNumDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); // set up denominator char tTitHSMDenDData[101] = "DenDPhiDEtaHiddenSecMatData"; strncat(tTitHSMDenDData,ftitle, 100); fDPhiDEtaHiddenSecMatDenominatorData = new TH2D(tTitHSMDenDData,ftitle,aPhiBins,fphiL,fphiT,aEtaBins,-2.0,2.0); fDPhiDEtaHiddenNumerator->Sumw2(); fDPhiDEtaHiddenDenominator->Sumw2(); fDPhiDEtaHiddenPrimaryNumerator->Sumw2(); fDPhiDEtaHiddenPrimaryDenominator->Sumw2(); fDPhiDEtaHiddenSecWeakNumerator->Sumw2(); fDPhiDEtaHiddenSecWeakDenominator->Sumw2(); fDPhiDEtaHiddenSecMatNumerator->Sumw2(); fDPhiDEtaHiddenSecMatDenominator->Sumw2(); fDPhiDEtaHiddenPrimaryNumeratorData->Sumw2(); fDPhiDEtaHiddenPrimaryDenominatorData->Sumw2(); fDPhiDEtaHiddenSecWeakNumeratorData->Sumw2(); fDPhiDEtaHiddenSecWeakDenominatorData->Sumw2(); fDPhiDEtaHiddenSecMatNumeratorData->Sumw2(); fDPhiDEtaHiddenSecMatDenominatorData->Sumw2(); }
// Make sure that NCM VERSION_MISMATCH-es are retried TEST_F(NodeRegistrationHandlerTest, testRetryOnVersionMismatch) { auto settings = buildServerSettings("node1"); auto admin_settings = buildAdminServerSettings("node1"); auto store = std::make_shared<MockNodesConfigurationStore>(); testing::InSequence seq; EXPECT_CALL( *store, updateConfigSync( _, VersionedConfigStore::Condition(MembershipVersion::Type(0)), _, _)) .WillOnce(testing::Return(Status::VERSION_MISMATCH)); EXPECT_CALL(*store, getConfigSync(_, _)) .WillOnce(testing::Invoke([](auto* value_out, auto) { auto nc = NodesConfiguration{}.withVersion(MembershipVersion::Type(1)); if (value_out) { *value_out = NodesConfigurationCodec::serialize(std::move(*nc)); } return Status::OK; })); EXPECT_CALL( *store, updateConfigSync( _, VersionedConfigStore::Condition(MembershipVersion::Type(1)), _, _)) .WillOnce( testing::Invoke([](auto, auto, auto* version_out, auto* value_out) { auto version = MembershipVersion::Type(2); auto nc = NodesConfiguration{}.withVersion(version); if (version_out) { *version_out = version; } if (value_out) { *value_out = NodesConfigurationCodec::serialize(std::move(*nc)); } return Status::VERSION_MISMATCH; })); EXPECT_CALL( *store, updateConfigSync( _, VersionedConfigStore::Condition(MembershipVersion::Type(2)), _, _)) .WillOnce(testing::Return(Status::OK)); auto updateable_nc = std::make_shared<UpdateableNodesConfiguration>(); updateable_nc->update(std::make_shared<NodesConfiguration>()); NodeRegistrationHandler handler{ settings, admin_settings, updateable_nc, store}; auto res = handler.registerSelf(NodeIndicesAllocator{}); ASSERT_TRUE(res.hasValue()); ASSERT_EQ(MembershipVersion::Type(2), updateable_nc->get()->getVersion()); }
The four Guy children gathered last week for a Thanksgiving feast at their parents’ tidy, two-story home, situated on the corner lot of Goldenview Lane inside a manicured neighborhood in Knoxville, Tenn. It would be the family’s last holiday together in that house, for in two weeks, Joel Guy Sr., and his wife, Lisa, planned to move to his late mother’s mountain home 90 miles away in Surgoinsville. Family members there told the Kingsport Times-News that they were looking forward to a Christmas reunion. The Guys said their post-Thanksgiving goodbyes first to their three daughters, who all live in Tennessee, and planned to send-off their son, Joel Michael Guy Jr., 28, on Friday. He’d been living in Baton Rouge, for nearly a decade, and his parents financially supported him. After Thanksgiving, authorities say, they’d planned to tell him they were cutting him off. Louisiana authorities have arrested a man accused of stabbing his parents to death, dismembering their bodies, and trying to dissolve them in acid. (Reuters) It’s still unclear what happened next, whether the Guys ever had a chance to deliver their message. What authorities do know is that the 28-year-old stayed in Tennessee three days longer than he’d planned, and that by Sunday afternoon, his parents’ home had been turned into what police called a “horrific” and “very gruesome” crime scene. During a welfare check Monday, after Lisa Guy’s employer told police that she didn’t show up for work, authorities entered the home and discovered a barking dog locked in an upstairs room and the remains of Joel Sr. and Lisa Guy scattered throughout the house, their dismembered body parts resting in a homemade acidic solution concocted to erase evidence of the crimes that took place there. There were signs of a struggle, authorities said, and the scene was so toxic and spread out that it took investigators in hazmat suits all of Monday and Tuesday to process the evidence. 
Kingsport native and wife brutally murdered; son arrestedhttps://t.co/4fYMvS2XAw — Times-News Online (@timesnewsonline) November 30, 2016 On Tuesday night, Joel Guy Jr. was arrested outside his Baton Rouge apartment and charged with first-degree murder, the Knox County Sheriff’s Office announced at a news conference Wednesday. Authorities think he worked alone, killing his parents sometime between Friday evening and Saturday afternoon. He stayed in the house with their remains until Sunday, when he drove his car to Baton Rouge. At the news conference, Maj. Michael MacLean with the sheriff’s department called the crimes “very, very rare.” “It’s in the one percentile of homicides in the United States that involve mutilation or dismemberment. It’s not something we run across,” MacLean said. “Usually there’s a motivation behind it, in this case we just don’t know what that is.” Many unknowns still remain after Baton Rouge man kills, dismembers parents over Thanksgiving in Tennessee https://t.co/hFw2YFX46x pic.twitter.com/v8P155mp26 — The Advocate (@theadvocatebr) December 1, 2016 Although family members told authorities about the elder Guys’ plans to scale back their son’s financial support, MacLean would not confirm whether that was their son’s motive for allegedly killing and mutilating them. Authorities consider it a possible motive, MacLean said at the news conference, but Joel Guy Jr. has been unwilling to speak with police since they took him into custody Tuesday night. It’s unclear, but unlikely, that the son was a beneficiary of his parents’ life insurance policy, MacLean said. The sisters told police that nothing seemed amiss with their brother during Thanksgiving, and that there were no family disputes that day. MacLean said that authorities are not aware of any history of mental illness and that the suspect has no criminal record. [3 dead, 5 ill in Calif. 
after Thanksgiving charity dinner, health officials say] Although the acidic solution the victims were found in — a corrosive cocktail of liquid fire, hydrogen peroxide, sewer cleaner and bleach — caused them to “suffer excessive decomposition,” MacLean said investigators were still able to identify them. It did, however, make it difficult to determine the cause of death. The elder Guys suffered multiple “vicious” stabs wounds, law enforcement determined, and were possibly tortured. Police waited two days to tell the public about the gruesome killings and the arrest of Joel Guy Jr. because they said they didn’t want to tip the son off that he might be a suspect. MacLean described the 28-year-old as a college student in Baton Rouge. The sheriff’s office told the Advocate that the suspect attended Louisiana State University at one point before withdrawing last year, but an LSU spokesman would not confirm that when the newspaper asked. “I have no information to provide about that individual,” said Ernie Ballard, a spokesman. Neighbors told the Advocate that Joel Guy Jr. was reclusive and quiet. He lived alone in his Baton Rouge apartment, but had previously lived with a roommate, the newspaper reported. Rene Charles, the sister of Joel Guy Sr., told the Kingsport Times-News that she couldn’t believe her nephew could be capable of carrying out such brutal killings. “We’re very shocked that he would do something like this,” Charles told the newspaper. “It’s one thing to stab someone, but to do everything that he did, to dismember his parents’ bodies?” She said the family was looking forward to the end of December, when they’d all gather for the holidays. “We were going to have Christmases together again,” she told the Times-News. “We were just fixing to have all of us back together again.” More from Morning Mix: Tacoma, Wash., police officer shot dead. Suspect killed after 12-hour standoff ends in gunfire. 
‘Ailing’ Buzz Aldrin, second man on the moon, evacuated from the South Pole after becoming ill ‘Vectors! Vectors!’: Pilot in soccer team tragedy reported fuel emergency in moments before crash
// TODO: Write a unit test for testing multi-client user handling func TestServer_Serve(t *testing.T) { type fields struct { address string port int } tests := []struct { name string clientCount int fields fields }{ {"local", 1, fields{"127.0.0.1", 8080}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { s := &Server{ Address: tt.fields.address, Port: tt.fields.port, } go s.Serve() addr := s.Address + ":" + strconv.Itoa(s.Port) conn, err := net.Dial("tcp", addr) if err != nil { log.Println("Cannot connect to server") } msg := `GET /a.txt HTTP/1.1\n User-Agent: Mozilla/4.0 (compatible; MSIE5.01; Windows NT)\n Host: www.tutorialspoint.com\n Accept-Language: en-us\n Accept-Encoding: gzip, deflate\n Connection: Keep-Alive\n` conn.Write([]byte(msg)) buf,err := bufio.NewReader(conn).ReadString('\n') if err != nil { fmt.Println("handle me") } fmt.Println(buf) fmt.Println("Client Body:" + buf) conn.Close() fmt.Println("client closed") }) } }