content
stringlengths
10
4.9M
# Read the two contest ranks, map each to its prize money, and print the total.
# A rank past third place wins nothing; a double first place earns a bonus.
x, y = map(int, input().split())

PRIZES = [300000, 200000, 100000, 0]

# Clamp any rank beyond third place into the "no prize" slot (index 3).
x = min(x, 4)
y = min(y, 4)

total = PRIZES[x - 1] + PRIZES[y - 1]

# Both entries took first place: add the special 400000 bonus.
print(total + 400000 if total == 600000 else total)
// Licensed to Elasticsearch B.V. under one or more contributor // license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright // ownership. Elasticsearch B.V. licenses this file to you under // the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package operation import ( "context" "encoding/json" "errors" "fmt" "net/url" "path" "strconv" "strings" "github.com/imdario/mergo" "go.uber.org/zap" "golang.org/x/sync/errgroup" "golang.org/x/sync/semaphore" bundlev1 "github.com/elastic/harp/api/gen/go/harp/bundle/v1" "github.com/elastic/harp/pkg/bundle/secret" "github.com/elastic/harp/pkg/sdk/log" "github.com/elastic/harp/pkg/vault/kv" vaultPath "github.com/elastic/harp/pkg/vault/path" ) // Exporter initialize a secret exporter operation func Exporter(service kv.Service, backendPath string, output chan *bundlev1.Package, withMetadata bool, maxWorkerCount int64) Operation { return &exporter{ service: service, path: backendPath, withMetadata: withMetadata, output: output, maxWorkerCount: maxWorkerCount, } } // ----------------------------------------------------------------------------- type exporter struct { service kv.Service path string withMetadata bool output chan *bundlev1.Package maxWorkerCount int64 } // Run the implemented operation //nolint:funlen,gocognit,gocyclo // refactor func (op *exporter) Run(ctx context.Context) error { // Initialize sub context g, gctx := errgroup.WithContext(ctx) // Prepare channels pathChan := make(chan 
string) // Validate worker count if op.maxWorkerCount < 1 { op.maxWorkerCount = 1 } // Consumers --------------------------------------------------------------- // Secret reader g.Go(func() error { // Initialize a semaphore with maxReaderWorker tokens sem := semaphore.NewWeighted(op.maxWorkerCount) // Reader errGroup gReader, gReaderCtx := errgroup.WithContext(gctx) // Listen for message for secretPath := range pathChan { secPath := secretPath if err := gReaderCtx.Err(); err != nil { // Stop processing break } // Acquire a token if err := sem.Acquire(gReaderCtx, 1); err != nil { return fmt.Errorf("unable to acquire a semaphore token: %w", err) } log.For(gReaderCtx).Debug("Exporting secret ...", zap.String("path", secretPath)) // Build function reader gReader.Go(func() error { // Release token on finish defer sem.Release(1) if err := gReaderCtx.Err(); err != nil { // Context has already an error return nil } // Extract desired version from path vaultPackagePath, vaultVersion, errPackagePath := extractVersion(secPath) if errPackagePath != nil { return fmt.Errorf("unable to parse package path '%s': %w", secPath, errPackagePath) } // Read from Vault secretData, secretMeta, errRead := op.service.ReadVersion(gReaderCtx, vaultPackagePath, vaultVersion) if errRead != nil { // Mask path not found or empty secret value if errors.Is(errRead, kv.ErrNoData) || errors.Is(errRead, kv.ErrPathNotFound) { log.For(gReaderCtx).Debug("No data / path found for given path", zap.String("path", secPath)) return nil } return fmt.Errorf("unexpected vault error: %w", errRead) } // Prepare secret list chain := &bundlev1.SecretChain{ Version: uint32(0), Data: make([]*bundlev1.KV, 0), NextVersion: nil, PreviousVersion: nil, } // Prepare metadata holder metadata := map[string]string{} // Iterate over secret bundle for k, v := range secretData { // Check for old metadata prefix if strings.HasPrefix(strings.ToLower(k), legacyBundleMetadataPrefix) { metadata[strings.ToLower(k)] = fmt.Sprintf("%s", 
v) // Ignore secret unpacking for this value continue } // Check for new metadata prefix if strings.EqualFold(k, kv.VaultMetadataDataKey) { if rawMetadata, ok := v.(map[string]interface{}); ok { for k, v := range rawMetadata { metadata[k] = fmt.Sprintf("%s", v) } } else { log.For(gReaderCtx).Error("Vault metadata type has unexpected type, processing skipped.", zap.String("path", secPath)) } // Ignore secret unpacking for this value continue } // Pack secret value s, errPack := op.packSecret(k, v) if errPack != nil { return fmt.Errorf("unable to pack secret value for path '%s' with key '%s' : %w", secPath, k, errPack) } // Add secret to package chain.Data = append(chain.Data, s) } // Prepare the secret package pack := &bundlev1.Package{ Labels: map[string]string{}, Annotations: map[string]string{}, Name: vaultPackagePath, Secrets: chain, } // Extract useful metadata for k, v := range secretMeta { switch k { case "version": // Convert version rawVersion := json.Number(fmt.Sprintf("%s", v)) version, err := rawVersion.Int64() if err != nil { log.For(gReaderCtx).Warn("unable to unpack secret version as int64.", zap.Error(err), zap.Any("value", v)) } else { pack.Secrets.Version = uint32(version) } case "custom_metadata": // Copy as metadata customMap, ok := v.(map[string]interface{}) if ok { for k, v := range customMap { metadata[k] = v.(string) } } else { log.For(gReaderCtx).Warn("unable to unpack secret custom metadata, invalid type.", zap.Any("value", v)) } } } // Process package metadata distribution if op.withMetadata { for key, value := range metadata { // Merge with package switch { case strings.HasPrefix(key, "label#"): pack.Labels[strings.TrimPrefix(key, "label#")] = value case strings.HasPrefix(key, legacyBundleMetadataPrefix): // Legacy metadata // Clean key key = strings.TrimPrefix(key, legacyBundleMetadataPrefix) // Unpack value var data map[string]string if errDecode := json.Unmarshal([]byte(value), &data); errDecode != nil { 
log.For(gReaderCtx).Error("unable to decode package legacy metadata object as JSON", zap.Error(errDecode), zap.String("key", key), zap.String("path", secPath)) continue } var meta interface{} // Merge with package switch key { case "#annotations": meta = &pack.Annotations case "#labels": meta = &pack.Labels default: log.For(gReaderCtx).Warn("unhandled legacy metadata", zap.String("key", key), zap.String("path", secPath)) continue } // Merge with Vault metadata if errMergo := mergo.MergeWithOverwrite(meta, data, mergo.WithOverride); errMergo != nil { log.For(gReaderCtx).Warn("unable to merge package legacy metadata object", zap.Error(errMergo), zap.String("key", key), zap.String("path", secPath)) continue } default: pack.Annotations[key] = value } } } // Publish secret package select { case <-gReaderCtx.Done(): return gReaderCtx.Err() case op.output <- pack: return nil } }) } return gReader.Wait() }) // Producers --------------------------------------------------------------- // Vault crawler g.Go(func() error { defer close(pathChan) return op.walk(gctx, op.path, op.path, pathChan) }) // Wait for all goroutime to complete if err := g.Wait(); err != nil { return fmt.Errorf("vault operation error: %w", err) } // No error return nil } // ----------------------------------------------------------------------------- func (op *exporter) walk(ctx context.Context, basePath, currPath string, keys chan string) error { // List secret of basepath res, err := op.service.List(ctx, basePath) if err != nil { return fmt.Errorf("unable to list secret entries for '%s': %w", basePath, err) } // Check path is a leaf if res == nil { select { case <-ctx.Done(): return ctx.Err() case keys <- currPath: } return nil } // Iterate on all subpath for _, p := range res { if err := op.walk(ctx, path.Join(basePath, p), path.Join(currPath, p), keys); err != nil { return fmt.Errorf("unable to walk '%s' : %w", path.Join(basePath, p), err) } } // No error return nil } func (op *exporter) 
packSecret(key string, value interface{}) (*bundlev1.KV, error) { // Pack secret value payload, err := secret.Pack(value) if err != nil { return nil, fmt.Errorf("unable to pack secret '%s': %w", key, err) } // Build the secret object return &bundlev1.KV{ Key: key, Type: fmt.Sprintf("%T", value), Value: payload, }, nil } func extractVersion(packagePath string) (mountPath string, backendVersion uint32, err error) { // Check arguments if packagePath == "" { return "", 0, fmt.Errorf("unable to extract path and version from an empty string") } // Looks a little hack-ish for me u, err := url.ParseRequestURI(fmt.Sprintf("harp://bundle/%s", packagePath)) if err != nil { return "", 0, fmt.Errorf("unable to parse package path: %w", err) } // Get version versionRaw := u.Query().Get("version") if versionRaw == "" { // Get latest return vaultPath.SanitizePath(u.Path), 0, nil } // Convert versionUnit, errParse := strconv.ParseUint(versionRaw, 10, 32) if errParse != nil { return "", 0, fmt.Errorf("unable to parse version as a valid integer: %w", err) } // Return path elements return vaultPath.SanitizePath(u.Path), uint32(versionUnit), nil }
<gh_stars>0 import {accuracy} from "../tool/utils"; // 定义一个二维物体的基类 export abstract class QuadraticObject { public vx: number = 0; public vy: number = 0; protected constructor( public x: number = 0, public y: number = 0, public halfW: number = 0, public halfH: number = 0 ) { } setDirection(radian: number, speed: number) { // 拖过物体角度设置方向,角度可选范围0-360度 this.vx = accuracy(Math.cos(radian) * speed); this.vy = accuracy(Math.sin(radian) * speed); } setXY(val: number) { this.x = val; this.y = val; } setHalfWH(val: number) { this.halfW = val; this.halfH = val; } abstract step(...args: any[]): void abstract render(...args: any[]): void }
def expanddoc(f):
    """Decorator: interpolate module-level names into ``f``'s docstring.

    The docstring is treated as a ``str.format`` template; placeholders are
    filled from the global namespace of the module that defines ``f``.
    Returns the same function object, mutated in place.
    """
    defining_module = sys.modules[f.__module__]
    f.__doc__ = f.__doc__.format(**vars(defining_module))
    return f
package main import ( "math" ) func feature02(arguments map[string]interface{}) map[string]interface{} { parameters := extractParameters(arguments, []string{"K", "G", "D"}) if parameters == nil { return nil } K := getParameter(parameters, "K") G := getParameter(parameters, "G") D := getParameter(parameters, "D") result := make(map[string]interface{}) Rad := math.Pi / 180.0 N := K / 2.0 M := G / 2.0 B := math.Atan((M-N)/D) / Rad U := 1.0 / math.Tan(B*Rad) result["R"] = (1.0 / 2.0) * (M + N) * U result["A"] = B return result }
def isValidInnerHTML(innerHTML: tuple) -> bool:
    """Return True if every node in *innerHTML* is a supported child type.

    A valid child is an ``HTML_Node``, an ``HTML_Node_Contentless``, or a
    plain ``str``. An empty tuple is valid (vacuous truth).
    """
    # isinstance accepts a tuple of types; a generator expression lets all()
    # short-circuit on the first invalid node instead of building a full list
    # of redundant `True if ... else False` values.
    return all(
        isinstance(node, (HTML_Node, HTML_Node_Contentless, str))
        for node in innerHTML
    )
def expand(self, heuristic):
    """Generate the child states reachable by sliding the blank one step.

    The board is assumed to be a 3x3 grid stored row-major as a flat
    9-element sequence with 'b' marking the blank (indices 0-8 imply this).
    `heuristic` selects which score to precompute on each child:
    1 = Manhattan distance, 2 = misplaced-tile count, 3 = inversion count.
    Returns a numpy array of child nodes.
    """
    # Locate the blank tile in the flat state.
    blank = self.state.index('b')
    children = np.array([])
    # Indices 0, 3, 6 form the left column: the blank cannot move left there.
    if blank not in (0, 3, 6):
        children = np.append(children, [self.moveLeft(blank)], axis=0)
    # Indices 2, 5, 8 form the right column.
    if blank not in (2, 5, 8):
        children = np.append(children, [self.moveRight(blank)], axis=0)
    # Indices 0, 1, 2 form the top row.
    if blank not in (0, 1, 2):
        children = np.append(children, [self.moveUp(blank)], axis=0)
    # Indices 6, 7, 8 form the bottom row.
    if blank not in (6, 7, 8):
        children = np.append(children, [self.moveDown(blank)], axis=0)
    # Annotate each child with the selected heuristic value.
    for child in children:
        if heuristic == 1:
            child.mDist = child.getManhattan()
        if heuristic == 2:
            child.misplaced = child.countMisplaced()
        if heuristic == 3:
            # countInversions is a module-level helper (not a method) —
            # presumably defined elsewhere in this file; verify.
            child.inversions = countInversions(child.state)
    return children
Differences in Firearm Suicides by Residential Location in Texas, 2006–2015 This study used Texas state vital statistics records, 2006–2015, to examine firearm use rates among 28,010 suicide decedents by residential location (urbanized vs. all others). Firearms were responsible for 44% of all teenage suicides and 76–90% and 50–60% of suicides of men and women aged 60+ years, respectively, and firearm use rates remained steady for both genders during the study period. Logistic regression analysis showed a significantly higher firearm use rate (AOR = 1.35, 95% CI = 1.28–1.42) among decedents who resided in nonurbanized areas. Differences in firearm use rates by residential location likely reflect higher firearm ownership in smaller communities and rural areas than in urban areas. The findings underscore the importance of community- and individual-level suicide prevention strategies.
<filename>spinnaker/api/errors/errors.go package errors import "regexp" var ( pipelineAlreadyExistsRegexp = regexp.MustCompile(`.*A pipeline with name .* already exists.*`) ) // IsPipelineAlreadyExists returns true if the error indicates that a pipeline // already exists. func IsPipelineAlreadyExists(err error) bool { if err == nil { return false } return pipelineAlreadyExistsRegexp.MatchString(err.Error()) }
Cachectin/tumor necrosis factor decreases human adipose tissue lipoprotein lipase mRNA levels, synthesis, and activity. The effects of the cytokine cachectin/tumor necrosis factor (TNF) on human adipose tissue lipoprotein lipase (LPL) were studied. TNF is produced by activated macrophages and is thought to play a role in mediating hypertriglyceridemia and wasting of adipose tissue triglyceride stores (cachexia) that often accompany infection and malignancy. TNF effects were studied in human adipose tissue fragments maintained in organ culture in the presence of insulin and dexamethasone to induce high LPL activity. Addition of TNF to the culture medium for 20 h caused a dose-dependent inhibition of LPL activity to an average of 37% of controls at 50 U/ml TNF. This inhibition of LPL activity was explained by specific decreases in levels of LPL mRNA (to 40% of controls) and rates of LPL synthesis determined by biosynthetic labeling and immunoprecipitation (to 32% of controls). The decline in LPL synthesis was specific, as it occurred despite a small increase in overall protein synthesis in the presence of TNF. Comparable decreases in LPL activity were observed when TNF was added to adipose tissue cultured solely in the presence of insulin. Thus, similar to results in rodent models, TNF is a potent inhibitor of LPL gene expression in human adipose tissue. TNF may therefore play a role in the disorders of triglyceride catabolism and the pathogenesis of cachexia that occur with stimulation of the immune system in humans.
/**
 * Play the requested Sound.
 * Does nothing when the player is in Quiet Mode or the sound type has no
 * registered details (i.e. the sound is disabled).
 *
 * @param player    player to play the sound for, at their current location
 * @param soundType sound type to look up in the configured sound table
 */
public void playSound(Player player, SoundType soundType) {
    // Quiet Mode suppresses all sounds for this player.
    if (PlayerInfo.isQuietMode(player)) {
        return;
    }
    SoundDetails details = soundTypes.get(soundType);
    // A missing entry means the sound is not enabled.
    if (details == null) {
        return;
    }
    player.playSound(player.getLocation(), details.getSound(), details.getVolume(), details.getPitch());
}
<gh_stars>1-10 /* eslint-disable */ /** * @openapi * components: * schemas: * UserInfo: * title: UserInfo * type: object * properties: * id: * type: string * description: The user's id. * example: 238ec288-ffc2-41ad-918b-7aa1f4f855dd * email: * type: string * description: The user's e-mail. * example: <EMAIL> * verified_email: * type: boolean * description: Whether the e-mail has been verified or not. * example: true * name: * type: string * description: The user's full name. * example: <NAME> * given_name: * type: string * description: The user's given name. * example: Niclas * family_name: * type: string * description: The user's family name. * example: Lindstedt * picture: * type: string * description: An url to the user's picture. * example: https://picsum.photos/200 * hd: * type: string * description: The hosted domain of the user. * example: etimo.se */ export type UserInfoDto = { id: string; email: string; verified_email: boolean; name: string; given_name: string; family_name: string; picture: string; locale: string; hd: string; };
def expand_ranges(ranges: Sequence[Sequence[int]], inclusive: bool = False) -> List[int]:
    """Expand a list of [start, end] (or single-element [value]) ranges into a
    sorted, de-duplicated list of integers.

    :param ranges: sequence of 1- or 2-element sequences; a 2-element entry is
        a [start, end] range, a 1-element entry is a single value.
    :param inclusive: when True, 2-element ranges include their end value.
    :raises ValueError: when an entry has neither 1 nor 2 elements.
    """
    values: Set[int] = set()
    for r in ranges:
        if len(r) == 2:
            values.update(range(r[0], r[1] + (1 if inclusive else 0)))
        elif len(r) == 1:
            values.add(r[0])
        else:
            # Fix: the original message contained a stray literal 'f' inside
            # the f-string ("Got f{len(r)}"), rendering e.g. "Got f3 element".
            raise ValueError(
                f"Expected 1 or 2 element list for range definition. Got {len(r)} element list instead."
            )
    return sorted(values)
import * as Handlebars from 'handlebars' import { Endpoint, IRefs, MarkdownTemplateData, Properties, Resource, ResponseSchema, } from './types' import { find, flatten, groupBy, isNil, omit, sortBy } from 'ramda' import { readFileSync, writeFileSync } from 'fs' import { OpenAPIV3 } from 'openapi-types' const deepcopy = require('deepcopy') const path = require('path') const CONTENT_TYPE = 'application/json' const METHODS = [ OpenAPIV3.HttpMethods.GET, OpenAPIV3.HttpMethods.PUT, OpenAPIV3.HttpMethods.PATCH, OpenAPIV3.HttpMethods.DELETE, OpenAPIV3.HttpMethods.POST, ] function mergeSchemaObjects(...schemaObjects: OpenAPIV3.SchemaObject[]): OpenAPIV3.SchemaObject { // mergeSchemaObjects merges objects. No resolution is performed: eg allOf or refs -> Just merge return schemaObjects.reduce( (currSchemaObject, schemaObject: OpenAPIV3.SchemaObject): OpenAPIV3.SchemaObject => { return { ...schemaObject, ...currSchemaObject, // add current state after current object: for allOf cases this preserves top level $ref names properties: { ...(currSchemaObject.properties ?? {}), ...(schemaObject.properties ?? {}), }, required: [...(currSchemaObject.required ?? []), ...(schemaObject.required ?? 
[])], } }, { properties: {}, required: [] } as OpenAPIV3.SchemaObject, ) } function resolveAllOf( schemaObject: OpenAPIV3.SchemaObject, refs: IRefs, depth: number | undefined = undefined, depthCounter: number = 0, ): OpenAPIV3.SchemaObject { if (!schemaObject.allOf) { return schemaObject } return mergeSchemaObjects( omit(['allOf'], schemaObject) as OpenAPIV3.SchemaObject, ...(schemaObject.allOf as OpenAPIV3.SchemaObject[]).map( (childSchemaObject: OpenAPIV3.SchemaObject): OpenAPIV3.SchemaObject => { return resolveSchemaOrReferenceObject( childSchemaObject, refs, depth, depthCounter + 1, ) }, ), ) } function resolveOneOf( schemaObject: OpenAPIV3.SchemaObject, refs: IRefs, depth: number | undefined = undefined, depthCounter: number = 0, ): OpenAPIV3.SchemaObject { if (!schemaObject.oneOf) { return schemaObject } return { ...schemaObject, oneOf: schemaObject.oneOf.map( ( childSchemaObject: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject, ): OpenAPIV3.SchemaObject => resolveSchemaOrReferenceObject(childSchemaObject, refs, depth, depthCounter), ), } } function resolveExampleAllOf( schemaObject: OpenAPIV3.SchemaObject, refs: IRefs, ): Record<string, any> | undefined { if (!schemaObject.allOf) { return undefined } return Object.fromEntries( (schemaObject.allOf as OpenAPIV3.SchemaObject[]).map( (childSchemaObject: OpenAPIV3.SchemaObject) => { const resolvedChildSchemaObject = resolveSchemaOrReferenceObject( childSchemaObject, refs, ) return Object.entries(buildExampleTree(resolvedChildSchemaObject, refs)!) 
}, ), ) } function resolveExampleOneOf( schemaObject: OpenAPIV3.SchemaObject, refs: IRefs, ): Record<string, any> | undefined { if (!schemaObject.oneOf) { return undefined } // for now, for simplicity, pick the first let childSchemaObject = schemaObject.oneOf[0] childSchemaObject = resolveSchemaOrReferenceObject(childSchemaObject, refs) return buildExampleTree(childSchemaObject, refs) } function resolveProperties( properties: Properties | undefined, refs: IRefs, depth: number | undefined = undefined, depthCounter: number = 0, ): Properties | undefined { if (!properties) { return properties } return Object.keys(properties).reduce((prevProperties: Properties, key: string): Properties => { const childSchemaObject: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject = properties[key] prevProperties[key] = resolveSchemaOrReferenceObject( childSchemaObject, refs, depth, depthCounter + 1, ) return prevProperties }, properties ?? {}) } function resolveExampleProperties( properties: Properties | undefined, refs: IRefs, ): Record<string, any> { if (!properties) { return {} } return Object.keys(properties).reduce((obj, key: string) => { const childSchemaObject = properties[key] const resolvedChildSchemaObject = resolveSchemaOrReferenceObject(childSchemaObject, refs) obj[key] = buildExampleTree(resolvedChildSchemaObject, refs) return obj }, {} as Record<string, any>) } function resolveExampleAdditionalProperties( additionalProperties: boolean | OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject | undefined, ): Record<string, any> { if (!additionalProperties) { return {} } // this is not a correct translation, but let's go with it for now return { property1: 'string', property2: 'number', } } function resolveRef( schemaOrRefObject: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject, refs: IRefs, ): OpenAPIV3.SchemaObject & { ref?: string } { const refObject = schemaOrRefObject as OpenAPIV3.ReferenceObject if (!refObject.$ref) { return deepcopy(schemaOrRefObject as 
OpenAPIV3.SchemaObject) // make a deepcopy for safety } const schemaObject = refs.get(refObject.$ref) as OpenAPIV3.SchemaObject // keep reference in `ref` property if it has been resolved return deepcopy({ ...schemaObject, ref: refObject.$ref }) // make a deep copy for safety } function resolveSchemaOrReferenceObject( schemaObject: Readonly<OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject>, refs: IRefs, depth: number | undefined = undefined, depthCounter: number = 0, ): OpenAPIV3.SchemaObject { // Sometimes there is no need to resolve the whole tree. // A reference table has only one dimension, however an example should describe the entire object if (depth && depthCounter === depth) { return deepcopy(schemaObject as OpenAPIV3.SchemaObject) } // Resolve ref let resolvedSchemaObject = resolveRef(schemaObject, refs) // Resolve allOf if (resolvedSchemaObject.allOf) { return resolveAllOf(resolvedSchemaObject, refs, depth, depthCounter) } // Resolve oneOf if (resolvedSchemaObject.oneOf) { return resolveOneOf(resolvedSchemaObject, refs, depth, depthCounter) } // NOTE: oneOf, anyOf is currently not supported // Resolve object if (resolvedSchemaObject.type === 'object') { resolvedSchemaObject = { ...resolvedSchemaObject, properties: resolveProperties( resolvedSchemaObject.properties, refs, depth, depthCounter, ), } } // Resolve array item if (resolvedSchemaObject.type === 'array') { resolvedSchemaObject = { ...resolvedSchemaObject, items: resolveSchemaOrReferenceObject( resolvedSchemaObject.items, refs, depth, depthCounter + 1, ), } } return resolvedSchemaObject } function buildExampleTree( schemaObject: Readonly<OpenAPIV3.SchemaObject>, refs: IRefs, ): object | any[] | undefined { if (['boolean', 'number', 'string', 'integer'].includes(schemaObject.type!)) { // leaf return schemaObject.example ?? schemaObject.type } // if example is set, even for objects or array, short circuit if (schemaObject.example) { return schemaObject.example } // Resolve ref. 
// Inefficient: the entire (sub)tree is traversed every time. O(n^k), // where k is the number of children for a node const resolvedSchemaObject = resolveSchemaOrReferenceObject(schemaObject, refs) if (resolvedSchemaObject.allOf) { return resolveExampleAllOf(resolvedSchemaObject, refs) } if (resolvedSchemaObject.oneOf) { return resolveExampleOneOf(resolvedSchemaObject, refs) } if (resolvedSchemaObject.type === 'object') { return { ...resolveExampleProperties(resolvedSchemaObject.properties, refs), ...resolveExampleAdditionalProperties(resolvedSchemaObject.additionalProperties), } } if (resolvedSchemaObject.type === 'array') { return [ buildExampleTree( resolveSchemaOrReferenceObject(resolvedSchemaObject.items, refs), refs, ), ] } // no example return undefined } function requestBodyObjects( opObject: OpenAPIV3.OperationObject, refs: IRefs, depth: number | undefined = undefined, ): { requestBodySchema?: OpenAPIV3.SchemaObject requestBodyExample?: any requestBodyRef?: string } { if (!opObject.requestBody) { return {} } const jsonContent = (opObject.requestBody! 
as OpenAPIV3.RequestBodyObject).content[CONTENT_TYPE] if (!jsonContent) { return {} } // assume 'application/json' only const object = jsonContent.schema as OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject const refObject = object as OpenAPIV3.ReferenceObject const schemaObject = object as OpenAPIV3.SchemaObject return { requestBodySchema: resolveSchemaOrReferenceObject(schemaObject, refs, depth), requestBodyExample: buildExampleTree(schemaObject, refs), requestBodyRef: refObject.$ref, } } function responseObject( opObject: OpenAPIV3.OperationObject, statusCode: string, refs: IRefs, depth: number | undefined = undefined, ): ResponseSchema { const responseObject = opObject.responses[statusCode] as OpenAPIV3.ResponseObject const content = responseObject.content // assume 'application/json' only if (!content || !content![CONTENT_TYPE]) { return { description: responseObject.description, } } const mediaTypeObject: OpenAPIV3.MediaTypeObject | undefined = content![CONTENT_TYPE] const object = mediaTypeObject.schema as OpenAPIV3.ResponseObject | OpenAPIV3.ReferenceObject const refObject = object as OpenAPIV3.ReferenceObject const schemaObject = object as OpenAPIV3.SchemaObject return { description: responseObject.description, schema: schemaObject ? resolveSchemaOrReferenceObject(schemaObject, refs, depth) : undefined, example: schemaObject ? buildExampleTree(schemaObject, refs) : undefined, ref: refObject.$ref, } } function responseObjects( opObject: OpenAPIV3.OperationObject, refs: IRefs, depth: number | undefined = undefined, ): Record<string, ResponseSchema> { return Object.keys(opObject.responses).reduce( (prev: Record<string, ResponseSchema>, statusCode: string) => { prev[statusCode] = responseObject(opObject, statusCode, refs, depth) return prev }, {}, ) } function generateEndpoints( api: OpenAPIV3.Document, refs: IRefs, depth: number | undefined = undefined, ): Endpoint[] { return flatten( Object.keys(api.paths ?? 
{}).map((path: string): Endpoint[] => { const pathObject = api.paths![path] as OpenAPIV3.PathItemObject const endpoints: Array<Endpoint | undefined> = METHODS.map( (method): Endpoint | undefined => { const opObject: OpenAPIV3.OperationObject | undefined = pathObject[method] if (opObject === undefined) { return undefined } return generateEndpoint(method, path, opObject, refs, depth) }, ) return endpoints.filter( (endpoint: Endpoint | undefined) => !isNil(endpoint), ) as Endpoint[] }), ) } function generateEndpoint( method: string, path: string, opObject: OpenAPIV3.OperationObject, refs: IRefs, depth: number | undefined = undefined, ): Endpoint { if (opObject.tags === undefined) { throw new Error(`${opObject} must have a tag`) } const pathParameters = ((opObject.parameters ?? []) as OpenAPIV3.ParameterObject[]).filter( (parameter) => parameter.in === 'path', ) const queryParameters = ((opObject.parameters ?? []) as OpenAPIV3.ParameterObject[]).filter( (parameter) => parameter.in === 'query', ) return { method, path, summary: opObject.summary, description: opObject.description, tags: opObject.tags!, ...requestBodyObjects(opObject, refs, depth), ...(pathParameters ? { pathParameters } : {}), ...(queryParameters ? { queryParameters } : {}), responses: responseObjects(opObject, refs, depth), } } function generateResource( name: string, schemaObject: OpenAPIV3.SchemaObject, refs: IRefs, depth: number | undefined = undefined, ): Resource { return { name, ...resolveSchemaOrReferenceObject(schemaObject, refs, depth), example: schemaObject ? 
buildExampleTree(schemaObject, refs) : undefined,
    }
}

// generateMarkdownFiles renders one markdown file per resource schema and one
// per endpoint tag group, using the Handlebars templates provided (or the
// bundled defaults). Output goes to outputDirectory, or stdout when unset.
export function generateMarkdownFiles({
    api,
    refs,
    outputDirectory,
    endpointsPrefix,
    endpointsTemplate,
    resourceTemplate,
    resourceSchemaDepth,
    endpointSchemaDepth,
}: {
    api: OpenAPIV3.Document
    refs: IRefs
    outputDirectory?: string
    endpointsPrefix?: string
    endpointsTemplate?: string
    resourceTemplate?: string
    resourceSchemaDepth?: number
    endpointSchemaDepth?: number
}) {
    // resources
    const schemas = api.components?.schemas as Record<string, OpenAPIV3.SchemaObject> | undefined
    const resources = sortBy((name: string) => name, Object.keys(schemas ?? {})).map(
        (name: string): Resource => {
            const schemaObject = schemas![name]
            return generateResource(name, schemaObject, refs, resourceSchemaDepth)
        },
    )
    resources.map((resource: Resource) =>
        generateResourceMarkdownFile(resource, outputDirectory, resourceTemplate),
    )

    // endpoints
    const endpoints = generateEndpoints(api, refs, endpointSchemaDepth)
    const markdownTemplatesData = groupEndpointsByTag(api, endpoints)
    markdownTemplatesData.map((markdownTemplateData) =>
        generateEndpointsMarkdownFile(
            markdownTemplateData,
            outputDirectory,
            endpointsPrefix,
            endpointsTemplate,
        ),
    )
}

// Renders the endpoints template for one tag group and writes it to
// `<outputDirectory>/<endpointsPrefix><slugified-tag-name>.md`, or prints the
// result when no output directory is configured.
function generateEndpointsMarkdownFile(
    data: MarkdownTemplateData,
    outputDirectory?: string,
    endpointsPrefix?: string,
    endpointsTemplate?: string,
) {
    const file = endpointsTemplate ?? path.join(__dirname, `/../endpoints.md`)
    const templateContent = readFileSync(file)
    const template = Handlebars.compile(templateContent.toString())
    const result = template(data)
    if (outputDirectory) {
        // Slugify the tag name: strip punctuation, collapse whitespace,
        // replace spaces with dashes, lowercase.
        const filename =
            data.tag.name
                .replace(/[^a-zA-Z0-9_ ]/g, '')
                .replace(/\s\s+/g, ' ')
                .replace(/\s/g, '-')
                .toLowerCase() + '.md'
        // Fix: the computed `filename` was never used — the path was built
        // with a corrupted literal instead, so every tag group overwrote the
        // same file. Mirror generateResourceMarkdownFile's behavior.
        const file = path.join(outputDirectory!, `${endpointsPrefix ?? ''}${filename}`)
        writeFileSync(file, result)
        console.log(`Endpoint saved: ${file}`)
        return
    }
    console.log(result)
}

// Renders the resource template for one schema and writes it to
// `<outputDirectory>/<slugified-resource-name>.md`, or prints the result when
// no output directory is configured.
function generateResourceMarkdownFile(
    resource: Resource,
    outputDirectory?: string,
    resourceTemplate?: string,
) {
    const file = resourceTemplate ?? path.join(__dirname, `/../resource.md`)
    const templateContent = readFileSync(file)
    const template = Handlebars.compile(templateContent.toString())
    const result = template(resource)
    if (outputDirectory) {
        const filename =
            resource.name
                .replace(/[^a-zA-Z0-9_ ]/g, '')
                .replace(/\s\s+/g, ' ')
                .replace(/\s/g, '-')
                .toLowerCase() + '.md'
        const file = path.join(outputDirectory!, filename)
        writeFileSync(file, result)
        console.log(`Resource saved: ${file}`)
        return
    }
    console.log(result)
}

// Looks up a tag object by name in the document's top-level tag list.
function extractTagByName(
    api: OpenAPIV3.Document,
    tagName: string,
): OpenAPIV3.TagObject | undefined {
    return find(
        (tagObject: OpenAPIV3.TagObject | undefined): boolean => tagObject?.name === tagName,
        api.tags ?? [],
    )
}

// Duplicates multi-tag endpoints (one copy per tag), then groups by tag name
// and attaches the tag's description from the document's tag list.
function groupEndpointsByTag(
    api: OpenAPIV3.Document,
    endpoints: Endpoint[],
): MarkdownTemplateData[] {
    const denormaliseByTag: Endpoint[] = flatten(
        endpoints.map((endpoint: Endpoint): Endpoint[] =>
            endpoint.tags.map((tag: string): Endpoint => ({ ...endpoint, tags: [tag] })),
        ),
    )
    const groupedByTagName: Record<string, Endpoint[]> = groupBy(
        (endpoint: Endpoint) => endpoint.tags[0],
        denormaliseByTag,
    )
    return Object.keys(groupedByTagName).map((tagName: string): MarkdownTemplateData => {
        const endpoints = groupedByTagName[tagName]
        const tagObject = extractTagByName(api, tagName)
        return {
            tag: {
                name: tagName,
                description: tagObject?.description,
            },
            endpoints,
        }
    })
}
Frank Williams back up to speed after hospital stay Sir Frank Williams is out of hospital and back up to speed, and intends to travel to at least 15 Formula 1 races this season. Late last year, when the Grove team’s 72-year-old boss and co-founder stopped coming to races, it emerged he had been hospitalised with pressure sores. The Briton, whose daughter Claire is Williams’ deputy team principal, has been tetraplegic and confined to a wheelchair since a road car crash in 1986. He was in hospital for several weeks, but the Telegraph newspaper on Wednesday revealed that Williams is now back at work and planning to attend 15 grands prix in 2015. “I’m seven days a week here,” Sir Frank confirmed. “I just love what I do.” He does not hide, however, that while he never originally envisaged the top job for his 38-year-old daughter, Claire Williams is now in pole position to one day take over from him. Williams says Claire, who started work at Grove as a junior press officer in 2001, is “very industrious” and “excels” in her current role. “I see her running the business,” Sir Frank Williams said.
// Plugins returns the plugins. func (c *Context) Plugins() Plugins { ps, ok := c.ctx.Value(keyPlugins{}).(Plugins) if ok { return ps } return nil }
Ian Botham has hailed Ben Stokes' "enormous potential" and suggested he may be a better player than he was at the same age. Stokes broke Botham's record for the quickest Test double-century by an England player in the game at Cape Town - a moment Botham compared to his exploits in the Headingley Ashes Test of 1981 - and, despite being the youngest member of the squad, has quickly emerged as a key figure in a developing side. In statistical terms, the 24-year-old Stokes has some way to go to match Botham. By the time Botham was 25, he had scored six Test centuries (Stokes currently has three), claimed 14 five-wicket hauls (to Stokes' two) and was captain of the side. But such comparisons are unnecessary. In terms of the aggressive role he plays with the bat and the balance he lends the side by virtue of his all-round skills, there are obvious similarities between the two. Botham recognises that and sees in Stokes a kindred spirit who will be at the centre of every plan England make for much of the next decade. Botham on... Nick Compton "He's going to have to find a balance. He was left out in the first place as he had no change of tempo. There's not a lot of point being out there for 70 balls for 15 runs. Tavare, Boycott and Brearley all scored faster than that." England's rise "This is the most exciting England team since they were world No. 1. England are an emerging side and they will progress quite quickly. They will be competing by the end of this calendar year for No. 1 status. They might not actually be No. 1 because of the way the fixtures work, but they will be up there and ringing the door bell." Hashim Amla "I like his timing. He said captaincy was hurting his batting. But he went out there and showed what character he has by getting 200. If it was hurting his batting before, then God help us now." "As a player, he's probably better than I was at 24," Botham said. "What I did was yesterday's news, what he did is today's news. 
"He is not a jack of all trades and master of none. He is actually probably master of the three most important assets you need: batting, bowling and fielding. "His bowling can only get better. He has all the attributes: he has pace; he can swing the ball; he can reverse it; he can bowl orthodox. And he is a terrific fielder close to the bat, in the covers or midwicket, backward point, on the boundary." Certainly Botham holds no resentment in losing a record to Stokes. He already feels Stokes is a better player than Andrew Flintoff (who scored five Test centuries and claimed three five-wicket hauls in his career) at a comparable stage - "he'll take more five-fors" - and suggests that Stokes should be allowed to play his natural game without complication or impediment. "I held that record for 33 years, so it was about time it got broken," Botham said. "I enjoyed watching Ben Stokes. I have been a massive fan of his since I first saw him on the international scene. I think he has got tremendous, enormous potential to go all the way. "That was one of those moments when you can just say 'I was there'. When you think back to Headingley '81, I know there were only about 10,000 people in the ground but I have met three-and-a-half million who say they were. Well, I was there for Ben Stokes. "It was fun. And Ben Stokes will be enjoying it as well. He'll see it as fun as well. You walk out and you empty the bars rather than fill them and it's a nice feeling. Then you get out and you see them all going back in for a beer and you think 'sorry, lads: can't do it every day.' And he can't do it every day, either. That's what people have to remember. You have to give him the right to fail. "Just let him go. Mike Brearley did absolutely the same with me. There was no leash in any way. "I didn't think too much. There is too much thinking about the game, too much analysis, looking at computers. I don't need to look at a computer to know I've played a **** shot. It's not that hard.
"But he is crucial to Alastair Cook's selection policy. If he's playing, then Cook actually has an option of two other players in his mind. He is the genuine article." Ben Stokes broke Ian Botham's record for the fastest double-hundred by an England player © Getty Images Botham also believes that Jonny Bairstow's maiden century at Cape Town will give him the confidence to settle into the side and improve his wicketkeeping. "I think Jonny Bairstow offers a lot to English cricket," he said. "I have always been a fan of his. He was a rough diamond three years ago but you knew those skills were there. "He only put down one chance and it wasn't easy. He also had the first stumping for three years by an English keeper in Durban. I think there are more pluses than negatives. He works very hard at his game and he will only get better. I genuinely think he is good enough." Kumala Wines were proud supporters of 'Beefy Walking The Rainbow Nation'. For more information visit kumalawines.com George Dobell is a senior correspondent at ESPNcricinfo © ESPN Sports Media Ltd.
/// Generates documentation comments containing
/// markdown code `doc`.
///
/// `is_outer` selects the prefix: `//!` (outer, module-level) versus
/// `///` (inner, item-level). Returns an empty string when there is
/// nothing to emit (empty input, or only blank lines after trimming).
fn format_doc_extended(doc: &str, is_outer: bool) -> String {
    if doc.is_empty() {
        return String::new();
    }
    let prefix = if is_outer { "//! " } else { "/// " };
    // Split into lines and drop leading/trailing blank lines so the
    // emitted comment block is tight.
    let lines = doc.trim().split('\n').collect_vec();
    let lines = trim_slice(&lines, |x| x.is_empty());
    if lines.is_empty() {
        return String::new();
    }
    // Outer doc comments are followed by an extra blank line to separate
    // them from the item they precede.
    let extra_line_breaks = if is_outer { "\n\n" } else { "\n" };
    lines
        .iter()
        .map(|x| {
            if x.starts_with(" ") {
                // Indented line (e.g. a code block): escape spaces so the
                // renderer preserves the indentation.
                // NOTE(review): this replaces ALL spaces on the line, not
                // just the leading run — confirm that is intended.
                format!("{}{}", prefix, x.replace(" ", "&#32; "))
            } else {
                format!("{}{}", prefix, x)
            }
        })
        .join("\n")
        + extra_line_breaks
}
/**
 * This work is licensed under the Creative Commons Attribution 4.0 International License. To view a copy of this
 * license, visit http://creativecommons.org/licenses/by/4.0/.
 */
@SuppressWarnings("WeakerAccess")
public class MaximumShearReinforcementCard extends AbstractCard {

    // Input panel selecting the shear-reinforcement bar diameter.
    private ReinforcementDiameterPanel asswDiameterPanel;
    // Cross-section width, entered in metres.
    private LabeledTextField ltfWidth;
    // Maximum aggregate size, entered in millimetres.
    private LabeledTextField ltfDg;
    // Concrete cover, entered in millimetres.
    private LabeledTextField ltfCnom;

    public MaximumShearReinforcementCard(String name) {
        super(name);
    }

    /** Builds the input form: the diameter panel plus the numeric fields. */
    @Override
    protected void modifyDataPanel(JPanel jPanel) {
        jPanel.setLayout(new BoxLayout(jPanel, BoxLayout.Y_AXIS));
        asswDiameterPanel = new ReinforcementDiameterPanel("Średnica zbrojenia na ścinanie", false);
        ltfWidth = new LabeledTextField("Szerokość przekroju [m]: ", "0.3");
        ltfDg = new LabeledTextField("Maksymalny wymiar kruszywa [mm]: ", "32");
        ltfCnom = new LabeledTextField("Otulina [mm]: ", "30");
        JPanel pProps = new BaseJPanel("Inne");
        pProps.add(ltfDg);
        pProps.add(ltfCnom);
        pProps.add(ltfWidth);
        jPanel.add(asswDiameterPanel);
        jPanel.add(pProps);
    }

    /**
     * Formats the computed maximum shear reinforcement for display.
     * NOTE(review): the spacing term divides by (getN() - 1); assumes
     * getN() > 1 — confirm the factory guarantees this upstream.
     */
    @Override
    protected String getSuccessMessage() throws Exception {
        ShearReinforcement asw = calculateMaximumShearReinforcement();
        return "Maksymalne zbrojenie na ścinanie: "
                + String.format("%.0f", asw.getNleg()) + "-cięte \u00D8"
                + String.format("%.0f", asw.getPhi() * 1000)
                + " co " + String.format("%.0f", 100 / (asw.getN() - 1)) + "cm";
    }

    /** Runs the factory on the current inputs and returns the result. */
    protected ShearReinforcement calculateMaximumShearReinforcement() throws ImproperDataException, LSException {
        return notNull(() -> new MaximumShearReinforcementFactory(getDg(), getClearance(), getPhisw()).build());
    }

    /** Builds a beam cross-section carrying only the shear-reinforcement data. */
    protected CrossSection getCrossSection() throws ImproperDataException, LSException {
        return new CrossSection(getShape(), null, null, null,
                CrossSectionType.BEAM, null,
                new ShearReinforcement(0, getPhisw(), 0, 0), getCnom()
        );
    }

    /** Clear spacing available for the shear reinforcement, must be positive. */
    protected double getClearance() throws ImproperDataException, LSException {
        return pos(() -> getCrossSection().getAswClearance());
    }

    /** First selected shear-bar diameter, must be positive. */
    protected double getPhisw() throws ImproperDataException, LSException {
        return pos(() -> asswDiameterPanel.getDiameters()[0]);
    }

    /** Maximum aggregate size converted from mm to m. */
    protected double getDg() throws ImproperDataException, LSException {
        return pos(() -> ltfDg.getDouble() / 1000);
    }

    /** Concrete cover converted from mm to m. */
    protected double getCnom() throws ImproperDataException, LSException {
        return pos(() -> ltfCnom.getDouble() / 1000);
    }

    /** Rectangular shape from the entered width (height unused here: 0.0). */
    protected Shape getShape() throws ImproperDataException, LSException {
        return notNull(() -> new Shape(ltfWidth.getDouble(), 0.0));
    }
}
/**
 * Uninstall current location collection client.
 *
 * @return true if uninstall was successful
 */
static boolean uninstall() {
  synchronized (lock) {
    if (locationCollectionClient == null) {
      // Nothing installed, nothing to tear down.
      return false;
    }
    // Tear down in order: engine, settings thread, preference listener.
    locationCollectionClient.locationEngineController.onDestroy();
    locationCollectionClient.settingsChangeHandlerThread.quit();
    locationCollectionClient.sharedPreferences
        .unregisterOnSharedPreferenceChangeListener(locationCollectionClient);
    locationCollectionClient = null;
    return true;
  }
}
#!/usr/bin/env python3
"""Unit tests for the scroller module: permute(), scroll() and scroller()."""
import scroller
import sys
from unittest import TestCase, main
from unittest.mock import patch, call


class ScrollerTest(TestCase):
    # Fixed input and its expected single left-rotation.
    _TEST_STRING = 'argument clinic'
    _TEST_STRING_PERMUTED = 'rgument clinica'

    def setUp(self):
        self.test_str = self.__class__._TEST_STRING
        self.test_str_permuted = self.__class__._TEST_STRING_PERMUTED

    def test_permute(self):
        # Forward permutation rotates left; rev=True undoes it.
        string = scroller.permute(self.test_str)
        self.assertEqual(string, self.test_str_permuted)
        string = scroller.permute(string, rev=True)
        self.assertEqual(string, self.test_str)

    def test_scroll(self):
        # scroll() yields successive permutations of the input.
        scrolled = scroller.scroll(self.test_str)
        string = self.test_str
        for _ in range(len(self.test_str)):
            string = scroller.permute(string)
            self.assertEqual(next(scrolled), string)

    def test_scroll_reverse(self):
        # Scroll forward n-1 steps, then reverse the same number of steps
        # should land back on the original string.
        scrolled = scroller.scroll(self.test_str)
        test_length = len(self.test_str) - 1
        for _ in range(test_length):
            string = next(scrolled)
        scrolled = scroller.scroll(string, rev=True)
        for _ in range(test_length):
            string = next(scrolled)
        self.assertEqual(string, self.test_str)

    def test_scroll_separator(self):
        # The separator is appended before scrolling begins.
        sep = ' '
        scrolled = scroller.scroll(self.test_str, sep=sep)
        string = self.test_str + sep
        for _ in range(len(self.test_str)):
            string = scroller.permute(string)
            self.assertEqual(next(scrolled), string)

    def test_scroll_separator_when_reversed(self):
        sep = ' '
        scrolled = scroller.scroll(self.test_str, sep=sep, rev=True)
        string = self.test_str + sep
        for i in range(len(self.test_str)):
            string = scroller.permute(string, rev=True)
            self.assertEqual(next(scrolled), string)

    def test_scroll_static(self):
        # static=True yields the unchanged string every time.
        scrolled = scroller.scroll(self.test_str, static=True)
        for i in range(len(self.test_str)):
            self.assertEqual(next(scrolled), self.test_str)

    def test_scroll_static_when_reversed(self):
        scrolled = scroller.scroll(self.test_str, static=True, rev=True)
        for i in range(len(self.test_str)):
            self.assertEqual(next(scrolled), self.test_str)

    def test_scroller_count(self):
        # After a full cycle of len(s) permutations the string returns to
        # its original value.
        s = scroller.scroller(self.test_str, count=len(self.test_str))
        string = self.test_str
        for x in s:
            string = x
        self.assertEqual(string, self.test_str)

    def test_scroller(self):
        s = scroller.scroller(self.test_str, count=len(self.test_str))
        e = self.test_str
        for a in s:
            e = scroller.permute(e)
            self.assertEqual(a, e)

    def test_scroller_when_reversed(self):
        s = scroller.scroller(
            self.test_str, count=len(self.test_str), rev=True
        )
        e = self.test_str
        for a in s:
            e = scroller.permute(e, rev=True)
            self.assertEqual(a, e)

    def test_scroller_static(self):
        s = scroller.scroller(
            self.test_str, count=len(self.test_str), static=True
        )
        e = self.test_str
        for a in s:
            self.assertEqual(a, e)

    def test_scroller_static_when_reversed(self):
        # static wins over rev: output never changes.
        s = scroller.scroller(self.test_str, count=3, static=True, rev=True)
        e = self.test_str
        for a in s:
            self.assertEqual(a, e)


class ScrollerCLITest(TestCase):
    # Command-line interface tests; print/sleep are mocked throughout.
    _TEST_STRING = 'gumby brain specialist'

    def setUp(self):
        self.test_str = self.__class__._TEST_STRING

    @patch('time.sleep', autospec=True)
    @patch('builtins.print', autospec=True)
    def test_count(self, mock_print, mock_sleep):
        # -c N prints exactly N frames.
        c = 5
        args = scroller.parser.parse_args(['-c', str(c)])
        scroller.main(self.test_str, args)
        self.assertEqual(mock_print.call_count, c)

    @patch('time.sleep', autospec=True)
    @patch('builtins.print', autospec=True)
    def test_length_when_should_not_be_static(self, mock_print, mock_sleep):
        # When -l equals the string length the text still scrolls.
        l = len(self.test_str)
        args = scroller.parser.parse_args(['-l', str(l), '-c', '5'])
        scroller.main(self.test_str, args)
        self.assertNotEqual(mock_print.call_args, self.test_str)

    @patch('time.sleep', autospec=True)
    @patch('builtins.print', autospec=True)
    def test_length_when_should_be_static(self, mock_print, mock_sleep):
        # When -l exceeds the string length the text is printed as-is.
        l = len(self.test_str) + 2
        c = 2
        args = scroller.parser.parse_args([
            '-l', str(l), '-c', str(c), '-s', ''
        ])
        scroller.main(self.test_str, args)
        expected_call = call(self.test_str, end='\n')
        mock_print.assert_has_calls([expected_call for i in range(c)])

    @patch('time.sleep', autospec=True)
    @patch('builtins.print', autospec=True)
def test_interval(self, mock_print, mock_sleep): i = 2 args = scroller.parser.parse_args(['-c', '1', '-i', str(i)]) scroller.main(self.test_str, args) mock_sleep.assert_called_with(i) @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) def test_reverse(self, mock_print, mock_sleep): args = scroller.parser.parse_args(['-c', '1', '-s', '', '-r']) scroller.main(self.test_str, args) (calling_args,), _ = mock_print.call_args p = scroller.permute(self.test_str, rev=True) self.assertEqual(calling_args, p) @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) def test_separator(self, mock_print, mock_sleep): s = '_' args = scroller.parser.parse_args(['-c', '1', '-s', s, '-r']) scroller.main(self.test_str, args) (calling_args,), _ = mock_print.call_args self.assertEqual(calling_args, s + self.test_str) @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) def test_newline(self, mock_print, mock_sleep): args = scroller.parser.parse_args(['-c', '1']) scroller.main(' ', args) _, calling_kwargs = mock_print.call_args self.assertEqual(calling_kwargs['end'], '\n') @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) @patch('sys.stdout.write', autospe=True) @patch('builtins.input', autospec=True) @patch('select.select', autospec=True) def test_open(self, mock_select, mock_input, mock_write, mock_print, mock_sleep): mock_input.side_effect = ['life of brian', 'flying circus'] def gen_side_effect(): yield ([sys.stdin], [], []) while True: yield ([], [], []) mock_select.side_effect = gen_side_effect() args = scroller.parser.parse_args(['-c', '2', '-o', '-m']) scroller.main(args=args) self.assertEqual(mock_input.call_count, 2) self.assertTrue(mock_write.called) self.assertTrue(mock_sleep.called) @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) def test_prefix(self, mock_print, mock_sleep): args = scroller.parser.parse_args([ '-c', '1', '-b', 'prefix', '-s', '' ]) 
scroller.main(' ', args) calling_args, _ = mock_print.call_args self.assertEqual(calling_args[0], 'prefix ') @patch('time.sleep', autospec=True) @patch('builtins.print', autospec=True) def test_postfix(self, mock_print, mock_sleep): args = scroller.parser.parse_args([ '-c', '1', '-a', 'postfix', '-s', '' ]) scroller.main(' ', args) calling_args, _ = mock_print.call_args self.assertEqual(calling_args[0], ' postfix') if __name__ == '__main__': main()
<filename>src/Manager/MemoryContainer.h
#pragma once

#include "../SObject.h"
#include "Base/SContainer.h"

namespace CSE {

    class SObject;

    // Container tracking SObject pointers.
    // NOTE(review): constructor/destructor are protected, so instances are
    // presumably created only by a derived or friend type — confirm.
    class MemoryContainer : public SContainer<SObject*> {
    protected:
        MemoryContainer();

        ~MemoryContainer() override;
    };
}
The forces of the Kiev government used cluster munitions in populated areas in the city of Donetsk, eastern Ukraine, says Human Rights Watch. It adds that the use of this forbidden weaponry violates the laws of war. Human Rights Watch (HRW) was documenting the “widespread use of cluster munitions” in fighting between government troops and self-defense forces, according to an investigation carried out by the watchdog. “While it was not possible to conclusively determine responsibility for many of the attacks, the evidence points to Ukrainian government forces’ responsibility for several cluster munition attacks on Donetsk [Donetsk Region, Eastern Ukraine],” says the report. The UN is “concerned” by the report, a spokesman for Secretary General Stephane Dujarric said at a briefing, adding that Ban Ki-moon is calling for a “political solution.” Kiev however denied the use of cluster munitions by the Ukrainian military in the operation in eastern Ukraine. "Ukrainian military did not use weapons forbidden by international legal law. This also applies to cluster munitions," Andrey Lysenko, spokesman for Ukraine's National Security Council, said at a briefing on Tuesday. He also said that the observers could have been given "provocative information" from DPR militia, as he proposed to increase the number of international observers in eastern Ukraine. "When it comes to the use of cluster munitions on civilian quarters of Donetsk, then I must say that the Ukrainian military did not use weapons on the peaceful quarter of the city." Donetsk, which before the launch of the Kiev military operation in April had a population of about 1 million people, is now literally in ruins. Heavy shelling claimed hundreds of civilian lives in the city. On Monday a huge blast rocked a chemical factory in Donetsk in eastern Ukraine, the city council says on its website. The blast wave reportedly shattered windows in houses in a radius of several kilometers.
Blast rocks chemical plant in Donetsk, claims of tactical missile An investigation says that at least six civilians were killed and dozens injured in these attacks. But the real casualty number is probably higher, says HRW, as the watchdog hasn’t yet probed all the allegations of the cluster munition use in the conflict zone. “It is shocking to see a weapon that most countries have banned used so extensively in eastern Ukraine,” said Mark Hiznay, senior arms researcher at HRW. “Ukrainian authorities should make an immediate commitment not to use cluster munitions and join the treaty to ban them.” The danger of cluster munitions is that each of them contains hundreds of smaller submunitions. After the bomb explodes the container opens up “dispersing the submunitions, which are designed to explode when they hit the ground,” says the investigation. War-torn Donetsk airport reminds of Chernobyl wasteland (PHOTOS, VIDEO) “The submunitions are spread indiscriminately over a wide area, often the size of a football field, putting anyone in the area at the time of attack, whether combatants or civilians, at risk of death or injury.” The Convention on Cluster Munitions signed in 2009 includes 114 countries so far. However Ukraine has yet to join the treaty. “There is particularly strong evidence that Ukrainian government forces were responsible for several cluster munition attacks on central Donetsk in early October,” HRW said. The watchdog identified cluster munitions by the distinctive craters, remnants of the submunitions found at the impact sites, and remnants of the rockets found in the vicinity. “Ukrainian forces should immediately make a commitment to not use cluster munitions and to investigate and hold accountable any personnel responsible for firing cluster munitions into populated areas. Ukraine should accede to the treaty banning their use,” HRW said. 
Ukraine’s authorities neither confirmed nor denied the allegations, says the group, adding that Kiev didn’t respond to a letter sent by the Cluster Munition Coalition in July or a letter sent by HRW on October 13. “Firing cluster munitions into populated areas is utterly irresponsible and those who ordered such attacks should be held to account,” Hiznay said. “The best way for the Ukrainian authorities to demonstrate a commitment to protect civilians would be an immediate promise to stop using cluster munitions.”
<gh_stars>1-10
package org.antlr.gunit.swingui;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.ImageIcon;

/**
 * Loads and caches the GUI's icons from classpath resources.
 * NOTE(review): getSingleton() is not thread-safe; presumably only called
 * from the Swing EDT — confirm.
 */
public class ImageFactory {

    private static ImageFactory singleton;

    /** Returns the lazily-created shared instance. */
    public static ImageFactory getSingleton() {
        if (singleton == null) {
            singleton = new ImageFactory();
        }
        return singleton;
    }

    private ImageFactory() {
        ACCEPT = getImage("accept.png");
        ADD = getImage("add.png");
        DELETE = getImage("delete24.png");
        TEXTFILE = getImage("textfile24.png");
        TEXTFILE16 = getImage("textfile16.png");
        ADDFILE = getImage("addfile24.png");
        WINDOW16 = getImage("windowb16.png");
        FAV16 = getImage("favb16.png");
        SAVE = getImage("floppy24.png");
        OPEN = getImage("folder24.png");
        EDIT16 = getImage("edit16.png");
        FILE16 = getImage("file16.png");
        RUN_PASS = getImage("runpass.png");
        RUN_FAIL = getImage("runfail.png");
        TESTSUITE = getImage("testsuite.png");
        TESTGROUP = getImage("testgroup.png");
        TESTGROUPX = getImage("testgroupx.png");
        NEXT = getImage("next24.png");
    }

    /**
     * Reads an icon resource fully and wraps it in an ImageIcon.
     * Prints an error and exits the process when the resource is missing
     * or unreadable (icons are required for the GUI to function).
     */
    private ImageIcon getImage(String name) {
        name = IMG_DIR + name;
        // try-with-resources closes the stream on every path (the original
        // leaked it on exception). Reading in a loop replaces the original
        // single read of InputStream.available() bytes: available() is only
        // an estimate of readable-without-blocking bytes and may truncate
        // the image data.
        try (InputStream in = ImageFactory.class.getClassLoader().getResourceAsStream(name)) {
            if (in == null) {
                // A missing resource previously surfaced as a NullPointerException.
                throw new IOException("resource not found: " + name);
            }
            ByteArrayOutputStream data = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            int count;
            while ((count = in.read(chunk)) != -1) {
                data.write(chunk, 0, count);
            }
            return new ImageIcon(data.toByteArray());
        } catch (IOException | RuntimeException ex) {
            System.err.println("Can't load image file: " + name);
            System.exit(1);
        }
        return null;
    }

    private static final String IMG_DIR = "org/antlr/gunit/swingui/images/";

    public ImageIcon ACCEPT;
    public ImageIcon ADD;
    public ImageIcon DELETE;
    public ImageIcon TEXTFILE;
    public ImageIcon ADDFILE;
    public ImageIcon TEXTFILE16;
    public ImageIcon WINDOW16;
    public ImageIcon FAV16;
    public ImageIcon SAVE;
    public ImageIcon OPEN;
    public ImageIcon EDIT16;
    public ImageIcon FILE16;
    public ImageIcon NEXT;
    public ImageIcon RUN_PASS;
    public ImageIcon RUN_FAIL;
    public ImageIcon TESTSUITE;
    public ImageIcon TESTGROUP;
    public ImageIcon TESTGROUPX;
}
<filename>DuetPkg/FSVariable/MemStorage.c<gh_stars>10-100 /*++ Caution: This file is used for Duet platform only, do not use them in real platform. All variable code, variable metadata, and variable data used by Duet platform are on disk. They can be changed by user. BIOS is not able to protoect those. Duet trusts all meta data from disk. If variable code, variable metadata and variable data is modified in inproper way, the behavior is undefined. Copyright (c) 2006 - 2014, Intel Corporation. All rights reserved.<BR> This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. Module Name: MemStorage.c Abstract: handles variable store/reads with emulated memory Revision History --*/ #include "FSVariable.h" VOID EFIAPI OnVirtualAddressChangeMs ( IN EFI_EVENT Event, IN VOID *Context ); EFI_STATUS EFIAPI MemEraseStore( IN VARIABLE_STORAGE *This ); EFI_STATUS EFIAPI MemWriteStore ( IN VARIABLE_STORAGE *This, IN UINTN Offset, IN UINTN BufferSize, IN VOID *Buffer ); EFI_STATUS MemStorageConstructor ( OUT VARIABLE_STORAGE **VarStore, OUT EFI_EVENT_NOTIFY *GoVirtualEvent, IN UINTN Size ) { EFI_STATUS Status; VS_DEV *Dev; Status = gBS->AllocatePool (EfiRuntimeServicesData, sizeof(VS_DEV), (VOID **) &Dev); ASSERT_EFI_ERROR (Status); ZeroMem (Dev, sizeof(VS_DEV)); Dev->Signature = VS_DEV_SIGNATURE; Dev->Size = Size; Dev->VarStore.Erase = MemEraseStore; Dev->VarStore.Write = MemWriteStore; Status = gBS->AllocatePool (EfiRuntimeServicesData, Size, (VOID **) &VAR_DATA_PTR (Dev)); ASSERT_EFI_ERROR (Status); DEBUG ((EFI_D_ERROR, "VStorage: Size = 0x%x\n", Size)); *VarStore = &Dev->VarStore; *GoVirtualEvent = OnVirtualAddressChangeMs; return 
EFI_SUCCESS; } VOID EFIAPI OnVirtualAddressChangeMs ( IN EFI_EVENT Event, IN VOID *Context ) { VS_DEV *Dev; Dev = DEV_FROM_THIS (Context); EfiConvertPointer (0, (VOID **)&VAR_DATA_PTR (Dev)); EfiConvertPointer (0, (VOID **)&Dev->VarStore.Erase); EfiConvertPointer (0, (VOID **)&Dev->VarStore.Write); } EFI_STATUS EFIAPI MemEraseStore( IN VARIABLE_STORAGE *This ) { VS_DEV *Dev; Dev = DEV_FROM_THIS(This); SetMem (VAR_DATA_PTR (Dev), Dev->Size, VAR_DEFAULT_VALUE); return EFI_SUCCESS; } EFI_STATUS EFIAPI MemWriteStore ( IN VARIABLE_STORAGE *This, IN UINTN Offset, IN UINTN BufferSize, IN VOID *UserBuffer ) { VS_DEV *Dev; Dev = DEV_FROM_THIS(This); ASSERT (Offset + BufferSize < Dev->Size); // For better performance if (VAR_DATA_PTR (Dev) + Offset != UserBuffer) { CopyMem (VAR_DATA_PTR (Dev) + Offset, UserBuffer, BufferSize); } return EFI_SUCCESS; }
# Problem https://atcoder.jp/contests/abc102/tasks/abc102_b
# Python 2nd Try
import sys


def solver(allN, numberList):
    """Return the maximum absolute difference among the first allN values.

    max |a_j - a_k| over all pairs equals max - min, so a single O(n) pass
    replaces the original O(n^2) double loop. Returns 0 for empty input,
    matching the original's initial answer.

    :param allN: number of leading elements of numberList to consider
    :param numberList: list of integers
    """
    values = numberList[:allN]
    if not values:
        return 0
    return max(values) - min(values)


if __name__ == "__main__":
    N = sys.stdin.readline().rsplit()
    Ai = list(map(int, sys.stdin.readline().split()))
    answer = solver(int(N[0]), Ai)
    print(answer)
package database

import (
	goharborv1 "github.com/goharbor/harbor-operator/apis/goharbor.io/v1beta1"
	"github.com/goharbor/harbor-operator/pkg/cluster/lcm"
	corev1 "k8s.io/api/core/v1"
)

// databaseReadyStatus builds a DatabaseReady condition set to True, carrying
// the given reason, message and extra properties.
func databaseReadyStatus(reason, message string, properties lcm.Properties) *lcm.CRStatus {
	return lcm.New(goharborv1.DatabaseReady).
		WithStatus(corev1.ConditionTrue).
		WithReason(reason).
		WithMessage(message).
		WithProperties(properties)
}

// databaseNotReadyStatus builds a DatabaseReady condition set to False with
// the given reason and message (no properties).
func databaseNotReadyStatus(reason, message string) *lcm.CRStatus {
	return lcm.New(goharborv1.DatabaseReady).
		WithStatus(corev1.ConditionFalse).
		WithReason(reason).
		WithMessage(message)
}

// databaseUnknownStatus builds a DatabaseReady condition whose state cannot
// be determined yet.
func databaseUnknownStatus() *lcm.CRStatus {
	return lcm.New(goharborv1.DatabaseReady).
		WithStatus(corev1.ConditionUnknown)
}
The grayscale world conjured up each season by Thom Browne is bizarre, twisted, sometimes unintentionally hilarious, and sometimes entirely intentionally so. The last is far more entrancing than the first—when you realize Browne is laughing with you, rather than you at him (or, perhaps, vice versa). And so it was for Spring, when Browne decided life was a beach and unleashed one of the wittiest, most memorable, and certainly most bizarre runway stagings of his entire career. Considering they’ve previously involved gilded fauns, menageries of animals and hyperinflated Elmer Fudd lookalikes, and male flappers in Maxim’s, that’s a tall order. Here was the scene: a post-apocalyptic beach of black sand, gray palm tree, and a lounging sunbather in a tight zippered wet suit, like a black-and-white movie still. John Williams’s ominous Jaws theme began to play, and a model in a black suit—drop-crotch pants, jacket with a dorsal fin attached, head hidden under a leather shark mask—perambulated out and circled the set. No word of a lie. Then a bunch of shark-attack-ready models waddled out in Fatty Arbuckle onesies, plodding to take their place about the tree. A quick zip and, like a cheap infomercial before and after, the corpulence literally peeled right off, to reveal spangled short suits, tailcoats, and overcoats in brilliant, poolside cocktail shades of orange sorbet, cassata, and piña colada, in fur or tweed or hibiscus flower laces with embroideries of surfboards, and islands, and sharks.
package validate import "regexp" // uuid regex helper var UUIDRegExp = regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$")
// LOCAL

/** QA/local SvelteKit data-endpoint paths. */
export enum SveltekitDataEndpointsQa {
  SERVICE = '/service',
}

/** QA/local SvelteKit search-endpoint paths. */
export enum SveltekitSearchEndpointsQa {
  SEARCH = '/search',
}
<gh_stars>1-10
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package algoritma_ve_programlama_sorular;

/**
 * "Strings one step apart": checks whether two strings differ by at most one
 * character when compared as multisets of characters (order is ignored).
 *
 * @author bahad
 */
public class String_bir_adim_uzakta {

    public static void main(String[] args) {
        String str0 = "BAHADR";
        String str1 = "bahadr";
        String str2 = "BAHADRX";
        String str3 = "BAHADYR";
        String str4 = "YBAHA DR";
        String str5 = "BAHADr";
        String str6 = "BAHAdr";
        String str7 = "baHADr";
        String str8 = "bAHADR";
        String str9 = "BAHADR";
        String str10 = "BAH";
        String str11 = "BAHAD";
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str1));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str2));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str3));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str4));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str5));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str6));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str7));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str8));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str9));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str10));
        System.out.println(String_bir_adim_uzakta.String_mesafe_kontrol(str0, str11));
    }

    /**
     * Returns true when the two strings' character multisets differ by at
     * most one character. Also prints the computed distance (side effect
     * kept from the original).
     *
     * Refactored: the original duplicated the entire counting logic in two
     * branches that differed only in which string was incremented vs.
     * decremented; both branches are now a single pass over shorter/longer.
     */
    public static boolean String_mesafe_kontrol(String str0, String str1) {
        // counts[c]: +1 per occurrence in the shorter string, -1 per
        // occurrence in the longer one. On equal lengths str1 takes the
        // "shorter" role, matching the original's else-branch.
        int[] counts = new int[256];
        String shorter = str0.length() < str1.length() ? str0 : str1;
        String longer = str0.length() < str1.length() ? str1 : str0;
        for (int i = 0; i < shorter.length(); ++i) {
            counts[shorter.charAt(i)]++;
        }
        for (int i = 0; i < longer.length(); ++i) {
            counts[longer.charAt(i)]--;
        }
        // Sum only the negative counts: characters the longer string has
        // in excess of the shorter one.
        int result = 0;
        for (int count : counts) {
            if (count < 0) {
                result += count;
            }
        }
        System.out.print(" uzaklık: " + -1 * result + " ");
        return result == 0 || result == -1;
    }
}
<filename>Java-Kotlin/src/main/java/array/_384.java
package array;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * LeetCode 384 "Shuffle an Array": supports resetting to the original order
 * and producing uniformly random permutations.
 */
public class _384 {

    // Working copy mutated by shuffle().
    private int[] array;
    // Pristine copy used by reset().
    private int[] original;
    private Random random = new Random();

    public _384(int[] nums) {
        this.array = nums;
        this.original = nums.clone();
    }

    /**
     * Resets the array to its original configuration and return it.
     */
    public int[] reset() {
        array = original;
        original = original.clone();
        return array;
    }

    /**
     * Returns a random shuffling of the array.
     *
     * Implemented as an in-place Fisher–Yates shuffle: O(n) time, O(1)
     * extra space. This resolves the original TODO — the previous version
     * copied into an ArrayList and removed elements by index, which is
     * O(n^2) because of ArrayList.remove's element shifting.
     */
    public int[] shuffle() {
        for (int i = array.length - 1; i > 0; i--) {
            // j uniform over [0, i]; swapping array[i] and array[j]
            // yields an unbiased permutation.
            int j = random.nextInt(i + 1);
            int tmp = array[i];
            array[i] = array[j];
            array[j] = tmp;
        }
        return array;
    }
}
/**
 * Holds information about a custom field definition: which entity it
 * belongs to, its storage column, data type, and client-facing flags
 * (mandatory/searchable/importable/exportable).
 *
 * @author rajeevg
 */
public class FieldConfig {

    /*-----------------------------------------------------------------------
     * ATTRIBUTES
     *---------------------------------------------------------------------*/

    /* primary key */
    private String id ;

    /* Entity to which this custom field belongs.
     * Entity name should be same as Java class name. It should be in proper case.
     * E.g. "Requirement" and not "requirement"
     * */
    private String entityName;

    /* system field or custom field */
    private Boolean systemField;

    /* data-type: int, long, string, etc.
     * Explicit foreign key is not added to FieldTypeMetaData.id table */
    private String fieldTypeMetadata ;

    /* field name, all lower-case, starts with alphabetic character. Holds value of "name" attribute
     * <property column="zcf_myCustomField" name="myCustomField" not-null="false" type="java.lang.String"/> */
    private String fieldName ;

    /* column name, all lower-case, starts with alphabetic character. Holds value of "column" attribute
     * <property column="zcf_myCustomField" name="myCustomField" not-null="false" type="java.lang.String"/> */
    private String columnName ;

    /* Descriptive field name */
    private String displayName ;

    /* Long description */
    private String description ;

    /* value is mandatory at client */
    private Boolean mandatory ;

    /* value is searchable */
    private Boolean searchable ;

    /* value is importable */
    private Boolean importable ;

    /* value is exportable */
    private Boolean exportable ;

    /* key-value mappings if datatype is LOV.
     * This value is saved in Preference table.
     * Preference.name is entity.fieldname.LOV, e.g.: requirement.zcf_1001.LOV*/
    // @Column(name="lovValue", length = 255)
    // @Transient
    // private String lovValue ;

    /* length of column if of type String */
    private Integer length;

    /* allowed values for this field; raw List — element type not visible here */
    private List allowedValues;

    /**
     * Full-state constructor; assigns every attribute directly.
     */
    public FieldConfig(String id, String entityName, Boolean systemField, String fieldTypeMetadata,
            String fieldName, String columnName, String displayName, String description,
            Boolean mandatory, Boolean searchable, Boolean importable, Boolean exportable,
            Integer length, List allowedValues) {
        super();
        this.id = id;
        this.entityName = entityName;
        this.systemField = systemField;
        this.fieldTypeMetadata = fieldTypeMetadata;
        this.fieldName = fieldName;
        this.columnName = columnName;
        this.displayName = displayName;
        this.description = description;
        this.mandatory = mandatory;
        this.searchable = searchable;
        this.importable = importable;
        this.exportable = exportable;
        this.length = length;
        this.allowedValues = allowedValues;
    }

    /*-----------------------------------------------------------------------
     * OVERRIDE
     *---------------------------------------------------------------------*/

    // NOTE(review): description and allowedValues are omitted from the
    // string representation — confirm this is intentional.
    @Override
    public String toString() {
        return "FieldConfig: id:" + id + " entityName:" + entityName + " systemField:" + systemField
                + " fieldTypeMetadata:" + fieldTypeMetadata + " fieldName:" + fieldName
                + " columnName:" + columnName + " displayName:" + displayName
                + " mandatory:" + mandatory + " searchable:" + searchable
                + " importable: " + importable + " exportable:" + exportable
                + " length:" + length ;
        // + " lovValue:" + lovValue ;
    }

    /**
     * Merges (i.e. copies) attributes from input entity to itself.
     * NOTE(review): length (commented out below) and allowedValues are NOT
     * copied — presumably deliberate; confirm before relying on merge for
     * those fields.
     *
     * @param updatedFieldConfig
     */
    public void merge(FieldConfig updatedFieldConfig) {
        this.setId(updatedFieldConfig.getId());
        this.setEntityName(updatedFieldConfig.getEntityName());
        this.setSystemField(updatedFieldConfig.getSystemField());
        this.setFieldTypeMetadata(updatedFieldConfig.getFieldTypeMetadata());
        this.setFieldName(updatedFieldConfig.getFieldName());
        this.setColumnName(updatedFieldConfig.getColumnName());
        this.setDisplayName(updatedFieldConfig.getDisplayName());
        this.setDescription(updatedFieldConfig.getDescription());
        this.setMandatory(updatedFieldConfig.getMandatory());
        this.setSearchable(updatedFieldConfig.getSearchable());
        this.setImportable(updatedFieldConfig.getImportable());
        this.setExportable(updatedFieldConfig.getExportable());
        // this.setLength(updatedFieldConfig.getLength());
    }

    /*-----------------------------------------------------------------------
     * GETTER/SETTER
     *---------------------------------------------------------------------*/

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public String getEntityName() { return entityName; }
    public void setEntityName(String entityName) { this.entityName = entityName; }
    public Boolean getSystemField() { return systemField; }
    public void setSystemField(Boolean systemField) { this.systemField = systemField; }
    public String getFieldTypeMetadata() { return fieldTypeMetadata; }
    public void setFieldTypeMetadata(String fieldTypeMetadata) { this.fieldTypeMetadata = fieldTypeMetadata; }
    public String getFieldName() { return fieldName; }
    public void setFieldName(String fieldName) { this.fieldName = fieldName; }
    public String getColumnName() { return columnName; }
    public void setColumnName(String columnName) { this.columnName = columnName; }
    public String getDisplayName() { return displayName; }
    public void setDisplayName(String displayName) { this.displayName = displayName; }
    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
    public Boolean getMandatory() { return mandatory; }
    public void setMandatory(Boolean mandatory) { this.mandatory = mandatory; }
    public Boolean getSearchable() { return searchable; }
    public void setSearchable(Boolean searchable) { this.searchable = searchable; }
    public Boolean getImportable() { return importable; }
    public void setImportable(Boolean importable) { this.importable = importable; }
    public Boolean getExportable() { return exportable; }
    public void setExportable(Boolean exportable) { this.exportable = exportable; }
    public Integer getLength() { return length; }
    public void setLength(Integer length) { this.length = length; }
    /* read-only: no setter for allowedValues */
    public List getAllowedValues() { return allowedValues; }
}
// SubmitTaskStateChange this action is only used by the Amazon EC2 // Container Service agent, and it is not intended for use outside of the // agent. Sent to acknowledge that a task changed states. func (c *ECS) SubmitTaskStateChange(req *SubmitTaskStateChangeRequest) (resp *SubmitTaskStateChangeResult, err error) { resp = &SubmitTaskStateChangeResult{} err = c.client.Do("SubmitTaskStateChange", "POST", "/", req, resp) return }
Cadarache: In the dusty highlands of Provence in southern France, workers have excavated a vast rectangular pit 17 metres (56 feet) down into the unforgiving rocks. From my raised vantage point, I can see bright yellow mechanical diggers and trucks buzzing around the edge of the pit, looking toy-like in the huge construction site. Above us, the fireball Sun dries the air at an unrelenting 37C. These are embryonic stages to what is perhaps humankind's most ambitious scientific and engineering project: to replicate the Sun here on Earth. When construction is complete, the pit will host a 73-metre-high machine (240 feet) that will attempt to create boundless energy by smashing hydrogen nuclei together, in much the same way as stars like our Sun do. Physicists have dreamed of being able to produce cheap, safe and plentiful energy through atomic fusion since the 1950s. Around the world, researchers continue to experiment with creating fusion energy using various methods. But as people within the field have said the dream has always been "30 years away" from realisation. The need for a new energy source has never been more pressing. Global energy demand is expected to double by 2050, while the share coming from fossil fuels – currently 85% – needs to drop dramatically if we are to reduce carbon emissions and limit global warming. Fusion, many believe, could be the answer. It works by forcing together two types, or isotopes, of hydrogen at such a high temperature that the positively charged atoms are able to overcome their mutual repulsion and fuse. The result of this fusion is an atom of helium plus a highly energetic neutron particle. Physicists aim to capture the energy released by these emitted neutrons, and use it to drive steam turbines and produce electricity. When the reaction occurs in the core of the Sun, the giant ball of gas applies a strong gravitational pressure that helps force the hydrogen nuclei together. 
Here on Earth, any fusion reaction will have to take place at a tiny fraction of the scale of the Sun, without the benefit of its gravity. So to force hydrogen nuclei together on Earth, engineers need to build the reactor to withstand temperatures at least ten times that of the Sun – which means hundreds of millions of degrees. Heated doughnuts It's just one of the huge number of challenges facing the designers of this groundbreaking project. The concept was discussed and argued over for several decades before finally being agreed in 2007 as a multinational cooperation between the European Union, China, India, Japan, South Korea, Russia and the US – in total, 34 countries representing more than half of the world's population. Since then, the budget of 5 billion euros has trebled, the scale of the reactor has been halved, the completion date has been pushed back, and the project has somewhat lost its shine – which is somewhat ironic given the project is called Iter, meaning 'the way' in Latin. But despite the difficulties, some progress is being made. The parts are being manufactured and tested by the participating nations, many of whom hope to develop the expertise to compete in any new fusion energy market that would be expected to follow a successful outcome at Iter. Since they don't have access to the special conditions available in the Sun, physicists have designed a doughnut-shaped reaction chamber, called a tokamak. Hydrogen isotopes are heated to the point to which they lose electrons and form a plasma, and this is held in place for fusion but held away from the reactor walls, which could not withstand the heat. The tokamak deploys a powerful magnetic field to suspend and compress the hydrogen plasma using an electromagnet made of superconducting coils of a niobium tin alloy. Once atomic fusion occurs, the heat produced will help to keep the core hot. 
But unlike a fission reaction that takes place in nuclear power stations and atomic bombs, the fusion reaction is not self-perpetuating. It requires a constant input of material or else it quickly fizzles out, making the reaction far safer. And unlike what you might have seen in a recent Batman movie, the chamber cannot be transformed into a nuclear bomb. The neutrons will then be absorbed by the surrounding walls of the tokamak, transferring their energy to the walls as heat, and this in turn will be dissipated through cooling towers. Because one of the hydrogen isotopes used, tritium, is radioactive (with a half-life of 12 years), the entire site must conform to France's strict nuclear safety laws. And to complicate matters further, the site is also moderately seismically active, meaning that the buildings are being supported on rubber pads to protect them from earthquakes. These issues, plus the logistics of dealing with multiple nations with their own fluctuating domestic budget constraints, mean that the site won't be ready for the first experiments until 2020. Even then, they will just be testing the reactor and its equipment. The first proper fusion tests, reacting deuterium (a hydrogen isotope abundant in sea water) and tritium (which will be made from lithium), won't take place until 2028. Power up Those will be the key tests, though. If all goes to plan, the physicists hope to prove that they can produce ten times as much energy as the experiment requires. The plan is to use 50 megawatts (in heating the plasma and cooling the reactor), and get 500 MW out. Larger tokamaks should, theoretically, be able to deliver an even greater input to output power ratio, in the range of gigawatts. And that is the big gamble. So far, the world's best and biggest tokamak, the JET experiment in the UK, hasn't even managed to break even, energy-wise. Its best ever result, in 1997, achieved a 16 MW output with a 25 MW input.
Scale is an extremely important factor for tokamaks, though. Iter will be twice the size of JET, as well as featuring a number of design improvements. If Iter is successful in its proof of principle mission, the first demonstration fusion plants will be built, capable of actually using and storing the energy generated for electricity production. These plants are slated to begin operation in about 2040 - around 30 years away, in fact... Despite the seductive promise of finally getting a supply of electricity that's "too cheap to meter", the long wait to readiness and the fact that the technology remains unproven, means that many politicians are hesitant or even hostile to the expensive project. Additionally, because fusion energy won't be ready for decades, even if it works, other low-carbon energy sources must still be pursued in the short-term at least. But if we do manage to replicate the Sun on Earth, the consequences would be spectacular. An era of genuinely cheap energy – both environmentally and financially, would have far reaching implications for everything from poverty reduction to conflict easement. It’s exciting to think that the next generation could in some way be fusion powered – perhaps even within the lifetimes of the workman digging below me. But I can’t help but remember the 30-year rule. Update (14/08): The original text contained factual inaccuracies regarding the fusion reaction within the reactor. This has now been rectified. If you would like to comment on this article or anything else you have seen on Future, head over to our Facebook page or message us on Twitter.
// // ofxPixelObject.cpp // sACN_Mapped // // Created by bluejaylouche on 19/7/21. // // Adapted from: https://github.com/DHaylock/ofxOPC #include "ofxPixelObject.hpp" //-------------------------------------------------------------- vector <ofColor> ofxPixelObject::colorData() { // Transmit Data return colors; } //-------------------------------------------------------------- vector<glm::vec2> ofxPixelObject::getPixelCoordinates() { colors.clear(); return pos; } //-------------------------------------------------------------- void ofxPixelObject::drawGrabRegion(bool hideArea) { } //-------------------------------------------------------------- void ofxPixelObject::draw(int x, int y) { }
// Writes the last config::web_log_bytes of the INFO logfile to a webpage // Note to get best performance, set GLOG_logbuflevel=-1 to prevent log buffering void logs_handler(const WebPageHandler::ArgumentMap& args, std::stringstream* output) { (*output) << "<br/>Couldn't open INFO log file: "; }
import { Buffer } from './buffer'; type elementWithTimestamp<T> = { e: T, t: number, n?: elementWithTimestamp<T> } export class TimeBuffer<T> implements Buffer<T> { private readonly maxAge: number; private tail?: elementWithTimestamp<T>; private head?: elementWithTimestamp<T>; constructor(maxAge: number) { this.maxAge = maxAge; } size(): number { return Number.POSITIVE_INFINITY; } len(): number { this.forwardTail(); let cur = this.tail; let i = 0; while(cur !== undefined) { i++; cur = cur.n; } return i; } read(e: T[]): number { this.forwardTail(); if(e.length === 0) { return 0; } let cur = this.tail; let i = 0; while(cur !== undefined) { e[i++] = cur.e; if( i === e.length) { break; } cur = cur.n; } return i; } write(e: T[]): number { for(let i = 0; i < e.length; i++) { this.putElement(e[i]) } return e.length; } private putElement(e: T) { const newElement = { e, t: Date.now(), n: undefined } as elementWithTimestamp<T>; if(this.tail === undefined) { this.tail = newElement; } if(this.head === undefined) { this.head = newElement } else { this.head.n = newElement; this.head = newElement } } forEach(fn: (e: T) => any): number { this.forwardTail(); let cur = this.tail; let i = 0; while (cur !== undefined) { fn(cur.e); i++; cur = cur.n; } return i; } private forwardTail() { if(this.tail === undefined) return; const now = Date.now(); while(now - this.tail.t > this.maxAge) { if(this.tail === this.head) { this.tail = undefined; this.head = undefined; } else { this.tail = this.tail?.n; } if(this.tail === undefined) break; } } clear() : void { } }
/*
Copyright 2019 <NAME>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package state

import (
	"errors"
)

// OptionConfig returns an Option that replaces the entire Config with the
// supplied value.
func OptionConfig(config Config) Option {
	return func(c *Config) error {
		*c = config
		return nil
	}
}

// OptionInit returns an Option that sets the Init function.
func OptionInit(init Init) Option {
	return func(config *Config) error {
		config.Init = init
		return nil
	}
}

// OptionUpdate returns an Option that sets the Update function.
func OptionUpdate(update Update) Option {
	return func(config *Config) error {
		config.Update = update
		return nil
	}
}

// OptionView returns an Option that sets the View function.
func OptionView(view View) Option {
	return func(config *Config) error {
		config.View = view
		return nil
	}
}

// OptionProducer returns an Option that sets the Producer.
func OptionProducer(producer Producer) Option {
	return func(config *Config) error {
		config.Producer = producer
		return nil
	}
}

// OptionConsumer returns an Option that sets the Consumer.
func OptionConsumer(consumer Consumer) Option {
	return func(config *Config) error {
		config.Consumer = consumer
		return nil
	}
}

// OptionReplay returns an Option that sets the Replay predicate.
func OptionReplay(replay func() bool) Option {
	return func(config *Config) error {
		config.Replay = replay
		return nil
	}
}

// OptionFetcher returns an Option that sets the Fetcher.
func OptionFetcher(fetcher Fetcher) Option {
	return func(config *Config) error {
		config.Fetcher = fetcher
		return nil
	}
}

// OptionStore returns an Option that sets the Store.
func OptionStore(store Store) Option {
	return func(config *Config) error {
		config.Store = store
		return nil
	}
}

// OptionHydrator returns an Option that sets the Hydrator.
func OptionHydrator(hydrator Hydrator) Option {
	return func(config *Config) error {
		config.Hydrator = hydrator
		return nil
	}
}

// OptionDehydrator returns an Option that sets the Dehydrator.
func OptionDehydrator(dehydrator Dehydrator) Option {
	return func(config *Config) error {
		config.Dehydrator = dehydrator
		return nil
	}
}

// OptionKey returns an Option that sets the Key.
func OptionKey(key string) Option {
	return func(config *Config) error {
		config.Key = key
		return nil
	}
}

// RunValidator checks that the Config carries the minimum set of fields a
// run requires: Init, Update, View, Producer and Consumer.
func RunValidator(config *Config) error {
	if config.Init == nil {
		return errors.New("state.RunValidator nil init")
	}
	if config.Update == nil {
		return errors.New("state.RunValidator nil update")
	}
	if config.View == nil {
		return errors.New("state.RunValidator nil view")
	}
	if config.Producer == nil {
		return errors.New("state.RunValidator nil producer")
	}
	if config.Consumer == nil {
		return errors.New("state.RunValidator nil consumer")
	}
	return nil
}

// RunOptions bundles the options required to satisfy RunValidator.
func RunOptions(
	init Init,
	update Update,
	view View,
	producer Producer,
	consumer Consumer,
) []Option {
	return []Option{
		OptionInit(init),
		OptionUpdate(update),
		OptionView(view),
		OptionProducer(producer),
		OptionConsumer(consumer),
	}
}

// BatchValidator checks RunValidator's fields plus the Fetcher required
// for batch operation.
func BatchValidator(config *Config) error {
	if err := RunValidator(config); err != nil {
		return err
	}
	if config.Fetcher == nil {
		return errors.New("state.BatchValidator nil fetcher")
	}
	return nil
}

// BatchOptions bundles the options required to satisfy BatchValidator.
func BatchOptions(
	init Init,
	update Update,
	view View,
	producer Producer,
	consumer Consumer,
	fetcher Fetcher,
) []Option {
	return append(
		RunOptions(
			init,
			update,
			view,
			producer,
			consumer,
		),
		OptionFetcher(fetcher),
	)
}

// AggregateValidator checks BatchValidator's fields plus the Store,
// Hydrator and Dehydrator required for aggregation.
func AggregateValidator(config *Config) error {
	if err := BatchValidator(config); err != nil {
		return err
	}
	if config.Store == nil {
		return errors.New("state.AggregateValidator nil store")
	}
	if config.Hydrator == nil {
		return errors.New("state.AggregateValidator nil hydrator")
	}
	if config.Dehydrator == nil {
		return errors.New("state.AggregateValidator nil dehydrator")
	}
	return nil
}

// AggregateOptions bundles the options required to satisfy
// AggregateValidator.
func AggregateOptions(
	init Init,
	update Update,
	view View,
	producer Producer,
	consumer Consumer,
	fetcher Fetcher,
	store Store,
	hydrator Hydrator,
	dehydrator Dehydrator,
	key string,
) []Option {
	return append(
		BatchOptions(
			init,
			update,
			view,
			producer,
			consumer,
			fetcher,
		),
		OptionStore(store),
		OptionHydrator(hydrator),
		OptionDehydrator(dehydrator),
		OptionKey(key),
	)
}
Re-imagining occupational therapy clients as communities: Presenting the community-centred practice framework Abstract Background: Occupational therapists are increasingly working with communities and providing services at the community level. There is, however, a lack of conceptual frameworks to guide this work. Aim: The aim of this article is to present a new conceptual framework for community-centered practice in occupational therapy. Material and Method: The conceptual framework was developed from qualitative multi-case research on exemplars of community participation. The first was a network of Canadian food security programs, and the second, a rural Australian community banking initiative. Key themes were identified from across the case studies, and cross-case findings interpreted using occupational therapy and occupational science knowledge, and relevant social theory. The outcome is a four-stage, occupation-focused, community-centered practice framework. Findings: The Community-Centred Practice Framework can be used by occupational therapists to understand and apply a community-centered practice approach. The four stages are: (1) Community Identity, (2) Community Occupations, (3) Community Resources and Barriers, and (4) Participation Enablement. Conclusions: Further research is needed to trial and critically evaluate the framework, to assess its usefulness as a robust, occupation-focused, frame of reference to guide community-centered practice in occupational therapy. Significance: The proposed framework should assist occupational therapists to conceptualize community-centered practice, and to utilize and apply theory.
/**
 * A CPU, as read from Solaris kstat counters and `isainfo`.
 *
 * @author widdis[at]gmail[dot]com
 */
public class SolarisCentralProcessor extends AbstractCentralProcessor {

    private static final long serialVersionUID = 1L;

    private static final Logger LOG = LoggerFactory.getLogger(SolarisCentralProcessor.class);

    /**
     * Create a Processor
     */
    public SolarisCentralProcessor() {
        super();
        // Initialize class variables
        initVars();
        // Initialize tick arrays
        initTicks();
        LOG.debug("Initialized Processor");
    }

    // Populates vendor/name/stepping/model/family from the first cpu_info
    // kstat, 64-bit capability from `isainfo -b`, and the synthetic
    // processor ID.
    private void initVars() {
        // Get first result
        Kstat ksp = KstatUtil.kstatLookup("cpu_info", -1, null);
        // Set values
        if (ksp != null && KstatUtil.kstatRead(ksp)) {
            setVendor(KstatUtil.kstatDataLookupString(ksp, "vendor_id"));
            setName(KstatUtil.kstatDataLookupString(ksp, "brand"));
            setStepping(KstatUtil.kstatDataLookupString(ksp, "stepping"));
            setModel(KstatUtil.kstatDataLookupString(ksp, "model"));
            setFamily(KstatUtil.kstatDataLookupString(ksp, "family"));
        }
        setCpu64("64".equals(ExecutingCommand.getFirstAnswer("isainfo -b").trim()));
        setProcessorID(getProcessorID(getStepping(), getModel(), getFamily()));
    }

    /**
     * Updates logical and physical processor counts from psrinfo
     */
    @Override
    protected void calculateProcessorCounts() {
        List<Kstat> kstats = KstatUtil.kstatLookupAll("cpu_info", -1, null);
        // Distinct chip_id values give physical packages; distinct core_id
        // values give physical cores; each readable kstat is one logical CPU.
        Set<String> chipIDs = new HashSet<>();
        Set<String> coreIDs = new HashSet<>();
        this.logicalProcessorCount = 0;
        for (Kstat ksp : kstats) {
            if (ksp != null && KstatUtil.kstatRead(ksp)) {
                this.logicalProcessorCount++;
                chipIDs.add(KstatUtil.kstatDataLookupString(ksp, "chip_id"));
                coreIDs.add(KstatUtil.kstatDataLookupString(ksp, "core_id"));
            }
        }
        this.physicalPackageCount = chipIDs.size();
        if (this.physicalPackageCount < 1) {
            LOG.error("Couldn't find physical package count. Assuming 1.");
            this.physicalPackageCount = 1;
        }
        this.physicalProcessorCount = coreIDs.size();
        if (this.physicalProcessorCount < 1) {
            LOG.error("Couldn't find physical processor count. Assuming 1.");
            this.physicalProcessorCount = 1;
        }
        if (this.logicalProcessorCount < 1) {
            LOG.error("Couldn't find logical processor count. Assuming 1.");
            this.logicalProcessorCount = 1;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized long[] getSystemCpuLoadTicks() {
        long[] ticks = new long[TickType.values().length];
        // Average processor ticks
        // NOTE: integer division — fractional remainders are truncated.
        long[][] procTicks = getProcessorCpuLoadTicks();
        for (int i = 0; i < ticks.length; i++) {
            for (long[] procTick : procTicks) {
                ticks[i] += procTick[i];
            }
            ticks[i] /= procTicks.length;
        }
        return ticks;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public double[] getSystemLoadAverage(int nelem) {
        if (nelem < 1 || nelem > 3) {
            throw new IllegalArgumentException("Must include from one to three elements.");
        }
        double[] average = new double[nelem];
        int retval = Libc.INSTANCE.getloadavg(average, nelem);
        if (retval < nelem) {
            // getloadavg returned fewer values than requested; fill the rest
            // with -1 as a "not available" sentinel.
            for (int i = Math.max(retval, 0); i < average.length; i++) {
                average[i] = -1d;
            }
        }
        return average;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long[][] getProcessorCpuLoadTicks() {
        long[][] ticks = new long[this.logicalProcessorCount][TickType.values().length];
        int cpu = -1;
        for (Kstat ksp : KstatUtil.kstatLookupAll("cpu", -1, "sys")) {
            // This is a new CPU
            if (++cpu >= ticks.length) {
                // Shouldn't happen
                break;
            }
            if (KstatUtil.kstatRead(ksp)) {
                ticks[cpu][TickType.IDLE.getIndex()] = KstatUtil.kstatDataLookupLong(ksp, "cpu_ticks_idle");
                ticks[cpu][TickType.SYSTEM.getIndex()] = KstatUtil.kstatDataLookupLong(ksp, "cpu_ticks_kernel");
                ticks[cpu][TickType.USER.getIndex()] = KstatUtil.kstatDataLookupLong(ksp, "cpu_ticks_user");
            }
        }
        return ticks;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getSystemUptime() {
        Kstat ksp = KstatUtil.kstatLookup("unix", 0, "system_misc");
        if (ksp == null) {
            return 0L;
        }
        // Snap Time is in nanoseconds; divide for seconds
        return ksp.ks_snaptime / 1000000000L;
    }

    /**
     * Fetches the ProcessorID by encoding the stepping, model, family, and
     * feature flags.
     *
     * @param stepping
     * @param model
     * @param family
     * @return The Processor ID string
     */
    private String getProcessorID(String stepping, String model, String family) {
        List<String> isainfo = ExecutingCommand.runNative("isainfo -v");
        StringBuilder flags = new StringBuilder();
        // Feature flags are listed between the "64-bit" header line and the
        // "32-bit" header line of `isainfo -v` output.
        for (String line : isainfo) {
            if (line.startsWith("32-bit")) {
                break;
            } else if (!line.startsWith("64-bit")) {
                flags.append(' ').append(line.trim());
            }
        }
        return createProcessorID(stepping, model, family, ParseUtil.whitespaces.split(flags.toString().toLowerCase()));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getContextSwitches() {
        long swtch = 0;
        List<String> kstat = ExecutingCommand.runNative("kstat -p cpu_stat:::/pswitch\\\\|inv_swtch/");
        for (String s : kstat) {
            swtch += ParseUtil.parseLastLong(s, 0L);
        }
        // -1 signals "value unavailable" to the caller.
        return swtch > 0 ? swtch : -1L;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getInterrupts() {
        long intr = 0;
        List<String> kstat = ExecutingCommand.runNative("kstat -p cpu_stat:::/intr/");
        for (String s : kstat) {
            intr += ParseUtil.parseLastLong(s, 0L);
        }
        // -1 signals "value unavailable" to the caller.
        return intr > 0 ? intr : -1L;
    }
}
#include "World.h"
#include "NeuralNet.h"

// Fitness of a cell: distance travelled plus food eaten (defined below).
double getScore(Cell* cell);

// Constructs the world: seeds the RNG (from the string seed when given,
// otherwise from the clock), creates the cell population, and lays out food.
World::World(std::string s) {
	seed = s;
	if (seed != "") {
		std::cout << "World Seed: " << seed << std::endl;
		std::srand(getSum(seed));
		std::cout << "World Seed to int: " << getSum(seed) << std::endl;
	} else {
		std::srand(time(NULL));
	}
	for (int i = 0; i < cellCount; i++) {
		Cell* cell = new Cell(this, i);
		cellList.push_back(cell);
	}
	GenerateFood();
}

// Releases all owned cells and food.
World::~World() {
	for (int i = 0; i < cellList.size(); i++) {
		delete cellList[i];
	}
	for (int i = 0; i < foodList.size(); i++) {
		delete foodList[i];
	}
}

// Deletes existing food and lays out foodChainCount chains of
// foodChainLength items; along each chain the heading wanders, the spacing
// grows by 1.6x per link and the energy doubles.
void World::GenerateFood() {
	//std::cout << "Removing old food..." << std::endl;
	for (int i = 0; i < foodList.size(); i++) {
		delete foodList[i];
	}
	foodList.clear();
	std::vector<Food*>().swap(foodList);
	foodList.shrink_to_fit();
	//std::cout << "Generating food lines..." << std::endl;
	/*for (int i = 0; i < foodChainCount; i++) {
		Generate(foodChainLength, 0, 0, (nextRand() * 2 - 1) * M_PI * 2, 0.1);
	}*/
	for (int n = 0; n < foodChainCount; n++) {
		//double angle = (nextRand() * 2 - 1) * M_PI * 2;
		double angle = 0;
		double dist = 0.1;
		double x = 0;
		double y = 0.3;
		double energy = 10;
		//std::cout << "[Angle: " << angle << ", dist: " << dist << ", X: " << x << ", Y " << y << "]" << std::endl;
		Food* food = new Food(this, x, y, energy, 0);
		foodList.push_back(food);
		for (int i = 1; i < foodChainLength; i++) {
			int id = n * foodChainLength + i;
			angle += (nextRand() * 2 - 1) * (M_PI / 5);
			x += sin(angle) * dist;
			y += cos(angle) * dist;
			dist *= 1.6;
			energy *= 2;
			Food* food = new Food(this, x, y, energy, id);
			foodList.push_back(food);
			//Generate(chainlink - 1, x, y, angle, dist);
		}
	}
}

// Recursive chain generator (currently unused by GenerateFood — kept for
// the commented-out code path above). Places one food item per call.
// NOTE(review): default arguments appear here on the definition; presumably
// they are not repeated in the header declaration — confirm.
void World::Generate(int chainlink, double x = 0, double y = 0, double angle = 0, double dist = 0.1) {
	if (chainlink <= 0) {
		return;
	}
	angle += (nextRand() * 2 - 1) * (M_PI / 32) * 2;
	x += sin(angle) * dist;
	y += cos(angle) * dist;
	dist *= 1.5;
	Food* food = new Food(this, x, y, 0, foodList.size());
	foodList.push_back(food);
	Generate(chainlink - 1, x, y, angle, dist);
}

// One simulation step. When every cell is dead, runs a generation change:
// sorts by score, picks winners with a bias towards high scores (the bias
// decays via indexLikelihood), and replaces the population with mutated
// copies of the winners.
void World::Update(int deltaTime) {
	if (this->checkAllDead() && cellList.size() > 0) {
		this->SortCells();
		std::cout << "Sorted! Highest score is: " << getScore(cellList[0]) << std::endl;
		std::vector<Cell*> winnerList;
		double indexLikelihood = 0.1;
		for (int i = 0; i < cellList.size(); i++) {
			Cell* winner = cellList[(cellList.size() - 1) * indexLikelihood * nextRand()];
			std::cout << "Winner chosen with score of: " << getScore(winner) << std::endl;
			winnerList.push_back(winner);
			if (indexLikelihood < 0) indexLikelihood = 0;
			else indexLikelihood -= indexLikelihood / ((double)cellList.size());
		}
		std::vector<Cell*> newCells;
		for (int i = 0; i < cellList.size(); i++) {
			newCells.push_back(new Cell(*winnerList[nextRand() * winnerList.size()], worldMutator));
		}
		for (int i = 0; i < cellList.size(); i++) {
			delete cellList[i];
			cellList[i] = newCells[i];
		}
		//GenerateFood();
	}
	for (int i = 0; i < cellList.size(); i++) {
		cellList[i]->Update(deltaTime);
	}
}

// Draws every cell and every food item.
void World::Render() {
	for (int i = 0; i < cellList.size(); i++) {
		cellList[i]->Render();
	}
	for (int i = 0; i < foodList.size(); i++) {
		foodList[i]->Render();
	}
	//food1->Render();
}

// Uniform-ish random in [0, 0.99].
// NOTE(review): only 100 distinct values (granularity 0.01) and never
// reaches 1.0 — callers such as Update rely on the < 1.0 bound for safe
// indexing; confirm the coarse granularity is acceptable.
double World::nextRand() {
	return ((double)(rand() % 100)) / 100.0;
}

// Hashes the seed string to an unsigned int for srand().
unsigned int World::getSum(std::string seed) {
	//std::cout << "World Seed: " << seed << std::endl;
	//std::cout << "World Seed Length: " << seed.length() << std::endl;
	unsigned int sum = 0;
	for (int i = 0; i < seed.length(); i++) {
		int a = seed[i];
		int b = a + seed[i] * i;
		sum += (a + b);
	}
	// Clamp into [0, 8224) so different seeds map to a bounded range.
	sum = sum % (1028 * 2 * 2 * 2);
	//std::cout << "World Seed to int: " << sum << std::endl;
	return sum;
}

// Fitness function: rewards distance travelled and food eaten equally.
double getScore(Cell* cell) {
	return cell->getDistanceTravelled() * 100 + cell->getTotalFood() * 100;
}

// Insertion-sorts cellList into descending score order, in place.
void World::SortCells() {
	std::vector<Cell*> sortedCells;
	for (int i = 0; i < cellList.size(); i++) {
		if (i == 0) {
			sortedCells.push_back(cellList[i]);
			continue;
		}
		for (int j = 0; j < sortedCells.size(); j++) {
			if (getScore(cellList[i]) > getScore(sortedCells[j])) {
				sortedCells.insert(sortedCells.begin() + j, cellList[i]);
				break;
			} else if (j == sortedCells.size() - 1) {
				sortedCells.push_back(cellList[i]);
				break;
			}
		}
	}
	for (int i = 0; i < cellList.size(); i++) {
		cellList[i] = sortedCells[i];
		//std::cout << getScore(cellList[i]) << std::endl;
	}
}

// True when no cell in the population is alive.
bool World::checkAllDead() {
	for (int i = 0; i < cellList.size(); i++) {
		if (cellList[i]->isAlive()) {
			return false;
		}
	}
	return true;
}

// Returns the nearest food to (xPos, yPos), skipping IDs listed in `foods`
// (food already eaten by the caller). Returns nullptr when all food is
// excluded.
// NOTE(review): the `dist == -1` clause in the comparison suggests
// Food::getDist can return -1 as a sentinel and that such food is always
// treated as "closest" — confirm against Food's implementation.
Food* World::getNearestFood(double xPos, double yPos, std::vector<int> foods) {
	Food* closest = nullptr;
	double cDist = -1;
	for (int i = 0; i < foodList.size(); i++) {
		bool flag = false;
		for (int j = 0; j < foods.size(); j++) {
			// if the food that has already been eaten includes this id, then continue
			//std::cout << foods[j] << std::endl;
			//std::cout << foodList[i]->getID() << std::endl;
			if (foods[j] == foodList[i]->getID()) {
				flag = true;
				break;
			}
		}
		if (flag) continue;
		double dist = foodList[i]->getDist(xPos, yPos);
		if (closest == nullptr || dist < cDist || dist == -1) {
			closest = foodList[i];
			cDist = dist;
			continue;
		}
	}
	return closest;
}
class WebResponse:
    """A single API response, as returned from a web request.

    NOTE(review): the bare field annotations plus the ``cls(**...)`` call in
    ``from_dict`` strongly suggest this class carries a ``@dataclass``
    decorator in the original file (outside this view) — confirm; without
    it, ``cls(**...)`` would fail for lack of an ``__init__``.
    ``datetime`` and ``inspect`` are presumably imported at module level.
    """

    # Timestamp associated with the response.
    time: datetime.datetime
    # HTTP status code of the response.
    status: int
    # Parsed response payload.
    content: dict
    # True when the response was served from a local cache.
    from_cache: bool

    def __bool__(self) -> bool:
        # A response is truthy only when it succeeded with HTTP 200.
        return self.status == 200

    @classmethod
    def from_dict(cls, data: dict):
        """Build an instance from a dict, dropping unknown keys.

        Filters ``data`` down to the keys that match the constructor's
        parameters (via ``inspect.signature``) so extra fields in the
        payload don't raise ``TypeError``.
        """
        return cls(**{k: v for k, v in data.items() if k in inspect.signature(cls).parameters})
//
//  NewsManagerPageViewController.h
//  PushNotice
//
//  Created by Golden on 2018/12/25.
//  Copyright © 2018年 Combanc. All rights reserved.
//

#import "WMPageController.h"

/// Paged container for the news/message management section
/// (built on WMPageController).
@interface NewsManagerPageViewController : WMPageController

/// Base URL for backend requests — presumably consumed by the child pages;
/// confirm in the implementation.
@property (nonatomic, copy) NSString *baseUrl;

/// Authentication token — presumably attached to backend requests; confirm.
@property (nonatomic, copy) NSString *token;

@end
import nodemailer from 'nodemailer';
import { TransportOptions } from 'nodemailer';
import { envConfig } from '../utils';

/**
 * Thin wrapper over a nodemailer transport, exposed as a process-wide
 * singleton via {@link MailSender.getInstance}.
 */
export class MailSender {
  transporter: nodemailer.Transporter;
  adminEmail: string;

  /**
   * @param service nodemailer service name (e.g. "gmail")
   * @param host    SMTP host
   * @param user    SMTP account name; also used as the "from" address
   * @param pass    SMTP password
   */
  constructor(service: string, host: string, user: string, pass: string) {
    this.transporter = nodemailer.createTransport({
      service,
      host,
      auth: {
        user,
        pass,
      }
    } as TransportOptions);
    this.adminEmail = user;
  }

  /**
   * Sends an HTML email to one or more recipients.
   *
   * Fix: the promise returned by `sendMail` was previously discarded, so
   * delivery failures were lost (unhandled rejection at best). The promise
   * is now returned so callers can `await` it or attach `.catch()`;
   * existing fire-and-forget callers are unaffected.
   *
   * @param to      single address or list of addresses
   * @param subject subject line
   * @param content HTML body
   * @returns promise resolving with the transport's send result
   */
  sendEmail(to: string|string[], subject: string, content: string): Promise<unknown> {
    const target: string[] = Array.isArray(to) ? to : [to];
    return this.transporter.sendMail({
      from: `"Admin" <${this.adminEmail}>`,
      to: target.join(','),
      subject,
      html: content,
    });
  }

  /**
   * Lazily creates and returns the process-wide singleton, configured from
   * the EMAIL_SENDER_* environment variables and cached on `global`.
   */
  static getInstance(): MailSender {
    const globalContext = (global as any);
    if (!globalContext.$emailer) {
      globalContext.$emailer = new MailSender(
        envConfig('EMAIL_SENDER_SERVICE'),
        envConfig('EMAIL_SENDER_HOST'),
        envConfig('EMAIL_SENDER_USER'),
        envConfig('EMAIL_SENDER_PASS')
      );
    }
    return globalContext.$emailer;
  }
}
Nutritional Composition and Bioactive Properties of Wild Edible Mushrooms from Native Nothofagus Patagonian Forests Nothofagus forests of the Andean Patagonian region are home to numerous wild edible mushroom (WEM) species with interesting organoleptic characteristics, although many of them have unknown nutritional and nutraceutical profiles. The proximal composition, fatty and organic acids, soluble sugars, phenolic compounds, ergosterol, as well as antioxidant and antimicrobial activity of 17 WEMs were analyzed. Carbohydrates, the most abundant macronutrients, varied between 49.00 g/100 g dw (C. magellanicus) and 89.70 g/100 g dw (F. antarctica). Significantly higher values were found for total fat in G. gargal (5.90 g/100 g dw) followed by A. vitellinus (4.70 g/100 g dw); for crude protein in L. perlatum (36.60 g/100 g dw) followed by L. nuda (30.30 g/100 g dw); and for energy in G. gargal (398 Kcal/100g) and C. hariotii (392 Kcal/100g). The most effective extracts regarding the TBARS antioxidant capacity were those of Ramaria. This is the first time that a study was carried out on the chemical composition of G. sordulenta, C. xiphidipus, F. pumiliae, and L. perlatum. The promotion of sustainable use of WEMs, including their incorporation in functional diets that choose WEMs as nutritious, safe, and healthy foods, and their use in an identity mycogastronomy linked to tourism development, requires the detailed and precise nutritional and nutraceutical information of each species. Introduction Edible wild mushrooms are highly available functional foods. Its consumption has been developed and perpetuated in various countries from all over the world . Their commercial and culinary importance is mainly due to their organoleptic properties, such as aroma and flavor, their nutritional qualities, and their medicinal characteristics , due to their high protein and fiber content, essential amino acids, bioactive compounds, and low lipids content . 
Different mushrooms have been studied in search of new therapeutic alternatives, finding that they have bioactive properties and that they constitute rich sources of nutraceuticals molecules , which are responsible for their antioxidant and antitumor properties . Antioxidants from edible natural products are currently widely studied for their ability to protect organisms and cells from damage caused by oxidative stress, which is one of the causes of aging and degenerative diseases . Patagonian Andean forests comprise 3,240,996 h dominated by Nothofagus spp. (N. antarctica, N. dombeyi, and N. pumilio are the most representative species) . The region harbors numerous species of wild fungi that are potentially edible, with high nutritional and Nutritional Characterization Samples of each species were analyzed for nutritional composition (protein, carbohydrates, fat, ash, and energy) using AOAC procedures . The total carbohydrates were obtained by difference, and the energy values were calculated with the equation Energy (kcal) = 4 × (g protein + g carbohydrates) + 9 × (g fat). The results are expressed in kcal per 100 g of dry weight (dw). Nutritional Characterization Samples of each species were analyzed for nutritional composition (protein, carbohydrates, fat, ash, and energy) using AOAC procedures . The total carbohydrates were obtained by difference, and the energy values were calculated with the equation Energy (kcal) = 4 × (g protein + g carbohydrates) + 9 × (g fat). The results are expressed in kcal per 100 g of dry weight (dw). Chemical Composition 2.3.1. Free Sugars Free sugars determination followed the methodology by Barros et al. . Analysis was performed by liquid chromatography (HPLC, Knauer, Smartline 1000 systems, Berlin, Germany), coupled with a refraction index detector (Knauer Smartline 2300). The detected compounds were identified by comparison with the retention times of the standards. Trehalose was used as the internal standard. 
Results are expressed in g/100 g of dry weight (dw). Fatty Acids The fatty acids were identified by gas chromatography with flame ionization detection (GC-FID), as previously described by Pereira et al. . The identification of fatty acids was made according to their relative retention times of the FAME peaks of the sample standards (mixture 37, 47885-U purchased from Sigma). To process the results, we used CSW 1.7 software (DataApex 1.7, Prague, Czech Republic); results are expressed as a relative percentage (%). Ergosterol The ergosterol was quantified after extraction following Vieira Junior et al. . It was determined by high-performance liquid chromatography (HPLC) coupled to a UV detector (280 nm), as described by Cardoso et al. , and was identified and quantified by comparison with the pure chemical standard and expressed in mg/100g dw. Organic Acids Composition The organic acids were determined by high-performance liquid chromatography coupled to a photodiode detector (UFLC-PDA) following the methodology described by Barros et al. . The detection of organic acids was achieved using a DAD system, applying a wavelength of 215 nm (and 245 nm for ascorbic acid). The quantification was carried out by comparing the area of their recorded peaks with the calibration curves obtained from the standards of the respective compound. The results are expressed in mg/100 g (fw). Phenolic Composition Samples of 0.5 g of freeze-dried specimens were used for the extract preparation. They were initially macerated at room temperature with the addition of a solution (30 mL) of ethanol/water (80:20, v/v), for 1 h (150 rpm). Ethanol was removed under reduced pressure. Afterwards, the aqueous phase of both extracts was frozen and lyophilized. The identification and quantification of the phenolic compounds followed the previously optimized methodology , using a Dionex Ultimate 3000 UPLC system (Thermo Scientific, San Jose, CA, USA). 
The DAD and mass spectrometer (LTQ XL mass spectrometer, Thermo Finnigan, San Jose, CA, USA) were working in negative mode. Evaluation of Antioxidant Activity An in vitro assay based on the monitoring of malondialdehyde (MDA)-TBA complexes was carried out as previously reported to measure the extract capacity to inhibit the formation of thiobarbituric acid reactive substances (TBARS). Porcine brain cells were used as biological substrates. The results are expressed as IC 50 values (mg/mL). Evaluation of Antibacterial Activity The extracts were tested against five Gram-negative bacteria, namely, Enterobacter cloacae (ATCC 49741), Escherichia coli (ATCC 25922), Pseudomonas aeruginosa (ATCC 9027), Salmonella enterica subsp. enterica serovar Enteritidis (ATCC 13076), and Yersinia enterocolitica (ATCC 8610), and three Gram-positive bacteria, namely, Bacillus cereus (ATCC 11778), Listeria monocytogenes (ATCC 19111), and Staphylococcus aureus (ATCC 25923). The minimum inhibitory (MIC) and minimum bactericidal concentrations were determined for all bacteria using colorimetric assays, following Pires et al. . The MIC was defined as the lowest concentration inhibiting visible bacterial growth, determined by a change from yellow to pink coloration if the microorganisms are viable. The MBC was defined as the lowest concentration required to kill bacteria. To evaluate the antifungal activity, the methodology described by Heleno et al. , using Aspergillus fumigatus (ATCC 204305) and Aspergillus brasiliensis (ATCC 16404), was used. The organisms were obtained from Frilabo, Porto, Portugal. The minimum inhibitory concentration (MIC) and minimum fungicidal concentration (MFC) were determined by a serial dilution technique using 96-well microplates. The lowest concentrations without visible growth (at the binocular microscope) were defined as the MICs. The lowest concentration with no visible growth was defined as the MFC, indicating 99.5% killing of the original inoculum. 
The commercial fungicide ketoconazole (Frilabo, Porto, Portugal) was used as positive control. Statistical Analysis Three independent samples per mushroom species were analyzed, and the data are expressed as the mean ± standard deviation. All statistical tests were performed at a 5% significance level in RStudio (version 1.1.485-© 2009-2022 RStudio, Inc.) . The homogeneity of variance and normal distribution of the residuals were tested by means of the Shapiro-Wilk and Levene tests, respectively, to fulfill the one-way ANOVA requirements. All dependent variables were compared using Tukey's tests. When normality or heteroscedasticity could not be verified, the variables were Box-Cox transformed before performing the ANOVA. Kruskal-Wallis tests were carried out when a normal distribution and heteroscedasticity were not achieved after Box-Cox transformation. Results and Discussion The obtained chemical compositions and energetic values are shown in Table 1. Protein contents varied between 3.20 g/100 g dw in F. antarctica and 36.60 g/100 g dw in L. perlatum. The top-five values concerning of highest protein content was L. perlatum (36.60 g/100 g dw), L. nuda (30.30 g/100 g dw), H. dusenii (22.20 g/100 g dw), R. patagonica (18.10 g/100 g dw), and C. magellanicus (14.40 g/100 g dw). Comparing with previous studies, Ramaria patagonica and R. botrytis showed similar results than those reported by other authors with a value of 19.68 g/100 g dw and 16.60 g/100 g dw . However, in a Portuguese mushroom study , R. botrytis showed higher protein values (39.8 g/100 g dw) than our results. Flammulina velutipes (17.89 g/100 g dw) and C. aegerita (19.65 g/100 g dw) showed lower levels than those reported by Jacinto-Azevedo et al. . Other studies on G. gargal showed similar results with values of 5.96 and 5.00 g/100 g dw . Previous studies on Cyttaria have reported higher values than those reported here; for example, C. espinosae had values of 17.46 g/100 g dw and C. 
darwini values of 17.20 g/100 g dw . However, C. hariotii showed similar results (3.35 g/100 g dw) than those reported by Toledo et al. . The protein values for A. vittelinus, C. magellanicus, F. antarctica, and F. endoxantha are in concordance with other reports . On a dry weight basis, mushrooms normally contain 19 to 35% protein. Therefore, regarding the amount of crude protein, mushrooms are positioned below most animal meats but well above most other foods, including milk, rice, and wheat . Table 1. Proximate composition (g/100 g) and energetic value (kcal/100 g) of the studied wild mushrooms (mean ± SD). For each mushroom sample, means within a column with different letters differ significantly (p < 0.05). Ash varied from 4.80 g/100 g in P. ostreatus to 32.00 g/100 g in C. magellanicus. Cyttaria hariotii yielded lower values (7.0 g/100 g dw) than what Schmeda-Hirschmann et al. previously reported, and similar values than those reported by Jacinto-Azevedo et al. for C. espinosae (4.90 g/100 g dw). However, lower values were observed in F. velutipes, C. aegerita, and R. botrytis , in G. gargal , and in F. antarctica and F. endoxantha . Similar results to Toledo et al. were found for L. nuda (8.58 g/100 g dw) and R. patagonica (8.47 g/100 g dw), but lower for L. nuda (18.5 g/100 g dw) compared to Barros et al. . The ash content in edible mushrooms ranges from 1 to 29 g/100 g dry matter and comprise a source of essential minerals. Concentrations of P, K, Ca, Na, and Mg constitute more than 56% of the total ash content . Concerning sugar composition (Table 2), mannitol and trehalose were the principal sugars, which is in agreement with the data presented in the literature; they are essential in energetic metabolism and necessary in the synthesis of storage or structural polysaccharides . Mannitol content was significantly higher for A. vitellinus (8.83 g/100 g dw), R. botrytis (6.34 g/100 g dw), and R. patagonica (8.64 g/100 g dw), although absent in F. 
endoxantha, in concordance with Toledo et al. . The support and expansion of the mushroom fruiting bodies is guaranteed by the presence of mannitol . Trehalose predominated in C. xiphidipus (17.60 g/100 g dw), P. ostreatus (15.81 g/100 g dw), C. aegerita (13.54 g/100 g dw), and G. sordulenta (11.25 g/100 g dw), but was absent in R. botrytis and R. patagonica. The ingestion, hydrolysis, absorption, and metabolism of trehalose is highly similar to all the other digestible disaccharides . On the other hand, fructose and one unidentified sugar were predominant in all three Fistulina species (F. endoxantha, F. antarctica, and F. pumiliae) in concordance with previous reports . Fructose was also the predominant sugar in Flammulina velutipes (8.56 g/100 g dw), agreeing with Reis et al. , while it was absent in A. vitellinus, C. magellanicus, L. perlatum, and P. ostreatus, and present in lower abundance in the rest of the species. In terms of total sugar content, F. endoxantha revealed the highest value (33.88 g/100 g dw), while G. gargal the lowest (3.63 g/100 g dw). This is the first study that reports the composition in free sugars of endemic species C. xiphidipus, F. pumiliae, and G. sordulenta. Organic acids comprise a group of mono-, di-, and tricarboxylic acids physiologically occurring as intermediates in a variety of intracellular metabolic pathways, such as catabolism of amino acids, the tricarboxylic acid cycle, and neurotransmitters, as well as in cholesterol biosynthesis . The organic acid composition is presented in Table 2. Most of the analyzed specimens had oxalic, malic, and fumaric acids in their composition. On the other hand, quinic, shikimic, citric, and succinic acids were present just in a few species. Citric acid was detected in high amounts in C. magellanicus (57.40 mg/100 g dw), R. patagonica (57.31 mg/100 g dw), A. vitellinus (31.11 mg/100 g dw), and C. hariotii (23.41 mg/100 g dw). 
Organic Acids Sugars Within polyunsaturated fatty acids, ω-3 and ω-6 are the most abundant in mammals. Its precursors, α-linolenic acid (ALA) and linoleic acid (LA), are considered essential fatty acids, as the body requires them for normal operation but which cannot be synthesized endogenously . Within the series of omega-3, the most important in the human diet are eicosapentaenoic acid (EPA) and docosahexaenoic acid (DHA), both difficult to synthesize endogenously and with important functions in the human body. DHA is a structural fatty acid, since it forms part of cell membranes and is also important for visual (it makes up 20% of all fatty acids present in the retina) and neuronal development during gestation and early childhood . In our study, C. hariotii showed high amounts of DHA (7.74%). Interestingly, some ethnomycological reports showed that in the Selknam, Ahonikenk, and Kawesqar native Patagonian populations, after giving birth and while the quarantine lasted, the mothers lived exclusively on Cyttaria fungus (C. darwinii and C. hariotii). In the omega-6 series one has to pay special attention to γ-linolenic acid (GLA) and arachidonic acid (AA), important for prostaglandin production and anti-inflammatory activity . From all the above, the importance of supplementing a diet with fatty acids, which are clearly present in edible mushrooms, can be deduced, since their current intake is insufficient. The ergosterol content (Table 4) ranged from 0.40 mg/100 g in C. hariotii to 123.57 mg/100 g dw in G. gargal. In other studies of edible and medicinal mushrooms, similar differences have been reported, ranging from traces for Armilaria mellea, to values of 25.71 mg/100 g dw for Laetiporus sulphureus or 445.32 mg/100 g dw for Macrolepiota procera. The differences in ergosterol and other nutritional and bioactive compounds depend on the species, stage of development, tissues, nutrient substrate, and microclimate . Vieira Junior et al. 
showed that ergosterol biosynthesis and its bioconversion into ergocalciferol were also affected by the cultivation process in Agaricus subrufescens production, showing differences between field culture and controlled conditions. Ergosterol is metabolized into a prohormone-vitamin D. Its action is related with bone mineral metabolism and with the balance of phosphorus and calcium, related to various mechanisms such as secretion and effect of insulin, regulation of the renin-angiotensin-aldosterone system, endothelial function, cell cycle control and apoptosis, immunological self-tolerance, and immune response against infections, among other effects . For these reason, edible mushrooms are promising sources of vitamin D, and thus able to improve food supplements for human consumption. Species p-Coumaric Acid Gallic Acid p-Hidroxibenzoic Acid Protocatechuic Acid Table 6 shows the in vitro antioxidant activity of the studied species. Ramaria patagonica (156 µg/mL), R. botrytis (167 µg/mL), G. sordulenta (299 µg/mL), and A. vitellinus (551 µg/mL) presented the best results in the TBARS assays; meanwhile, L. perlatum (90 µg/mL), L. nuda (93 µg/mL), A. vitellinus (113 µg/mL), and G. sordulenta (155 µg/mL) presented the best results in OxHLIA, all with IC 50 values ≤ 1000 µg/mL. The result of the antioxidant activity in OxHLIA for L. perlatum is in concordance with its highest total levels of phenolic compounds. That the antioxidant activity of mushrooms correlates with the phenolic compounds content has already been reported . Fistulina antarctica comparatively presented the lowest antioxidant activity (IC 50 of 2627 µg/mL for TBARS and of 1066 µg/mL for OxHLIA), in concordance with Toledo et al. . Lepista nuda showed lower values (711 µg/mL) for the TBARS assay compared to those reported by other authors, with IC 50 values of 5800 µg/mL and 6100 µg/mL . The high antioxidant activity of Ramaria is in agreement with the literature; for example, for R. flava, R. 
botrytis, and R. subaurantiaca with DPPH assays by Jacinto-Azevedo et al. , and for R. patagonica with different assays by Toledo et al. . The antioxidant activity by DPPH or reducing the power test was also evaluated applying different extracting methodologies in Grifola samples; for example, Brujin et al. , using different solvents or heat treatments in Grifola gargal, and Postemsky et al. , using wheat grain biotransformed with mycelium of G. gargal and G. sordulenta. Among the three edible Rusulla species, R. integra ethanolic extract showed the best antihemolytic activity, with an IC 50 value of 139 ± 3 µg/mL, also for a 60 min ∆t . This is the first study on the anti-hemolytic capacity of wild edible species. Table 6. Antioxidant activity of the mushroom extracts measured by inhibition of lipid peroxidation (TBARS) and the oxidative hemolysis inhibition assay (OxHLIA). IC 50 values were expressed in µg/mL. na: no activity (∆t values less than 60 min were obtained). For each mushroom sample, means within a column with different letters differ significantly (p < 0.05). All extracts were tested against ten bacteria and fungi considered food contaminants (Table 7). Each mushroom species showed different intensities of positive antimicrobial activity against the tested microorganisms. The antibacterial effects were more effective against Salmonella enterocolitica, Yersinia enterocolitica (Gram-negative bacteria), and Staphylococcus aureus (Gram-positive bacteria). The antifungal effect was more effective in Aspergillus brasiliensis. Among these active extracts, those produced by A. vitellinus (MIC 1.25 mg/mL) F. velutipes (MIC 2.5 mg/mL), G. gargal (MIC 2.5 mg/mL), P. ostreatus (MIC 2.5 mg/mL), and R. botrytis (MIC 1.25 mg/mL) exhibited a good inhibitory activity against Yersinia enterocolitica; and the extracts from G. sordulenta and R. botrytis (MIC 0.3 mg/mL) against Staphylococcus aureus. 
The Gram-negative bacteria, Enterobacter cloacae, Escherichia coli, and Pseudomonas aeruginosa, and the Gram-positive bacteria Bacillus cereus and Listeria monocytogenes were less sensitive to the extracts used. C. hariotii showed no activity against the analyzed bacteria. None of the extracts presented bactericidal and fungicidal activity. All these results must be considered taking into account the already established fact that the chemical composition of mushrooms could vary with the genetic structure and strains within the same species. Our ranks also considered dehydrated, complete fruiting bodies, in the mature stage, with no stratification by site conditions nor post-harvest treatments. Maturation stage at harvest, a specific part of the mushroom analyzed (stem, cup, lamellae), and environmental variables, such as soil composition, as well as the postharvest preservation method (freeze dry, oven-dry, cooled, fresh) and cooking process may affect their chemical composition . Conclusions This study highlights the value of the native and endemic mushrooms of the Patagonian forest, regarding, for example, their antioxidant qualities, as in the case of Ramaria spp., or their energetic value, as in the case of G. gargal and C. hariotti. Species such as C. aegerita, F. velutipes, L. nuda, and P. ostreatus demonstrated the importance of edible mushroom with a cosmopolitan distribution growing in native forests, resulting in an invaluable source of food with high protein values, low contents of fat, along with other bioactive compounds with remarkable antioxidant and antimicrobial activity. The data provided by this study, along with previous ones, will strengthen and support the inclusion of new species of wild edible fungi in the Argentine food code. In this way, we expect to revalue these resources as non-timber forest products from Patagonia, promoting multiple and sustainable uses of native forests. Institutional Review Board Statement: Not applicable. 
Informed Consent Statement: Not applicable. Data Availability Statement: The data presented in this study are available upon request from the corresponding author.
/**
 * LimitRangeSpec defines a min/max usage limit for resources that match on kind.
 **/
@ApiModel(description="LimitRangeSpec defines a min/max usage limit for resources that match on kind.")
public class IoK8sApiCoreV1LimitRangeSpec {

  @ApiModelProperty(required = true, value = "Limits is the list of LimitRangeItem objects that are enforced.")
  @Valid
  /**
   * Limits is the list of LimitRangeItem objects that are enforced.
   **/
  private List<IoK8sApiCoreV1LimitRangeItem> limits = new ArrayList<IoK8sApiCoreV1LimitRangeItem>();

  /**
   * Limits is the list of LimitRangeItem objects that are enforced.
   * @return limits
   **/
  @JsonProperty("limits")
  @NotNull
  public List<IoK8sApiCoreV1LimitRangeItem> getLimits() {
    return limits;
  }

  /** Replaces the enforced limits list. */
  public void setLimits(List<IoK8sApiCoreV1LimitRangeItem> limits) {
    this.limits = limits;
  }

  /** Fluent variant of {@link #setLimits(List)}; returns this instance. */
  public IoK8sApiCoreV1LimitRangeSpec limits(List<IoK8sApiCoreV1LimitRangeItem> limits) {
    setLimits(limits);
    return this;
  }

  /** Appends one item to the enforced limits list; returns this instance. */
  public IoK8sApiCoreV1LimitRangeSpec addLimitsItem(IoK8sApiCoreV1LimitRangeItem limitsItem) {
    limits.add(limitsItem);
    return this;
  }

  @Override
  public String toString() {
    StringBuilder out = new StringBuilder("class IoK8sApiCoreV1LimitRangeSpec {\n");
    out.append(" limits: ").append(toIndentedString(limits)).append("\n");
    out.append("}");
    return out.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private static String toIndentedString(java.lang.Object obj) {
    return obj == null ? "null" : obj.toString().replace("\n", "\n ");
  }
}
Study gives personalized predictions for the disease's next move Both the doctors who treat multiple sclerosis and the people who experience it agree that the disease is highly unpredictable. While that remains true for the disease in general, a new study introduces a method for making personalized predictions. The research is a culmination of an eight-year partnership between BYU statistician David Engler and researchers at Massachusetts General Hospital. "The goal all along has been to develop personalized transition probabilities with regard to where they are in the disease process and where they're most likely to go in the near future," Engler said. MS puts the body's immune system against its nervous system. Forecasting the course of MS in an individual is a challenge because the disease can take so many different turns. And how a patient feels today is not very predictive of how they will feel in the near future. Instead, Engler's method is based heavily on the individual patient's history. Every six months, people with MS share certain information with their doctor. First, they answer yes or no whether they experienced a "relapse." Second, they estimate the intensity of their symptoms on a 21-point scale (0, 0.5, 1.0 - 9.0, 9.5, 10). A doctor using the model would simply plug in the relapse and intensity information for the past two checkups, along with a few bits of demographic data. The model then returns the odds that MS will retreat to a milder stage, advance to a more aggressive stage, or maintain the status quo in the next six months. "If the model suggests you are likely to be in a more debilitated state six months from now, your doctor might recommend a more rigorous treatment regime," Engler said. The model can also reduce fears in other cases. Alison Wadsworth received her diagnosis 25 years ago and has continued a very active life. She says the literature she received after her diagnosis painted very dire pictures of the future. 
"I would have loved knowing that there are many of us that manage to lead an almost-normal life with diet and exercise and lifestyle changes rather than becoming dependent on medicine that is very expensive," Wadsworth said. To test the model's usefulness, they applied it to 1,123 MS patients in Boston. As the authors write in Statistical Methods in Medical Research, their approach is well-suited to identify predictors of a transition from the relapse-remitting phase to the secondary progressive phase of the disease. "This is important because currently, the majority of MS treatments are effective in preventing new relapses, however to date, most of these therapies have shown little impact on overall disease progression," said Tanuja Chitnis, a study co-author. "This tool may help to identify new treatments which improve overall disability measures." Before he began teaching at BYU, Engler earned a Ph.D. in biostatistics from Harvard in 2007 along with his co-author, Brian Healy. This is the fifth study on MS that they've authored together. One of those studies demonstrated how to cut down on false diagnoses. Another demonstrated how to measure the impact that medication is having for MS patients. "As with anyone publishing medical literature, you hope that what you find makes a difference out there," Engler said. "Every day, there are new medical findings, and you hope that proven methods are implemented." This research was supported by funding from the National Institutes of Health.
def last_snapshot(self) -> "Snapshot | None":
    """Return the most recent snapshot, or ``None`` if none exist.

    Returns:
        The last element of ``self.snapshots``, or ``None`` when the
        snapshot list is empty.  (The original annotation claimed a
        ``Snapshot`` was always returned, which was incorrect for the
        empty case.)
    """
    if not self.snapshots:
        return None
    return self.snapshots[-1]
/**
 * AppLaunchUser contains user information which is used by AppLaunch APIs.
 * <p>
 * Instances are created only through the nested {@link Builder}; the user id
 * and the custom attribute table are captured at build time.
 */
public class AppLaunchUser {

    // Identifier of the user, set via Builder#userId.
    private String userId;
    // Custom user attributes; values may be String, Boolean or Integer
    // (auto-boxed by the overloaded custom(...) builder methods).
    private Hashtable parameters;

    // Private: construction goes through the Builder only.
    private AppLaunchUser(Builder builder) {
        this.userId = builder.userId;
        this.parameters = builder.parameters;
    }

    protected String getUserId() {
        return userId;
    }

    protected Hashtable getParameters() {
        return parameters;
    }

    /**
     * Builder class of AppLaunchUser.
     */
    public static class Builder {
        private String userId;
        private Hashtable parameters = new Hashtable();

        /**
         * Initializer for builder class of AppLaunchUser.
         */
        public Builder() {
        }

        /**
         * Use this method to set userID.
         *
         * @param userId identifier of the user
         * @return this builder, for chaining
         */
        public Builder userId(String userId) {
            this.userId = userId;
            return this;
        }

        /**
         * This is an optional method which can be used to set any custom user attribute of String type.
         *
         * @param key Key value
         * @param value Value of String type
         * @return this builder, for chaining
         */
        public Builder custom(String key, String value) {
            parameters.put(key, value);
            return this;
        }

        /**
         * This is an optional method which can be used to set any custom user attribute of boolean type.
         *
         * @param key Key value
         * @param value Value of boolean type
         * @return this builder, for chaining
         */
        public Builder custom(String key, boolean value) {
            parameters.put(key, value);
            return this;
        }

        /**
         * This is an optional method which can be used to set any custom user attribute of integer type.
         *
         * @param key Key value
         * @param value Value of integer type
         * @return this builder, for chaining
         */
        public Builder custom(String key, int value) {
            parameters.put(key, value);
            return this;
        }

        /**
         * This method builds AppLaunch User object.
         *
         * @return AppLaunchUser object.
         */
        public AppLaunchUser build() {
            return new AppLaunchUser(this);
        }
    }
}
/**
 * Request object for the ALB (Application Load Balancer)
 * "UpdateServerGroupServersAttribute" API, version 2020-06-16.
 * <p>
 * Auto-generated SDK code: setters mirror each field into the query-parameter
 * map that is sent on the wire.
 *
 * @author auto create
 * @version
 */
public class UpdateServerGroupServersAttributeRequest extends RpcAcsRequest<UpdateServerGroupServersAttributeResponse> {

    // Idempotency token supplied by the caller.
    private String clientToken;

    // Identifier of the server group whose members are updated.
    private String serverGroupId;

    // Backend servers whose attributes (weight, description, ...) change.
    private List<Servers> servers;

    // When true, the request is validated but not executed.
    private Boolean dryRun;

    public UpdateServerGroupServersAttributeRequest() {
        super("Alb", "2020-06-16", "UpdateServerGroupServersAttribute", "alb");
        setMethod(MethodType.POST);
        // Generated endpoint wiring: reflectively injects the product endpoint
        // tables into the base AcsRequest. Failures are deliberately ignored
        // so that older core-SDK versions without these fields still work.
        try {
            com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointMap").set(this, Endpoint.endpointMap);
            com.aliyuncs.AcsRequest.class.getDeclaredField("productEndpointRegional").set(this, Endpoint.endpointRegionalType);
        } catch (Exception e) {}
    }

    public String getClientToken() {
        return this.clientToken;
    }

    public void setClientToken(String clientToken) {
        this.clientToken = clientToken;
        if (clientToken != null) {
            putQueryParameter("ClientToken", clientToken);
        }
    }

    public String getServerGroupId() {
        return this.serverGroupId;
    }

    public void setServerGroupId(String serverGroupId) {
        this.serverGroupId = serverGroupId;
        if (serverGroupId != null) {
            putQueryParameter("ServerGroupId", serverGroupId);
        }
    }

    public List<Servers> getServers() {
        return this.servers;
    }

    // Flattens the server list into indexed query parameters of the form
    // "Servers.<n>.<Field>" (1-based index), as required by the RPC protocol.
    public void setServers(List<Servers> servers) {
        this.servers = servers;
        if (servers != null) {
            for (int depth1 = 0; depth1 < servers.size(); depth1++) {
                if (servers.get(depth1) != null) {
                    putQueryParameter("Servers." + (depth1 + 1) + ".ServerType", servers.get(depth1).getServerType());
                    putQueryParameter("Servers." + (depth1 + 1) + ".Port", servers.get(depth1).getPort());
                    putQueryParameter("Servers." + (depth1 + 1) + ".Description", servers.get(depth1).getDescription());
                    putQueryParameter("Servers." + (depth1 + 1) + ".ServerIp", servers.get(depth1).getServerIp());
                    putQueryParameter("Servers." + (depth1 + 1) + ".Weight", servers.get(depth1).getWeight());
                    putQueryParameter("Servers." + (depth1 + 1) + ".ServerId", servers.get(depth1).getServerId());
                }
            }
        }
    }

    public Boolean getDryRun() {
        return this.dryRun;
    }

    public void setDryRun(Boolean dryRun) {
        this.dryRun = dryRun;
        if (dryRun != null) {
            putQueryParameter("DryRun", dryRun.toString());
        }
    }

    /**
     * One backend server entry: plain data holder whose fields map 1:1 onto
     * the "Servers.<n>.*" query parameters emitted by setServers.
     */
    public static class Servers {

        private String serverType;

        private Integer port;

        private String description;

        private String serverIp;

        private Integer weight;

        private String serverId;

        public String getServerType() {
            return this.serverType;
        }

        public void setServerType(String serverType) {
            this.serverType = serverType;
        }

        public Integer getPort() {
            return this.port;
        }

        public void setPort(Integer port) {
            this.port = port;
        }

        public String getDescription() {
            return this.description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public String getServerIp() {
            return this.serverIp;
        }

        public void setServerIp(String serverIp) {
            this.serverIp = serverIp;
        }

        public Integer getWeight() {
            return this.weight;
        }

        public void setWeight(Integer weight) {
            this.weight = weight;
        }

        public String getServerId() {
            return this.serverId;
        }

        public void setServerId(String serverId) {
            this.serverId = serverId;
        }
    }

    @Override
    public Class<UpdateServerGroupServersAttributeResponse> getResponseClass() {
        return UpdateServerGroupServersAttributeResponse.class;
    }
}
<filename>src/module.ts import { DataSourcePlugin } from '@grafana/data'; import { Datasource } from './datasource'; import { InfinityConfigEditor } from './config.editor'; import { QueryEditor } from './query.editor'; import { InfinityAnnotationCtrl } from './annotations.editor'; export const plugin = new DataSourcePlugin(Datasource) .setConfigEditor(InfinityConfigEditor) .setQueryEditor(QueryEditor) .setAnnotationQueryCtrl(InfinityAnnotationCtrl);
Experimental determination of threshold dose in photodynamic therapy in normal rat liver Using normal rat liver we investigated the depth of necrosis induced by photodynamic therapy when different light doses and photosensitizer (Photogem®) concentrations were applied. All experiments were done with a fluence rate of 250 mW/cm2. Photosensitizer concentration was varied among 1.0, 1.5, 2.0, and 5.0 mg/kg of body weight, and it was administered through the left tail vein. For each photosensitizer concentration the light dose was varied among 10, 50, 100, 150, and 200 J/cm2. Each experimental point was obtained using five animals. The depth-of-necrosis analysis allows us to determine the threshold dose and compare its value with existing results in the literature. Our result suggested a value about 3 times higher than the conventionally adopted one, indicating the dependence of this value on the employed photosensitizer concentration. The use of simple models to understand basic features of PDT (Photodynamic Therapy) may contribute to the solid establishment of dosimetry in PDT, enhancing its use in the clinical management of cancers and other lesions.
A Unified Continuous Learning Framework for Multi-modal Knowledge Discovery and Pre-training Multi-modal pre-training and knowledge discovery are two important research topics in multi-modal machine learning. Nevertheless, none of existing works make attempts to link knowledge discovery with knowledge guided multi-modal pre-training. In this paper, we propose to unify them into a continuous learning framework for mutual improvement. Taking the open-domain uni-modal datasets of images and texts as input, we maintain a knowledge graph as the foundation to support these two tasks. For knowledge discovery, a pre-trained model is used to identify cross-modal links on the graph. For model pre-training, the knowledge graph is used as the external knowledge to guide the model updating. These two steps are iteratively performed in our framework for continuous learning. The experimental results on MS-COCO and Flickr30K with respect to both knowledge discovery and the pre-trained model validate the effectiveness of our framework. Image based Uni-modal Dataset Text based Uni-modal Dataset a dog running with frisbee in its mouth through the snow. a dog following a man on his horse in a field. a man throwing a frisbee with a dog catching it. Continuous Learning horse man dog frisbee a dog running with frisbee in its mouth through the snow. a man throwing a frisbee with a dog catching it. a dog following a man on his horse in a field. Figure 1: The unified framework which unifies knowledge discovery and multi-modal pre-training. Multi-modal Knowledge Graph To this end, we propose to integrate knowledge discovery and knowledge guided multi-modal pretraining into a unified continuous learning framework in this paper. The overall architecture is presented in Figure 1. We take open-domain unstructured data of images and texts as input to align the two modalities. 
A multi-modal knowledge graph is maintained as the foundation of our framework to support knowledge discovery and model pre-training. For knowledge discovery, a pre-trained model is used to extract knowledge units from the uni-modal datasets and update the knowledge graph by linking multi-modal semantic units. For model pre-training, the knowledge graph is used as the external knowledge to guide the updating of the multi-modal model. In practice, the multi-modal graph includes three kinds of nodes to cover multiple levels of semantic units, namely, image, sentence and concept (objects in the vision side and phrases in the language side). Knowledge discovery is then treated as a link prediction task on the knowledge graph taking pre-training model as a probe. Model training is then guided by the links predicted in the knowledge discovery process. These two steps are performed iteratively for continuous learning. In the optimization, we design confidence score aware soft labels and add regularization to mitigate the problem of error propagation. For knowledge discovery, we evaluate the quality of edges identified in the multi-modal knowledge graph. For the pre-trained model, we evaluate its performance on one typical downstream task, i.e., image-text retrieval. Experiments on MS-COCO and Flickr30K demonstrate the effectiveness of our proposed framework. We also configure the noisy uni-modal datasets to validate the robustness of our model and results show that the framework can perform reliable model training even in the noisy environment. Related Work Vision-Language Representation Learning. Following BERT , UNITER , Unicoder , ERNIE-ViL and VinVL pre-train the vision-language transformer model on the large-scale annotated image-sentence pairs to learn the knowledge of the inter-modality links. In language pretraining, explicitly incorporating external knowledge to enhance model pre-training is investigated by ERNIE and widely explored in recent years . 
The knowledge facilitates model pre-training by an improved structural masked language model to enhance the modeling in structure, such as syntactic and semantic. In the vision-language pre-training, ERNIE-ViL and ROSITA integrate the knowledge of concepts from image and sentence to enhance the semantic alignments. In our work, we introduce a multi-modal knowledge graph in the dataset where the concepts, namely, phrases and objects, are unified. Knowledge Discovery via Link Prediction. The construction of knowledge graph aims to predict the missing relation or entity in the triple . In multi-modal knowledge graph, constructs datasets to predict the multi-relational links between entities associated with numerical and visual data. VisualSem adopts CLIP to identify semantic concepts from images. To test the model's generalization ability, presents an open-world knowledge graph completion model to predict the links among entities unseen in training set. These works concentrate on knowledge discovery with the fixed multi-modal model. Different from these works, we perform knowledge discover and multi-modal model training in a iterative way, thus the knowledge graph is obtained with the progressively improved multi-modal model. Pseudo Labeling Given the class labels, pseudo-labeling aims to pick a pseudo one for unlabeled samples with a model trained on labeled data. produces pseudo-labels from the prediction of a trained model. assigns labels for unlabeled samples through the propagation on the graphs. incorporates confidence scores for unlabeled samples on the density of a local neighborhood. considers the uncertainty of the confidence score for better network calibration. In the classification based pseudo labeling, the label is determined by the threshold. The same method in link assignment leads to popular nodes dominate the graph which degrades the graph quality and hinders the training of nodes with less edges. 
Thus, we estimate the popularity and devise the label-aware threshold for link assignment. Continuous Learning Framework The workflow of our continuous learning framework is presented in Algorithm. 1. Given two unimodal datasets (images and texts) and an initialized version of multi-modal pre-training model, we aim to deliver a multi-modal knowledge graph and an updated version of the pre-training model. In each iteration t, the knowledge graph and the pre-training model can be updated through knowledge discovery and model training. Task Preliminary Uni-modal Datasets. Given two Uni-modal datasets of images I and texts T , we construct a knowledge graph and train the multi-modal pre-training model. Multi-modal Knowledge Graph. We maintain a graph G to store the multi-modal knowledge. It has four kinds of nodes: image I ∈ I and sentence T ∈ T in global level, object O and phrase P in local level. It has two kinds of edges: intra-modality ones (L (O,I) , L (P,T ) ) and inter-modality ones Knowledge Discovery. We identify cross-modality edges, i.e., L (I,T ) , L (O,P ) , in the multi-modal knowledge graph for knowledge discovery. This starts with a multi-modality pre-training model F θ , and it assigns confidence scores in to cross-modality edges as Eq. (1). Multi-modal Pre-training. We sample cross-modality edges ({L t (I,T ) ,L t (O,P ) }) from the multimodal knowledge graph as supervisions for continuous multi-modal pre-training. Knowledge Discovery as Link Prediction on Multi-modal Graph We treat knowledge discovery as the task of link prediction on the multi-modal graph on two semantic levels, namely, global level (image and sentence) and local level (object and phrase). Global Level Link Prediction Given a node pair that contains an image I and a sentence T , the pre-trained model is employed to compute a confidence score to determine the existence of the edge. This is regarded as a classification problem and a threshold λ is introduced to make the decision . 
The calculation is shown in Label-aware Link Assignment (LA). After link prediction on the graph, images are labeled with connected sentences, or vice versa. Following this interpretation, we can regard I as input and T as label in Eq. (2). Some nodes (image or sentences) in the graph might be more popular than others. Nodes with less edges suffer from the problem of under-training. And links connected to these kinds of nodes tend to obtain lower confidence scores. To mitigate this problem, we propose to set the threshold dynamically for a link considering the popularity of nodes (also known as labels) it connects Algorithm 1 The overall procedure of our framework. Input: Unimodal datasets I and T . A pre-trained model F θ . The maximal iteration step t max . 28: F t θ := F θ , t := t + 1. 29: end while to. First, we estimate the confidence score P L = 1 I of the label to measure its popularity E(I) as Eq. (3). We assume sentence T is sampled from an empirical (uniform) distribution and P T |I is thus constant. In addition, we add the power hyperparameter µ to compute the corresponding label-aware threshold λ(I). So is for sentence T . Bi-label Link Assignment (BL). For a cross-modality edge, both nodes it connects obtain popularity scores. Thus, we propose to consider thresholds of both nodes for link prediction and classify links into two categories, namely strong and weak ones. If the confidence score of a link is higher than both thresholds, it is treated as a strong one. If the score is only higher than one threshold, it is a weak one. We denoteL t (I,T ) = 0.5 for weak links. Overall, the links between image I and sentence T can be summarized as Eq. (5). Local Level Link Prediction For the local level, we link concepts in different modalities. For computation efficiency, we only consider links between objects and phrases whose global nodes (image and sentence) are connected in the graph. 
On the language side, we utilize spacy 1 for noun phrases identification and filter out those with low frequency to form a phrase set. On the vision side, an object detector is used to locate objects . Following , the representation of phrase is computed through mean-pooling of its tokens, then we measure the cosine similarity between the phrase and all objects in image I. Softmax is utilized to normalize the similarity scores. At last, the object whose similarity score is larger than λ C would be linked to the phrase. Multi-modal Pre-training Based on the constructed multi-modal knowledge graph, we sample cross-modality links for model training. Links in the graph are identified automatically without human monitoring, therefore it is likely to contain noisy supervision. In order to mitigate the problem of error propagation, we propose a confidence score aware mechanism for model pre-training. We use image-text matching as the pre-training task. Note that, our framework is compatible to include other pre-training tasks. Graph Structure Enhanced Representation Learning For an input image or sentence, we learn their representations taking the graph structure into consideration. For each node (image or sentence), two-hop neighbors are considered. Take image node as an example, the directly linked sentences and phrase nodes connected to these sentences are included. Therefore, we integrate multiple levels of semantic information into the learning process. In practice, the linked sentences N I = {T |L t (I,T ) = 1} and corresponding phrases are concatenated to the original image to form an input sequence. The phrases share the token embedding with the sentence, and we add a new segment embedding vector to denote that it shares the semantics with the image instead of the sentence. Confidence Score Aware Training Instead of directly using hard labels as supervision we use confidence scores of links to construct soft labels for training. 
Losses are computed in both global (image-text) and local (phrase-object) levels. Global Level Loss. There are three types of cross-modality edges between nodes according to the confidence score, namely, strong one, weak one and none. We construct a triplet for loss computation. Take the image as an example. For each image I, we randomly select a linked sentence node T + ∈ {T |L t (I,T ) = 1} to compose a positive pair. If there is no T ∈ T satisfies the requirement, the sentence is sampled from the weak linked sentences T + ∈ {T |L t (I,T ) = 0.5}. The negative sentence T − is sampled from {T |L t (I,T ) = 0} that do not link to image I. For these sampled links, we sharpen the confidence score by the exponential operation with hyperparameter γ. We observe that weak links are more likely to be mistakenly assigned than strong ones, another hyperparameter µ < 1 is thus adopted on the group of weak ones. The label construction is shown in Eq. (7). The loss is shown in Eq. (8). Local Level Loss. For the linked pairs of objects and phrases, we regard the phrase P as the anchor and collect all of O ∈ I for the cross-entropy computation. In this level, we simply take Y t (O,P ) =L t (O,P ) and optimize with the following Eq. (9). Regularization to Reduce Uncertainty of Confidence. To ensure the reliability of the linking confidence scores, we further reduce the uncertainty during the training. MC Dropout is employed for uncertainty estimation. For each input pair (I, T ), we perform forward computation twice of the model with different dropout and get the confidence scores F θ,1 (I, T ) and F θ,2 (I, T ) which can be regard as the two bernoulli distributions, then utilize the Jensen-Shannon divergence to minimize the distance which is shown in Eq. (10). The overall training loss of our model has three components as Eq. (11) with hyperparameters λ IT , λ C , and λ U . 
Experiment Setup To validate the effectiveness of the proposed framework, we evaluate the performance on two tasks, knowledge discovery and cross-modal image-text retrieval under transductive setting. For knowledge discovery, we evaluate the performance of link prediction for both global level (image and sentence) and local level (object and phrase). Dataset. The evaluation is performed on two benchmark datasets, MS-COCO and Flickr30K . In test split, MS-COCO and Flickr30K contain 5,000 and 1,000 images respectively, and each image is annotated with 5 text descriptions. In addition to text descriptions, Fickr30K also provides bounding box annotation for each phrase in the description, which enables local level link prediction. In total, there are 16,576 pairs of phrase and object. Evaluation Metrics. For knowledge discovery, we follow and use recall (R), precision (P) and F1 (F) as evaluation metrics for global level link prediction; while for local level link prediction, since it is equivalent to phrase grounding task, we follow and use the accuracy as the evaluation metric. The accuracy is defined as the fraction of query phrases whose predicted bounding box overlaps ground-truth box with IoU larger than 0.5. For image-text retrieval, We report recall at K (R@K) following the metrics in , Implementation. We employ VinVL(base) as our backbone. In our continuous learning framework, we need to initialize the multi-modal graph for an iterative optimization. As VinVL (base) are not pre-trained with image-text matching task, CLIP is used to produce the initial linking confidence score between image and text for graph initialization. The hyperparameters are selected according to the results on randomly sampled 100 image-sentence pairs in validation set. During knowledge discovery, for computing image (sentence) popularity, we sample the counterpart sentences (images) which earn top-K tiers. In MS-COCO, we set K = 7, 2 and the power parameter µ = 0.98, 1.0 in Eq. 
(4) for image and sentence, respectively. While in Flickr30K, we set K = 10, 2 and power parameter µ = 0.96, 1.0. For ablation study ( §4.3) and other experiments ( §4.4), we set the hyperparameters of K and µ as the same with that in popularity computation for equal comparison. For computation efficiency, during the knowledge discovery, instead of computing the linking confidence score for each pair across the unimodal datasets, which needs #T × #I times of calculation, we firstly perform cross-modal retrieval with CLIP and select the top 40 candidate sentences (images) for each image (sentence), then the knowledge discovery of each image or sentence is limited in the corresponding 40 pairs. The number of calculation is reduced to (#T + #I) × 40. Performance Comparison Compared Baselines. We compare the proposed framework with several vision language pre-trained models, including CLIP(ViT-B/32) and VinVL(base) . For knowledge discovery, in addition to CLIP and VinVL, we also compare with two weakly supervised phase grounding models, i.e., CLPG and CKD on Flickr30K in terms of local level link prediction. For Image-text retrieval, since our framework does not require any paired image-text data for training, for a fair comparison, the CLIP is evaluated under zero-shot setting, and VinVL is trained with the links obtained from CLIP. Results and Analysis. Table 1 summarizes the performance comparison results. For knowledge discovery, our method achieves remarkable improvement compared with CLIP and VinVL for both global level and local level link prediction, showing the effectiveness of our method in knowledge discovery. It is worthwhile to mention that our method also outperforms weakly-supervised phase grounding methods CLPG and CKD for local level link prediction without any supervision. For image-text retrieval, similar observations can be founded. Our method outperforms both CLIP and VinVL, attaining the best performances in terms of image-text retrieval. 
The superior performances attained on both knowledge discovery and image-text retrieval demonstrate the effectiveness of the proposed iterative vision-text pre-training framework. Ablation Study We further investigate the effectiveness of different modules in our framework, which includes knowledge discovery (KD), confidence score aware loss (CAL), graph structure enhanced representation learning (GL) and uncertainty reducing (UR) in multi-modal pre-training. Table 2 summarizes the ablation study results on Flickr30K using VinVL as the base model. From the results, we have the following observations. First, when adding each of the modules, the performances of both knowledge discovery and image-text retrieval have been improved, which suggests the effectiveness of these components. Second, it achieves the best performance when using all components, demonstrating these four modules are complementary to each other. Discussion Continuous Learning Performance w.r.t Iteration Step. In the test set of Flickr30K , we evaluate each iteration step of the framework to validate the effectiveness of the proposed continuous learning strategy. The performance with respect to global level knowledge discovery and imagetext retrieval is shown in Figure 2. From the table, we can see that as the iteration goes on, the overall performance in both tasks gets better in the early stage and is maintained in the later. This demonstrates that (1) the knowledge discovery and the multi-modal model can benefit each other for successive improvement; (2) despite the error in knowledge discovery is inevitable during the iteration, our framework with confidence score aware training can alleviate this issue hence preventing the failure of training. (a) Performance of global level knowledge discovery with respect to iteration t. X-axis is the iteration step t, Y-axis is the score of precision, recall and f1. 
Sentence-to-Image Image-to-Sentence (b) Performance in image-text retrieval with respect to iteration t. X-axis is the iteration step t, Y-axis is the score of R@1. Figure 2: Evaluation of each iteration step of the continuous learning framework in the test set of Flickr30K . Effects of Different Link Assignment Strategies. As mentioned in §3.2.1, we have introduced different link assignment strategies in the process of knowledge discovery. These strategies include (1) the comparison one, absolute threshold based link assignment (AT) in Eq. (2), (2) label-aware link assignment (LA) and bi-label link assignment (BL) that can be successively added to VinVL. To investigate the effects of different strategies, we conduct experiments on the Flickr30K dataset and the experimental results are reported in Table 3. To demonstrate that the proposed link assignment strategies are also effective in dealing with the imbalance problem in link prediction, we report the proportion of the links from popular nodes (%PP) that have more than 10 links. From the results, we find that: (1) The problem of imbalance is serious in VinVL and VinVL+AT. Through integrating LA and BL into VinVL, no nodes will have more than 10 links as the %PP is reduced to 0. (2) Compared to VinVL and AT, LA and BL successively improve the performance of link prediction as well as the image-text retrieval. Performance of Inductive Learning. We then test the generalization ability of our pre-trained model under inductive learning where neither the pseudo graph nor the graph modeling is available. In our implementation, we randomly sample 1,000 images and the corresponding 5,000 sentences from the training set of Flickr30K to construct two unimodal datasets for training, and test the model in the Flickr30K. For comparison, we also train the CLIP and VinVL with the pairs under supervised setting. Supervised VinVL can be regarded as the upper bound. The results are summarized in Table 4. 
From the results, our model outperforms VinVL in unsupervised setting and the supervised CLIP, and achieve comparable performance with supervised VinVL. The results basically suggest that our method generalizes well under inductive learning. (1) and (2). The noisy samples do not have ground-truth links. We test it on the whole test set of Flickr30K with 1,000 images and 5,000 sentences. We list the results in Table 5. From the table, we find that: (1) Compare to adding noise to either image or sentence based unimodal dataset, adding noise to both fails to predict another 2% ground-truth links. (2) Despite the degradation in the quality of the pseudo graph, our noisy-resistant model training is comparable among different settings. This validates the learning efficiency of our framework in the noisy environment. To demonstrate that our framework does not heavily rely on the selection of the unimodal datasets, we setup a random environment where none of image or sentence has ground-truth counterpart. In the environment, instead of selecting both of image and text based uni-modal datasets from Flickr30K testing set in §4.1, we keep the 1,000 images of Flickr30K testing set as the image based uni-modal dataset, and pick up 5,000 sentences from MS-COCO training set to compose the text based uni-modal dataset. In evaluation, considering that no ground-truth link exists in the random environment, we ignore the task of knowledge discovery and only report the performance of image-text retrieval in Flickr30K testing set. The first three comparison models are trained with the uni-modal datasets where both of the image and text based uni-modal datasets from the Flickr30K testing set. The result is listed in Table 6. From the table, we find that the missing of ground-truth counterpart degrades the model performance in image-text retrieval. Compare with VinVL(Random) and the counterpart model VinVL, Ours(Random) has less performance drop from its counterpart. 
The performance of Ours(Random) is better than CLIP and comparable with VinVL which are trained with the uni-modal datasets where the ground-truth link exists. This demonstrates that our framework is effective when training on random unimodal datasets. Conclusion In this paper, we propose a unified continuous learning framework for multi-modal knowledge discovery and pre-training based on two uni-modal datasets. In the framework, knowledge discovery is regarded as the problem of inter-modality link prediction on a multi-modal knowledge graph. Multimodal pre-training is then performed with the guidance of the knowledge graph. In order to mitigate the error propagation problem in the iterative process, we propose label-aware link assignment strategy based on the popularity of nodes in the graph construction. For model pre-training, we take graph structure as the knowledge for guidance. In practice, two-hop neighbors are utilized for the node representation learning. The experiments on Flickr30K and MS-COCO validate the quality of the multi-modal graph and the performance of pre-training model. The further analysis in the noisy environment also demonstrates that our system can perform reliable model training.
import numpy as np
import h5py
from progressbar import ProgressBar
from PIL import Image

"""
This model is used to convert data files to h5 database to facilitate training
"""

# Per-channel mean (RGB) of the Pascal dataset, subtracted from every image.
IMG_MEAN = np.array(
    (122.67891434, 116.66876762, 104.00698793), dtype=np.float32)  # RGB


def read_images(data_list):
    """Read a list file where each line is '<image_path> <label_path>'.

    Returns a list of [image_path, label_path] pairs (split on a single
    space, matching the list-file format).
    """
    with open(data_list, 'r') as f:
        data = [line.strip("\n").split(' ') for line in f]
    return data


def process_image(image, shape, resize_mode=Image.BILINEAR):
    """Open `image`, resize it to `shape` and return a float32 ndarray.

    Pass `Image.NEAREST` as `resize_mode` for label maps so that class
    ids are not blended by interpolation.
    """
    img = Image.open(image)
    img = img.resize(shape, resize_mode)
    img.load()
    return np.asarray(img, dtype="float32")


def build_h5_dataset(data_dir, list_path, out_dir, shape, name, norm=False):
    """Build `<out_dir><name>.h5` with datasets X (images) and Y (labels).

    Images are mean-subtracted and, when `norm` is True, additionally
    scaled by 1/255. Labels are resized with nearest-neighbour sampling.
    """
    images = read_images(list_path)
    images_size = len(images)
    # Context manager guarantees the HDF5 file is closed even when a read
    # or resize fails mid-loop (the original close() was skipped on error).
    with h5py.File(out_dir + name + '.h5', 'w') as dataset:
        dataset.create_dataset('X', (images_size, *shape, 3), dtype='f')
        dataset.create_dataset('Y', (images_size, *shape), dtype='f')
        # Wrap the list (which has a length) rather than the enumerate
        # generator so the progress bar can report overall progress.
        pbar = ProgressBar()
        for index, (image, label) in enumerate(pbar(images)):
            image = process_image(data_dir + image, shape)
            label = process_image(data_dir + label, shape, Image.NEAREST)
            image -= IMG_MEAN
            image = image / 255. if norm else image
            dataset['X'][index], dataset['Y'][index] = image, label


if __name__ == '__main__':
    shape = (256, 256)
    data_dir = './dataset'
    list_dir = './dataset/'
    output_dir = './dataset/'
    data_files = {
        'training': 'train.txt',
        'validation': 'val.txt',
        'testing': 'test.txt'
    }
    for name, list_path in data_files.items():
        build_h5_dataset(data_dir, list_dir + list_path, output_dir, shape, name)
#ifndef PORT_H
#define PORT_H

#include <QByteArray>
#include <QSettings>
#include <QUdpSocket>

#include <AbstractPort.h>
#include <DataBaseManager.h>
#include <DataQueueItem.h>

// Port: concrete AbstractPort implementation wrapping a QAbstractSocket
// (UDP by default, TCP helpers declared below).  Incoming datagrams are
// accumulated in readArray / localReadQueue; outgoing DataQueueItem are
// staged in localWriteQueue and flushed by write().
// NOTE(review): the QTimer member below relies on <QTimer> being pulled in
// transitively (no explicit include here) — confirm against the build.
class Port : public AbstractPort
{
    Q_OBJECT
private:
    QAbstractSocket *m_ptrSocket = nullptr;    // owned socket, created on open()
    QString strPort;                           // port number kept as text
    QString strIp;                             // IP address kept as text
    QByteArray readArray;                      // raw bytes read from the socket
    const int portIndex;                       // fixed index of this port instance
    DataBaseManager *m_dbm = nullptr;          // DB manager (not owned); used when dbIns is true
    QSet<QPair<QString, QString> > stIpPort;   // known peer (ip, port) pairs
    QSet<QHostAddress> stHostAddress;          // known peer host addresses
    QList<DataQueueItem> localReadQueue;       // items received, awaiting readAll()
    QList<DataQueueItem> localWriteQueue;      // items queued for sending
    int timeIntervalProcDK = 11000;            // beat timer period, ms
    QTimer timerBeatProcDK;                    // periodic timer -> beatProcDK_wTW()
    bool procDK = false;                       // flag maintained by the beat timer

public:
    explicit Port(const AbstractPort::Protocol &protocol = AbstractPort::UDP, QObject *parent = nullptr, const int index = 0, DataBaseManager *dbm = nullptr);
    virtual ~Port();

    // interface -->
    void retranslate();
    void loadConfig(QSettings *config);
    void saveConfig(QSettings *config);
    bool open();
    void close();
    void write(const QList<DataQueueItem> &data);
    void write(const DataQueueItem &data, bool dbIns = true);
    bool portStatus(QString *string);
    bool isOpen();
    // interface <--

    // (sic) "Scoket" spelling kept — matches the .cpp implementation.
    void prepareUdpScoket(QString strIp, QString strPort);
    static Port * typeDefPort(const AbstractPort * port);

    // getter setter -->
    QString getStrPort() const;
    void setStrPort(const QString &value);
    QString getStrIp() const;
    void setStrIp(const QString &value);
    //AbstractPort::Protocol AbstractPort::getProtocol() const;
    void setProtocol(const AbstractPort::Protocol &value);
    QByteArray getReadArray() const;
    void setReadArray(const QByteArray &value);
    void appendToReadArray(const QByteArray &value);
    int getPortIndex() const;
    QSet<QPair<QString, QString> > getStIpPort() const;
    void setSetIpPort(const QSet<QPair<QString, QString> > &value);
    void addToSetIpPort(const QPair<QString, QString> &value);
    QSet<QHostAddress> getStHostAddress() const;
    void setStHostAddress(const QSet<QHostAddress> &value);
    QList<DataQueueItem> getLocalReadQueue() const;
    QList<DataQueueItem> popLocalReadQueue();
    void setLocalReadQueue(const QList<DataQueueItem> &value);
    void pushLocalReadQueue(const QList<DataQueueItem> &value);
    void pushLocalReadQueue(const DataQueueItem &value);
    QList<DataQueueItem> getLocalWriteQueue() const;
    QList<DataQueueItem> popLocalWriteQueue();
    void setLocalWriteQueue(const QList<DataQueueItem> &value);
    void pushLocalWriteQueue(const QList<DataQueueItem> &value);
    void pushLocalWriteQueue(const DataQueueItem &value);
    // getter setter <--

    void setDbm(DataBaseManager *dbm);
    bool getProcDK() const;
    void setProcDK(bool value);
    void setProcDK_wTW();

private:
    bool openUdpScoket(QString strPort);
    bool openTcpScoket(QString host, QString strPort);
    void readUdpDatagrams();
    void readTcpDatagrams();
    QHostAddress hostAddress();
    QString localHost();

public slots:
    // interface -->
    QList<DataQueueItem> readAll();
    void write();
    // interface <--

private slots:
    void readMessage();
    void beatProcDK_wTW();

signals:
};

#endif // PORT_H
/* TSS/TPM 1.2 C type declarations and Tspi_* prototypes (TCG Software Stack
 * 1.2 style), presumably fed to a C declaration parser (e.g. cffi) by
 * python-tss — confirm against the package's build script.  Declarations
 * continue past the end of this chunk; do not append after the last line. */
<reponame>rpanchal1996/python-tss<gh_stars>10-100 typedef uint8_t BYTE; typedef int8_t TSS_BOOL; typedef uint16_t UINT16; typedef uint32_t UINT32; typedef uint64_t UINT64; typedef uint16_t TSS_UNICODE; typedef void* PVOID; typedef UINT32 TSS_RESULT; typedef UINT32 TSS_HANDLE; typedef UINT32 TSS_FLAG; typedef UINT32 TSS_HOBJECT; typedef TSS_HOBJECT TSS_HCONTEXT; typedef TSS_HOBJECT TSS_HPOLICY; typedef TSS_HOBJECT TSS_HTPM; typedef TSS_HOBJECT TSS_HKEY; typedef TSS_HOBJECT TSS_HENCDATA; typedef TSS_HOBJECT TSS_HPCRS; typedef TSS_HOBJECT TSS_HHASH; typedef TSS_HOBJECT TSS_HNVSTORE; typedef TSS_HOBJECT TSS_HMIGDATA; typedef TSS_HOBJECT TSS_HDELFAMILY; typedef TSS_HOBJECT TSS_HDAA_CREDENTIAL; typedef TSS_HOBJECT TSS_HDAA_ISSUER_KEY; typedef TSS_HOBJECT TSS_HDAA_ARA_KEY; typedef UINT32 TSS_EVENTTYPE; typedef UINT16 TSS_MIGRATE_SCHEME; typedef UINT32 TSS_ALGORITHM_ID; typedef UINT32 TSS_KEY_USAGE_ID; typedef UINT16 TSS_KEY_ENC_SCHEME; typedef UINT16 TSS_KEY_SIG_SCHEME; typedef BYTE TSS_KEY_AUTH_DATA_USAGE; typedef UINT32 TSS_CMK_DELEGATE; typedef UINT32 TSS_NV_INDEX; typedef UINT32 TSS_COUNTER_ID; typedef BYTE TPM_BOOL; typedef BYTE TPM_LOCALITY_MODIFIER; typedef UINT32 TPM_COMMAND_CODE; typedef UINT32 TPM_COUNT_ID; typedef UINT32 TPM_REDIT_COMMAND; typedef UINT32 TPM_HANDLE; typedef UINT32 TPM_AUTHHANDLE; typedef UINT32 TPM_TRANSHANDLE; typedef UINT32 TPM_KEYHANDLE; typedef UINT32 TPM_DIRINDEX; typedef UINT32 TPM_PCRINDEX; typedef UINT32 TPM_RESULT; typedef UINT32 TPM_MODIFIER_INDICATOR; typedef UINT16 TPM_STRUCTURE_TAG; typedef UINT32 TPM_RESOURCE_TYPE; typedef BYTE TPM_PAYLOAD_TYPE; typedef UINT16 TPM_ENTITY_TYPE; typedef UINT32 TPM_KEY_HANDLE; typedef UINT16 TPM_STARTUP_TYPE; typedef UINT16 TPM_PROTOCOL_ID; typedef UINT32 TPM_ALGORITHM_ID; typedef UINT16 TPM_PHYSICAL_PRESENCE; typedef UINT16 TPM_MIGRATE_SCHEME; typedef UINT16 TPM_EK_TYPE; typedef UINT16 TPM_PLATFORM_SPECIFIC; typedef struct tdTPM_STRUCT_VER { BYTE major; BYTE minor; BYTE revMajor; BYTE revMinor; } 
TPM_STRUCT_VER; typedef struct tdTPM_VERSION_BYTE { int leastSigVer : 4; int mostSigVer : 4; } TPM_VERSION_BYTE; typedef struct tdTPM_VERSION { BYTE major; BYTE minor; BYTE revMajor; BYTE revMinor; } TPM_VERSION; typedef struct tdTPM_DIGEST { BYTE digest[0x14]; } TPM_DIGEST; typedef TPM_DIGEST TPM_CHOSENID_HASH; typedef TPM_DIGEST TPM_COMPOSITE_HASH; typedef TPM_DIGEST TPM_DIRVALUE; typedef TPM_DIGEST TPM_HMAC; typedef TPM_DIGEST TPM_PCRVALUE; typedef TPM_DIGEST TPM_AUDITDIGEST; typedef struct tdTPM_NONCE { BYTE nonce[0x14]; } TPM_NONCE; typedef TPM_NONCE TPM_DAA_TPM_SEED; typedef TPM_NONCE TPM_DAA_CONTEXT_SEED; typedef struct tdTPM_AUTHDATA { BYTE authdata[0x14]; } TPM_AUTHDATA; typedef TPM_AUTHDATA TPM_SECRET; typedef TPM_AUTHDATA TPM_ENCAUTH; typedef struct tdTPM_KEY_HANDLE_LIST { UINT16 loaded; TPM_KEY_HANDLE *handle; } TPM_KEY_HANDLE_LIST; typedef UINT16 TPM_KEY_USAGE; typedef UINT16 TPM_SIG_SCHEME; typedef UINT16 TPM_ENC_SCHEME; typedef BYTE TPM_AUTH_DATA_USAGE; typedef UINT32 TPM_KEY_FLAGS; typedef struct tdTPM_CHANGEAUTH_VALIDATE { TPM_SECRET newAuthSecret; TPM_NONCE n1; } TPM_CHANGEAUTH_VALIDATE; typedef UINT32 TPM_ACTUAL_COUNT; typedef struct tdTPM_COUNTER_VALUE { TPM_STRUCTURE_TAG tag; BYTE label[4]; TPM_ACTUAL_COUNT counter; } TPM_COUNTER_VALUE; typedef struct tdTPM_SIGN_INFO { TPM_STRUCTURE_TAG tag; BYTE fixed[4]; TPM_NONCE replay; UINT32 dataLen; BYTE *data; } TPM_SIGN_INFO; typedef struct tdTPM_MSA_COMPOSITE { UINT32 MSAlist; TPM_DIGEST *migAuthDigest; } TPM_MSA_COMPOSITE; typedef struct tdTPM_CMK_AUTH { TPM_DIGEST migrationAuthorityDigest; TPM_DIGEST destinationKeyDigest; TPM_DIGEST sourceKeyDigest; } TPM_CMK_AUTH; typedef UINT32 TPM_CMK_DELEGATE; typedef struct tdTPM_SELECT_SIZE { BYTE major; BYTE minor; UINT16 reqSize; } TPM_SELECT_SIZE; typedef struct tdTPM_CMK_MIGAUTH { TPM_STRUCTURE_TAG tag; TPM_DIGEST msaDigest; TPM_DIGEST pubKeyDigest; } TPM_CMK_MIGAUTH; typedef struct tdTPM_CMK_SIGTICKET { TPM_STRUCTURE_TAG tag; TPM_DIGEST verKeyDigest; 
TPM_DIGEST signedData; } TPM_CMK_SIGTICKET; typedef struct tdTPM_CMK_MA_APPROVAL { TPM_STRUCTURE_TAG tag; TPM_DIGEST migrationAuthorityDigest; } TPM_CMK_MA_APPROVAL; typedef UINT16 TPM_TAG; typedef struct tdTPM_PERMANENT_FLAGS { TPM_STRUCTURE_TAG tag; TSS_BOOL disable; TSS_BOOL ownership; TSS_BOOL deactivated; TSS_BOOL readPubek; TSS_BOOL disableOwnerClear; TSS_BOOL allowMaintenance; TSS_BOOL physicalPresenceLifetimeLock; TSS_BOOL physicalPresenceHWEnable; TSS_BOOL physicalPresenceCMDEnable; TSS_BOOL CEKPUsed; TSS_BOOL TPMpost; TSS_BOOL TPMpostLock; TSS_BOOL FIPS; TSS_BOOL Operator; TSS_BOOL enableRevokeEK; TSS_BOOL nvLocked; TSS_BOOL readSRKPub; TSS_BOOL tpmEstablished; TSS_BOOL maintenanceDone; TSS_BOOL disableFullDALogicInfo; } TPM_PERMANENT_FLAGS; typedef struct tdTPM_STCLEAR_FLAGS { TPM_STRUCTURE_TAG tag; TSS_BOOL deactivated; TSS_BOOL disableForceClear; TSS_BOOL physicalPresence; TSS_BOOL physicalPresenceLock; TSS_BOOL bGlobalLock; } TPM_STCLEAR_FLAGS; typedef struct tdTPM_STANY_FLAGS { TPM_STRUCTURE_TAG tag; TSS_BOOL postInitialise; TPM_MODIFIER_INDICATOR localityModifier; TSS_BOOL transportExclusive; TSS_BOOL TOSPresent; } TPM_STANY_FLAGS; typedef BYTE TPM_LOCALITY_SELECTION; typedef struct tdTPM_PCR_SELECTION { UINT16 sizeOfSelect; BYTE *pcrSelect; } TPM_PCR_SELECTION; typedef struct tdTPM_PCR_COMPOSITE { TPM_PCR_SELECTION select; UINT32 valueSize; TPM_PCRVALUE *pcrValue; } TPM_PCR_COMPOSITE; typedef struct tdTPM_PCR_INFO { TPM_PCR_SELECTION pcrSelection; TPM_COMPOSITE_HASH digestAtRelease; TPM_COMPOSITE_HASH digestAtCreation; } TPM_PCR_INFO; typedef struct tdTPM_PCR_INFO_LONG { TPM_STRUCTURE_TAG tag; TPM_LOCALITY_SELECTION localityAtCreation; TPM_LOCALITY_SELECTION localityAtRelease; TPM_PCR_SELECTION creationPCRSelection; TPM_PCR_SELECTION releasePCRSelection; TPM_COMPOSITE_HASH digestAtCreation; TPM_COMPOSITE_HASH digestAtRelease; } TPM_PCR_INFO_LONG; typedef struct tdTPM_PCR_INFO_SHORT { TPM_PCR_SELECTION pcrSelection; TPM_LOCALITY_SELECTION 
localityAtRelease; TPM_COMPOSITE_HASH digestAtRelease; } TPM_PCR_INFO_SHORT; typedef struct tdTPM_PCR_ATTRIBUTES { BYTE pcrReset; TPM_LOCALITY_SELECTION pcrExtendLocal; TPM_LOCALITY_SELECTION pcrResetLocal; } TPM_PCR_ATTRIBUTES; typedef struct tdTPM_STORED_DATA { TPM_STRUCT_VER ver; UINT32 sealInfoSize; BYTE *sealInfo; UINT32 encDataSize; BYTE *encData; } TPM_STORED_DATA; typedef struct tdTPM_STORED_DATA12 { TPM_STRUCTURE_TAG tag; TPM_ENTITY_TYPE et; UINT32 sealInfoSize; BYTE *sealInfo; UINT32 encDataSize; BYTE *encData; } TPM_STORED_DATA12; typedef struct tdTPM_SEALED_DATA { TPM_PAYLOAD_TYPE payload; TPM_SECRET authData; TPM_NONCE tpmProof; TPM_DIGEST storedDigest; UINT32 dataSize; BYTE *data; } TPM_SEALED_DATA; typedef struct tdTPM_SYMMETRIC_KEY { TPM_ALGORITHM_ID algId; TPM_ENC_SCHEME encScheme; UINT16 size; BYTE *data; } TPM_SYMMETRIC_KEY; typedef struct tdTPM_BOUND_DATA { TPM_STRUCT_VER ver; TPM_PAYLOAD_TYPE payload; BYTE *payloadData; } TPM_BOUND_DATA; typedef struct tdTPM_KEY_PARMS { TPM_ALGORITHM_ID algorithmID; TPM_ENC_SCHEME encScheme; TPM_SIG_SCHEME sigScheme; UINT32 parmSize; BYTE *parms; } TPM_KEY_PARMS; typedef struct tdTPM_RSA_KEY_PARMS { UINT32 keyLength; UINT32 numPrimes; UINT32 exponentSize; BYTE *exponent; } TPM_RSA_KEY_PARMS; typedef struct tdTPM_SYMMETRIC_KEY_PARMS { UINT32 keyLength; UINT32 blockSize; UINT32 ivSize; BYTE *IV; } TPM_SYMMETRIC_KEY_PARMS; typedef struct tdTPM_STORE_PUBKEY { UINT32 keyLength; BYTE *key; } TPM_STORE_PUBKEY; typedef struct tdTPM_PUBKEY { TPM_KEY_PARMS algorithmParms; TPM_STORE_PUBKEY pubKey; } TPM_PUBKEY; typedef struct tdTPM_STORE_PRIVKEY { UINT32 keyLength; BYTE *key; } TPM_STORE_PRIVKEY; typedef struct tdTPM_STORE_ASYMKEY { TPM_PAYLOAD_TYPE payload; TPM_SECRET usageAuth; TPM_SECRET migrationAuth; TPM_DIGEST pubDataDigest; TPM_STORE_PRIVKEY privKey; } TPM_STORE_ASYMKEY; typedef struct tdTPM_KEY { TPM_STRUCT_VER ver; TPM_KEY_USAGE keyUsage; TPM_KEY_FLAGS keyFlags; TPM_AUTH_DATA_USAGE authDataUsage; TPM_KEY_PARMS 
algorithmParms; UINT32 PCRInfoSize; BYTE *PCRInfo; TPM_STORE_PUBKEY pubKey; UINT32 encSize; BYTE *encData; } TPM_KEY; typedef struct tdTPM_KEY12 { TPM_STRUCTURE_TAG tag; UINT16 fill; TPM_KEY_USAGE keyUsage; TPM_KEY_FLAGS keyFlags; TPM_AUTH_DATA_USAGE authDataUsage; TPM_KEY_PARMS algorithmParms; UINT32 PCRInfoSize; BYTE *PCRInfo; TPM_STORE_PUBKEY pubKey; UINT32 encSize; BYTE *encData; } TPM_KEY12; typedef struct tdTPM_MIGRATE_ASYMKEY { TPM_PAYLOAD_TYPE payload; TPM_SECRET usageAuth; TPM_DIGEST pubDataDigest; UINT32 partPrivKeyLen; BYTE *partPrivKey; } TPM_MIGRATE_ASYMKEY; typedef UINT32 TPM_KEY_CONTROL; typedef struct tdTPM_MIGRATIONKEYAUTH { TPM_PUBKEY migrationKey; TPM_MIGRATE_SCHEME migrationScheme; TPM_DIGEST digest; } TPM_MIGRATIONKEYAUTH; typedef struct tdTPM_CERTIFY_INFO { TPM_STRUCT_VER version; TPM_KEY_USAGE keyUsage; TPM_KEY_FLAGS keyFlags; TPM_AUTH_DATA_USAGE authDataUsage; TPM_KEY_PARMS algorithmParms; TPM_DIGEST pubkeyDigest; TPM_NONCE data; TPM_BOOL parentPCRStatus; UINT32 PCRInfoSize; BYTE *PCRInfo; } TPM_CERTIFY_INFO; typedef struct tdTPM_CERTIFY_INFO2 { TPM_STRUCTURE_TAG tag; BYTE fill; TPM_PAYLOAD_TYPE payloadType; TPM_KEY_USAGE keyUsage; TPM_KEY_FLAGS keyFlags; TPM_AUTH_DATA_USAGE authDataUsage; TPM_KEY_PARMS algorithmParms; TPM_DIGEST pubkeyDigest; TPM_NONCE data; TPM_BOOL parentPCRStatus; UINT32 PCRInfoSize; BYTE *PCRInfo; UINT32 migrationAuthoritySize; BYTE *migrationAuthority; } TPM_CERTIFY_INFO2; typedef struct tdTPM_QUOTE_INFO { TPM_STRUCT_VER version; BYTE fixed[4]; TPM_COMPOSITE_HASH compositeHash; TPM_NONCE externalData; } TPM_QUOTE_INFO; typedef struct tdTPM_QUOTE_INFO2 { TPM_STRUCTURE_TAG tag; BYTE fixed[4]; TPM_NONCE externalData; TPM_PCR_INFO_SHORT infoShort; } TPM_QUOTE_INFO2; typedef struct tdTPM_EK_BLOB { TPM_STRUCTURE_TAG tag; TPM_EK_TYPE ekType; UINT32 blobSize; BYTE *blob; } TPM_EK_BLOB; typedef struct tdTPM_EK_BLOB_ACTIVATE { TPM_STRUCTURE_TAG tag; TPM_SYMMETRIC_KEY sessionKey; TPM_DIGEST idDigest; TPM_PCR_INFO_SHORT pcrInfo; } 
TPM_EK_BLOB_ACTIVATE; typedef struct tdTPM_EK_BLOB_AUTH { TPM_STRUCTURE_TAG tag; TPM_SECRET authValue; } TPM_EK_BLOB_AUTH; typedef struct tdTPM_IDENTITY_CONTENTS { TPM_STRUCT_VER ver; UINT32 ordinal; TPM_CHOSENID_HASH labelPrivCADigest; TPM_PUBKEY identityPubKey; } TPM_IDENTITY_CONTENTS; typedef struct tdTPM_IDENTITY_REQ { UINT32 asymSize; UINT32 symSize; TPM_KEY_PARMS asymAlgorithm; TPM_KEY_PARMS symAlgorithm; BYTE *asymBlob; BYTE *symBlob; } TPM_IDENTITY_REQ; typedef struct tdTPM_IDENTITY_PROOF { TPM_STRUCT_VER ver; UINT32 labelSize; UINT32 identityBindingSize; UINT32 endorsementSize; UINT32 platformSize; UINT32 conformanceSize; TPM_PUBKEY identityKey; BYTE *labelArea; BYTE *identityBinding; BYTE *endorsementCredential; BYTE *platformCredential; BYTE *conformanceCredential; } TPM_IDENTITY_PROOF; typedef struct tdTPM_ASYM_CA_CONTENTS { TPM_SYMMETRIC_KEY sessionKey; TPM_DIGEST idDigest; } TPM_ASYM_CA_CONTENTS; typedef struct tdTPM_SYM_CA_ATTESTATION { UINT32 credSize; TPM_KEY_PARMS algorithm; BYTE *credential; } TPM_SYM_CA_ATTESTATION; typedef struct tdTPM_CURRENT_TICKS { TPM_STRUCTURE_TAG tag; UINT64 currentTicks; UINT16 tickRate; TPM_NONCE tickNonce; } TPM_CURRENT_TICKS; typedef UINT32 TPM_TRANSPORT_ATTRIBUTES; typedef struct tdTPM_TRANSPORT_PUBLIC { TPM_STRUCTURE_TAG tag; TPM_TRANSPORT_ATTRIBUTES transAttributes; TPM_ALGORITHM_ID algId; TPM_ENC_SCHEME encScheme; } TPM_TRANSPORT_PUBLIC; typedef struct tdTPM_TRANSPORT_INTERNAL { TPM_STRUCTURE_TAG tag; TPM_AUTHDATA authData; TPM_TRANSPORT_PUBLIC transPublic; TPM_TRANSHANDLE transHandle; TPM_NONCE transNonceEven; TPM_DIGEST transDigest; } TPM_TRANSPORT_INTERNAL; typedef struct tdTPM_TRANSPORT_LOG_IN { TPM_STRUCTURE_TAG tag; TPM_DIGEST parameters; TPM_DIGEST pubKeyHash; } TPM_TRANSPORT_LOG_IN; typedef struct tdTPM_TRANSPORT_LOG_OUT { TPM_STRUCTURE_TAG tag; TPM_CURRENT_TICKS currentTicks; TPM_DIGEST parameters; TPM_MODIFIER_INDICATOR locality; } TPM_TRANSPORT_LOG_OUT; typedef struct tdTPM_TRANSPORT_AUTH { 
TPM_STRUCTURE_TAG tag; TPM_AUTHDATA authData; } TPM_TRANSPORT_AUTH; typedef struct tdTPM_AUDIT_EVENT_IN { TPM_STRUCTURE_TAG tag; TPM_DIGEST inputParms; TPM_COUNTER_VALUE auditCount; } TPM_AUDIT_EVENT_IN; typedef struct tdTPM_AUDIT_EVENT_OUT { TPM_STRUCTURE_TAG tag; TPM_COMMAND_CODE ordinal; TPM_DIGEST outputParms; TPM_COUNTER_VALUE auditCount; TPM_RESULT returnCode; } TPM_AUDIT_EVENT_OUT; typedef struct tdTPM_CONTEXT_BLOB { TPM_STRUCTURE_TAG tag; TPM_RESOURCE_TYPE resourceType; TPM_HANDLE handle; BYTE label[16]; UINT32 contextCount; TPM_DIGEST integrityDigest; UINT32 additionalSize; BYTE *additionalData; UINT32 sensitiveSize; BYTE *sensitiveData; } TPM_CONTEXT_BLOB; typedef struct tdTPM_CONTEXT_SENSITIVE { TPM_STRUCTURE_TAG tag; TPM_NONCE contextNonce; UINT32 internalSize; BYTE *internalData; } TPM_CONTEXT_SENSITIVE; typedef UINT32 TPM_NV_INDEX; typedef UINT32 TPM_NV_PER_ATTRIBUTES; typedef struct tdTPM_NV_ATTRIBUTES { TPM_STRUCTURE_TAG tag; TPM_NV_PER_ATTRIBUTES attributes; } TPM_NV_ATTRIBUTES; typedef struct tdTPM_NV_DATA_PUBLIC { TPM_STRUCTURE_TAG tag; TPM_NV_INDEX nvIndex; TPM_PCR_INFO_SHORT pcrInfoRead; TPM_PCR_INFO_SHORT pcrInfoWrite; TPM_NV_ATTRIBUTES permission; TPM_BOOL bReadSTClear; TPM_BOOL bWriteSTClear; TPM_BOOL bWriteDefine; UINT32 dataSize; } TPM_NV_DATA_PUBLIC; typedef UINT32 TPM_FAMILY_VERIFICATION; typedef UINT32 TPM_FAMILY_ID; typedef UINT32 TPM_DELEGATE_INDEX; typedef UINT32 TPM_FAMILY_OPERATION; typedef UINT32 TPM_FAMILY_FLAGS; typedef struct tdTPM_FAMILY_LABEL { BYTE label; } TPM_FAMILY_LABEL; typedef struct tdTPM_FAMILY_TABLE_ENTRY { TPM_STRUCTURE_TAG tag; TPM_FAMILY_LABEL label; TPM_FAMILY_ID familyID; TPM_FAMILY_VERIFICATION verificationCount; TPM_FAMILY_FLAGS flags; } TPM_FAMILY_TABLE_ENTRY; typedef struct tdTPM_DELEGATE_LABEL { BYTE label; } TPM_DELEGATE_LABEL; typedef UINT32 TPM_DELEGATE_TYPE; typedef struct tdTPM_DELEGATIONS { TPM_STRUCTURE_TAG tag; TPM_DELEGATE_TYPE delegateType; UINT32 per1; UINT32 per2; } TPM_DELEGATIONS; typedef 
struct tdTPM_DELEGATE_PUBLIC { TPM_STRUCTURE_TAG tag; TPM_DELEGATE_LABEL label; TPM_PCR_INFO_SHORT pcrInfo; TPM_DELEGATIONS permissions; TPM_FAMILY_ID familyID; TPM_FAMILY_VERIFICATION verificationCount; } TPM_DELEGATE_PUBLIC; typedef struct tdTPM_DELEGATE_TABLE_ROW { TPM_STRUCTURE_TAG tag; TPM_DELEGATE_PUBLIC pub; TPM_SECRET authValue; } TPM_DELEGATE_TABLE_ROW; typedef struct tdTPM_DELEGATE_SENSITIVE { TPM_STRUCTURE_TAG tag; TPM_SECRET authValue; } TPM_DELEGATE_SENSITIVE; typedef struct tdTPM_DELEGATE_OWNER_BLOB { TPM_STRUCTURE_TAG tag; TPM_DELEGATE_PUBLIC pub; TPM_DIGEST integrityDigest; UINT32 additionalSize; BYTE *additionalArea; UINT32 sensitiveSize; BYTE *sensitiveArea; } TPM_DELEGATE_OWNER_BLOB; typedef struct tdTPM_DELEGATE_KEY_BLOB { TPM_STRUCTURE_TAG tag; TPM_DELEGATE_PUBLIC pub; TPM_DIGEST integrityDigest; TPM_DIGEST pubKeyDigest; UINT32 additionalSize; BYTE *additionalArea; UINT32 sensitiveSize; BYTE *sensitiveArea; } TPM_DELEGATE_KEY_BLOB; typedef UINT32 TPM_CAPABILITY_AREA; typedef struct tdTPM_CAP_VERSION_INFO { TPM_STRUCTURE_TAG tag; TPM_VERSION version; UINT16 specLevel; BYTE errataRev; BYTE tpmVendorID[4]; UINT16 vendorSpecificSize; BYTE *vendorSpecific; } TPM_CAP_VERSION_INFO; typedef BYTE TPM_DA_STATE; typedef struct tdTPM_DA_ACTION_TYPE { TPM_STRUCTURE_TAG tag; UINT32 actions; } TPM_DA_ACTION_TYPE; typedef struct tdTPM_DA_INFO { TPM_STRUCTURE_TAG tag; TPM_DA_STATE state; UINT16 currentCount; UINT16 threshholdCount; TPM_DA_ACTION_TYPE actionAtThreshold; UINT32 actionDependValue; UINT32 vendorDataSize; BYTE *vendorData; } TPM_DA_INFO; typedef struct tdTPM_DA_INFO_LIMITED { TPM_STRUCTURE_TAG tag; TPM_DA_STATE state; TPM_DA_ACTION_TYPE actionAtThreshold; UINT32 vendorDataSize; BYTE *vendorData; } TPM_DA_INFO_LIMITED; typedef struct tdTPM_DAA_ISSUER { TPM_STRUCTURE_TAG tag; TPM_DIGEST DAA_digest_R0; TPM_DIGEST DAA_digest_R1; TPM_DIGEST DAA_digest_S0; TPM_DIGEST DAA_digest_S1; TPM_DIGEST DAA_digest_n; TPM_DIGEST DAA_digest_gamma; BYTE 
DAA_generic_q[26]; } TPM_DAA_ISSUER; typedef struct tdTPM_DAA_TPM { TPM_STRUCTURE_TAG tag; TPM_DIGEST DAA_digestIssuer; TPM_DIGEST DAA_digest_v0; TPM_DIGEST DAA_digest_v1; TPM_DIGEST DAA_rekey; UINT32 DAA_count; } TPM_DAA_TPM; typedef struct tdTPM_DAA_CONTEXT { TPM_STRUCTURE_TAG tag; TPM_DIGEST DAA_digestContext; TPM_DIGEST DAA_digest; TPM_DAA_CONTEXT_SEED DAA_contextSeed; BYTE DAA_scratch[256]; BYTE DAA_stage; } TPM_DAA_CONTEXT; typedef struct tdTPM_DAA_JOINDATA { BYTE DAA_join_u0[128]; BYTE DAA_join_u1[138]; TPM_DIGEST DAA_digest_n0; } TPM_DAA_JOINDATA; typedef struct tdTPM_DAA_BLOB { TPM_STRUCTURE_TAG tag; TPM_RESOURCE_TYPE resourceType; BYTE label[16]; TPM_DIGEST blobIntegrity; UINT32 additionalSize; BYTE *additionalData; UINT32 sensitiveSize; BYTE *sensitiveData; } TPM_DAA_BLOB; typedef struct tdTPM_DAA_SENSITIVE { TPM_STRUCTURE_TAG tag; UINT32 internalSize; BYTE *internalData; } TPM_DAA_SENSITIVE; typedef UINT32 TPM_SYM_MODE; typedef struct tdTSS_VERSION { BYTE bMajor; BYTE bMinor; BYTE bRevMajor; BYTE bRevMinor; } TSS_VERSION; typedef struct tdTSS_PCR_EVENT { TSS_VERSION versionInfo; UINT32 ulPcrIndex; TSS_EVENTTYPE eventType; UINT32 ulPcrValueLength; BYTE* rgbPcrValue; UINT32 ulEventLength; BYTE* rgbEvent; } TSS_PCR_EVENT; typedef struct tdTSS_EVENT_CERT { TSS_VERSION versionInfo; UINT32 ulCertificateHashLength; BYTE* rgbCertificateHash; UINT32 ulEntityDigestLength; BYTE* rgbentityDigest; TSS_BOOL fDigestChecked; TSS_BOOL fDigestVerified; UINT32 ulIssuerLength; BYTE* rgbIssuer; } TSS_EVENT_CERT; typedef struct tdTSS_UUID { UINT32 ulTimeLow; UINT16 usTimeMid; UINT16 usTimeHigh; BYTE bClockSeqHigh; BYTE bClockSeqLow; BYTE rgbNode[6]; } TSS_UUID; typedef struct tdTSS_KM_KEYINFO { TSS_VERSION versionInfo; TSS_UUID keyUUID; TSS_UUID parentKeyUUID; BYTE bAuthDataUsage; TSS_BOOL fIsLoaded; UINT32 ulVendorDataLength; BYTE *rgbVendorData; } TSS_KM_KEYINFO; typedef struct tdTSS_KM_KEYINFO2 { TSS_VERSION versionInfo; TSS_UUID keyUUID; TSS_UUID parentKeyUUID; BYTE 
bAuthDataUsage; TSS_FLAG persistentStorageType; TSS_FLAG persistentStorageTypeParent; TSS_BOOL fIsLoaded; UINT32 ulVendorDataLength; BYTE *rgbVendorData; } TSS_KM_KEYINFO2; typedef struct tdTSS_NONCE { BYTE nonce[0x14]; } TSS_NONCE; typedef struct tdTSS_VALIDATION { TSS_VERSION versionInfo; UINT32 ulExternalDataLength; BYTE* rgbExternalData; UINT32 ulDataLength; BYTE* rgbData; UINT32 ulValidationDataLength; BYTE* rgbValidationData; } TSS_VALIDATION; typedef struct tdTSS_CALLBACK { PVOID callback; PVOID appData; TSS_ALGORITHM_ID alg; } TSS_CALLBACK; typedef struct tdTSS_DAA_PK { TSS_VERSION versionInfo; UINT32 modulusLength; BYTE* modulus; UINT32 capitalSLength; BYTE* capitalS; UINT32 capitalZLength; BYTE* capitalZ; UINT32 capitalR0Length; BYTE* capitalR0; UINT32 capitalR1Length; BYTE* capitalR1; UINT32 gammaLength; BYTE* gamma; UINT32 capitalGammaLength; BYTE* capitalGamma; UINT32 rhoLength; BYTE* rho; UINT32 capitalYLength; UINT32 capitalYLength2; BYTE** capitalY; UINT32 capitalYPlatformLength; UINT32 issuerBaseNameLength; BYTE* issuerBaseName; UINT32 numPlatformAttributes; UINT32 numIssuerAttributes; } TSS_DAA_PK; typedef struct tdTSS_DAA_PK_PROOF { TSS_VERSION versionInfo; UINT32 challengeLength; BYTE* challenge; UINT32 responseLength; UINT32 responseLength2; BYTE** response; } TSS_DAA_PK_PROOF; typedef struct tdTSS_DAA_SK { TSS_VERSION versionInfo; UINT32 productPQprimeLength; BYTE* productPQprime; } TSS_DAA_SK; typedef struct tdTSS_DAA_KEY_PAIR { TSS_VERSION versionInfo; TSS_DAA_SK secretKey; TSS_DAA_PK publicKey; } TSS_DAA_KEY_PAIR; typedef struct tdTSS_DAA_AR_PK { TSS_VERSION versionInfo; UINT32 etaLength; BYTE* eta; UINT32 lambda1Length; BYTE* lambda1; UINT32 lambda2Length; BYTE* lambda2; UINT32 lambda3Length; BYTE* lambda3; } TSS_DAA_AR_PK; typedef struct tdTSS_DAA_AR_SK { TSS_VERSION versionInfo; UINT32 x0Length; BYTE* x0; UINT32 x1Length; BYTE* x1; UINT32 x2Length; BYTE* x2; UINT32 x3Length; BYTE* x3; UINT32 x4Length; BYTE* x4; UINT32 x5Length; BYTE* x5; 
} TSS_DAA_AR_SK; typedef struct tdTSS_DAA_AR_KEY_PAIR { TSS_VERSION versionInfo; TSS_DAA_AR_SK secretKey; TSS_DAA_AR_PK publicKey; } TSS_DAA_AR_KEY_PAIR; typedef struct tdTSS_DAA_CRED_ISSUER { TSS_VERSION versionInfo; UINT32 capitalALength; BYTE* capitalA; UINT32 eLength; BYTE* e; UINT32 vPrimePrimeLength; BYTE* vPrimePrime; UINT32 attributesIssuerLength; UINT32 attributesIssuerLength2; BYTE** attributesIssuer; UINT32 cPrimeLength; BYTE* cPrime; UINT32 sELength; BYTE* sE; } TSS_DAA_CRED_ISSUER; typedef struct tdTSS_DAA_CREDENTIAL { TSS_VERSION versionInfo; UINT32 capitalALength; BYTE* capitalA; UINT32 exponentLength; BYTE* exponent; UINT32 vBar0Length; BYTE* vBar0; UINT32 vBar1Length; BYTE* vBar1; UINT32 attributesLength; UINT32 attributesLength2; BYTE** attributes; TSS_DAA_PK issuerPK; UINT32 tpmSpecificEncLength; BYTE* tpmSpecificEnc; UINT32 daaCounter; } TSS_DAA_CREDENTIAL; typedef struct tdTSS_DAA_ATTRIB_COMMIT { TSS_VERSION versionInfo; UINT32 betaLength; BYTE* beta; UINT32 sMuLength; BYTE* sMu; } TSS_DAA_ATTRIB_COMMIT; typedef struct tdTSS_DAA_CREDENTIAL_REQUEST { TSS_VERSION versionInfo; UINT32 capitalULength; BYTE* capitalU; UINT32 capitalNiLength; BYTE* capitalNi; UINT32 authenticationProofLength; BYTE* authenticationProof; UINT32 challengeLength; BYTE* challenge; UINT32 nonceTpmLength; BYTE* nonceTpm; UINT32 noncePlatformLength; BYTE* noncePlatform; UINT32 sF0Length; BYTE* sF0; UINT32 sF1Length; BYTE* sF1; UINT32 sVprimeLength; BYTE* sVprime; UINT32 sVtildePrimeLength; BYTE* sVtildePrime; UINT32 sALength; UINT32 sALength2; BYTE** sA; UINT32 attributeCommitmentsLength; TSS_DAA_ATTRIB_COMMIT* attributeCommitments; } TSS_DAA_CREDENTIAL_REQUEST; typedef struct tdTSS_DAA_SELECTED_ATTRIB { TSS_VERSION versionInfo; UINT32 indicesListLength; TSS_BOOL* indicesList; } TSS_DAA_SELECTED_ATTRIB; typedef struct tdTSS_DAA_PSEUDONYM { TSS_VERSION versionInfo; TSS_FLAG payloadFlag; UINT32 payloadLength; BYTE* payload; } TSS_DAA_PSEUDONYM; typedef struct 
tdTSS_DAA_PSEUDONYM_PLAIN { TSS_VERSION versionInfo; UINT32 capitalNvLength; BYTE* capitalNv; } TSS_DAA_PSEUDONYM_PLAIN; typedef struct tdTSS_DAA_PSEUDONYM_ENCRYPTED { TSS_VERSION versionInfo; UINT32 delta1Length; BYTE* delta1; UINT32 delta2Length; BYTE* delta2; UINT32 delta3Length; BYTE* delta3; UINT32 delta4Length; BYTE* delta4; UINT32 sTauLength; BYTE* sTau; } TSS_DAA_PSEUDONYM_ENCRYPTED; typedef struct tdTSS_DAA_SIGN_CALLBACK { TSS_VERSION versionInfo; TSS_HHASH challenge; TSS_FLAG payloadFlag; UINT32 payloadLength; BYTE* payload; } TSS_DAA_SIGN_CALLBACK; typedef struct tdTSS_DAA_SIGNATURE { TSS_VERSION versionInfo; UINT32 zetaLength; BYTE* zeta; UINT32 capitalTLength; BYTE* capitalT; UINT32 challengeLength; BYTE* challenge; UINT32 nonceTpmLength; BYTE* nonceTpm; UINT32 sVLength; BYTE* sV; UINT32 sF0Length; BYTE* sF0; UINT32 sF1Length; BYTE* sF1; UINT32 sELength; BYTE* sE; UINT32 sALength; UINT32 sALength2; BYTE** sA; UINT32 attributeCommitmentsLength; TSS_DAA_ATTRIB_COMMIT* attributeCommitments; TSS_DAA_PSEUDONYM signedPseudonym; TSS_DAA_SIGN_CALLBACK callbackResult; } TSS_DAA_SIGNATURE; typedef struct tdTSS_DAA_IDENTITY_PROOF { TSS_VERSION versionInfo; UINT32 endorsementLength; BYTE* endorsementCredential; UINT32 platformLength; BYTE* platform; UINT32 conformanceLength; BYTE* conformance; } TSS_DAA_IDENTITY_PROOF; typedef UINT32 TSS_FAMILY_ID; typedef BYTE TSS_DELEGATION_LABEL; typedef UINT32 TSS_DELEGATION_TYPE; typedef struct tdTSS_PCR_INFO_SHORT { UINT32 sizeOfSelect; BYTE *selection; BYTE localityAtRelease; UINT32 sizeOfDigestAtRelease; BYTE *digestAtRelease; } TSS_PCR_INFO_SHORT; typedef struct tdTSS_FAMILY_TABLE_ENTRY { TSS_FAMILY_ID familyID; TSS_DELEGATION_LABEL label; UINT32 verificationCount; TSS_BOOL enabled; TSS_BOOL locked; } TSS_FAMILY_TABLE_ENTRY; typedef struct tdTSS_DELEGATION_TABLE_ENTRY { UINT32 tableIndex; TSS_DELEGATION_LABEL label; TSS_PCR_INFO_SHORT pcrInfo; UINT32 per1; UINT32 per2; TSS_FAMILY_ID familyID; UINT32 verificationCount; } 
TSS_DELEGATION_TABLE_ENTRY; typedef struct tdTSS_PLATFORM_CLASS { UINT32 platformClassSimpleIdentifier; UINT32 platformClassURISize; BYTE* pPlatformClassURI; } TSS_PLATFORM_CLASS; typedef UINT32 TCS_AUTHHANDLE; typedef UINT32 TCS_CONTEXT_HANDLE; typedef UINT32 TCS_KEY_HANDLE; typedef UINT32 TCS_HANDLE; typedef TPM_ENCAUTH TCG_ENCAUTH; typedef TPM_NONCE TCG_NONCE; typedef TPM_ENTITY_TYPE TCG_ENTITY_TYPE; typedef TPM_PCRINDEX TCG_PCRINDEX; typedef TPM_DIGEST TCG_DIGEST; typedef TPM_PCRVALUE TCG_PCRVALUE; typedef TPM_DIRVALUE TCG_DIRVALUE; typedef TPM_DIRINDEX TCG_DIRINDEX; typedef struct tdTCS_AUTH { TCS_AUTHHANDLE AuthHandle; TPM_NONCE NonceOdd; TPM_NONCE NonceEven; TSS_BOOL fContinueAuthSession; TPM_AUTHDATA HMAC; } TCS_AUTH; typedef TCS_AUTH TPM_AUTH; typedef struct tdTCS_LOADKEY_INFO { TSS_UUID keyUUID; TSS_UUID parentKeyUUID; TPM_DIGEST paramDigest; TPM_AUTH authData; } TCS_LOADKEY_INFO; extern TSS_RESULT Tspi_EncodeDER_TssBlob ( UINT32 rawBlobSize, BYTE* rawBlob, UINT32 blobType, UINT32* derBlobSize, BYTE* derBlob ); extern TSS_RESULT Tspi_DecodeBER_TssBlob ( UINT32 berBlobSize, BYTE* berBlob, UINT32* blobType, UINT32* rawBlobSize, BYTE* rawBlob ); extern TSS_RESULT Tspi_SetAttribUint32 ( TSS_HOBJECT hObject, TSS_FLAG attribFlag, TSS_FLAG subFlag, UINT32 ulAttrib ); extern TSS_RESULT Tspi_GetAttribUint32 ( TSS_HOBJECT hObject, TSS_FLAG attribFlag, TSS_FLAG subFlag, UINT32* pulAttrib ); extern TSS_RESULT Tspi_SetAttribData ( TSS_HOBJECT hObject, TSS_FLAG attribFlag, TSS_FLAG subFlag, UINT32 ulAttribDataSize, BYTE* rgbAttribData ); extern TSS_RESULT Tspi_GetAttribData ( TSS_HOBJECT hObject, TSS_FLAG attribFlag, TSS_FLAG subFlag, UINT32* pulAttribDataSize, BYTE** prgbAttribData ); extern TSS_RESULT Tspi_ChangeAuth ( TSS_HOBJECT hObjectToChange, TSS_HOBJECT hParentObject, TSS_HPOLICY hNewPolicy ); extern TSS_RESULT Tspi_ChangeAuthAsym ( TSS_HOBJECT hObjectToChange, TSS_HOBJECT hParentObject, TSS_HKEY hIdentKey, TSS_HPOLICY hNewPolicy ); extern TSS_RESULT 
Tspi_GetPolicyObject ( TSS_HOBJECT hObject, TSS_FLAG policyType, TSS_HPOLICY* phPolicy ); extern TSS_RESULT Tspi_Context_Create ( TSS_HCONTEXT* phContext ); extern TSS_RESULT Tspi_Context_Close ( TSS_HCONTEXT hContext ); extern TSS_RESULT Tspi_Context_Connect ( TSS_HCONTEXT hContext, TSS_UNICODE* wszDestination ); extern TSS_RESULT Tspi_Context_FreeMemory ( TSS_HCONTEXT hContext, BYTE* rgbMemory ); extern TSS_RESULT Tspi_Context_GetDefaultPolicy ( TSS_HCONTEXT hContext, TSS_HPOLICY* phPolicy ); extern TSS_RESULT Tspi_Context_CreateObject ( TSS_HCONTEXT hContext, TSS_FLAG objectType, TSS_FLAG initFlags, TSS_HOBJECT* phObject ); extern TSS_RESULT Tspi_Context_CloseObject ( TSS_HCONTEXT hContext, TSS_HOBJECT hObject ); extern TSS_RESULT Tspi_Context_GetCapability ( TSS_HCONTEXT hContext, TSS_FLAG capArea, UINT32 ulSubCapLength, BYTE* rgbSubCap, UINT32* pulRespDataLength, BYTE** prgbRespData ); extern TSS_RESULT Tspi_Context_GetTpmObject ( TSS_HCONTEXT hContext, TSS_HTPM* phTPM ); extern TSS_RESULT Tspi_Context_SetTransEncryptionKey ( TSS_HCONTEXT hContext, TSS_HKEY hKey ); extern TSS_RESULT Tspi_Context_CloseSignTransport ( TSS_HCONTEXT hContext, TSS_HKEY hSigningKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_Context_LoadKeyByBlob ( TSS_HCONTEXT hContext, TSS_HKEY hUnwrappingKey, UINT32 ulBlobLength, BYTE* rgbBlobData, TSS_HKEY* phKey ); extern TSS_RESULT Tspi_Context_LoadKeyByUUID ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_UUID uuidData, TSS_HKEY* phKey ); extern TSS_RESULT Tspi_Context_RegisterKey ( TSS_HCONTEXT hContext, TSS_HKEY hKey, TSS_FLAG persistentStorageType, TSS_UUID uuidKey, TSS_FLAG persistentStorageTypeParent, TSS_UUID uuidParentKey ); extern TSS_RESULT Tspi_Context_UnregisterKey ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_UUID uuidKey, TSS_HKEY* phkey ); extern TSS_RESULT Tspi_Context_GetKeyByUUID ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_UUID uuidData, TSS_HKEY* phKey ); extern 
TSS_RESULT Tspi_Context_GetKeyByPublicInfo ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_ALGORITHM_ID algID, UINT32 ulPublicInfoLength, BYTE* rgbPublicInfo, TSS_HKEY* phKey ); extern TSS_RESULT Tspi_Context_GetRegisteredKeysByUUID ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_UUID* pUuidData, UINT32* pulKeyHierarchySize, TSS_KM_KEYINFO** ppKeyHierarchy ); extern TSS_RESULT Tspi_Context_GetRegisteredKeysByUUID2 ( TSS_HCONTEXT hContext, TSS_FLAG persistentStorageType, TSS_UUID* pUuidData, UINT32* pulKeyHierarchySize, TSS_KM_KEYINFO2** ppKeyHierarchy ); extern TSS_RESULT Tspi_Policy_SetSecret ( TSS_HPOLICY hPolicy, TSS_FLAG secretMode, UINT32 ulSecretLength, BYTE* rgbSecret ); extern TSS_RESULT Tspi_Policy_FlushSecret ( TSS_HPOLICY hPolicy ); extern TSS_RESULT Tspi_Policy_AssignToObject ( TSS_HPOLICY hPolicy, TSS_HOBJECT hObject ); extern TSS_RESULT Tspi_TPM_KeyControlOwner ( TSS_HTPM hTPM, TSS_HKEY hKey, UINT32 attribName, TSS_BOOL attribValue, TSS_UUID* pUuidData ); extern TSS_RESULT Tspi_TPM_CreateEndorsementKey ( TSS_HTPM hTPM, TSS_HKEY hKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_TPM_CreateRevocableEndorsementKey ( TSS_HTPM hTPM, TSS_HKEY hKey, TSS_VALIDATION* pValidationData, UINT32* pulEkResetDataLength, BYTE** rgbEkResetData ); extern TSS_RESULT Tspi_TPM_RevokeEndorsementKey ( TSS_HTPM hTPM, UINT32 ulEkResetDataLength, BYTE* rgbEkResetData ); extern TSS_RESULT Tspi_TPM_GetPubEndorsementKey ( TSS_HTPM hTPM, TSS_BOOL fOwnerAuthorized, TSS_VALIDATION* pValidationData, TSS_HKEY* phEndorsementPubKey ); extern TSS_RESULT Tspi_TPM_OwnerGetSRKPubKey ( TSS_HTPM hTPM, UINT32* pulPubKeyLength, BYTE** prgbPubKey ); extern TSS_RESULT Tspi_TPM_TakeOwnership ( TSS_HTPM hTPM, TSS_HKEY hKeySRK, TSS_HKEY hEndorsementPubKey ); extern TSS_RESULT Tspi_TPM_ClearOwner ( TSS_HTPM hTPM, TSS_BOOL fForcedClear ); extern TSS_RESULT Tspi_TPM_CollateIdentityRequest ( TSS_HTPM hTPM, TSS_HKEY hKeySRK, TSS_HKEY hCAPubKey, UINT32 
ulIdentityLabelLength, BYTE* rgbIdentityLabelData, TSS_HKEY hIdentityKey, TSS_ALGORITHM_ID algID, UINT32* pulTCPAIdentityReqLength, BYTE** prgbTCPAIdentityReq ); extern TSS_RESULT Tspi_TPM_ActivateIdentity ( TSS_HTPM hTPM, TSS_HKEY hIdentKey, UINT32 ulAsymCAContentsBlobLength, BYTE* rgbAsymCAContentsBlob, UINT32 ulSymCAAttestationBlobLength, BYTE* rgbSymCAAttestationBlob, UINT32* pulCredentialLength, BYTE** prgbCredential ); extern TSS_RESULT Tspi_TPM_CreateMaintenanceArchive ( TSS_HTPM hTPM, TSS_BOOL fGenerateRndNumber, UINT32* pulRndNumberLength, BYTE** prgbRndNumber, UINT32* pulArchiveDataLength, BYTE** prgbArchiveData ); extern TSS_RESULT Tspi_TPM_KillMaintenanceFeature ( TSS_HTPM hTPM ); extern TSS_RESULT Tspi_TPM_LoadMaintenancePubKey ( TSS_HTPM hTPM, TSS_HKEY hMaintenanceKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_TPM_CheckMaintenancePubKey ( TSS_HTPM hTPM, TSS_HKEY hMaintenanceKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_TPM_SetOperatorAuth ( TSS_HTPM hTPM, TSS_HPOLICY hOperatorPolicy ); extern TSS_RESULT Tspi_TPM_SetStatus ( TSS_HTPM hTPM, TSS_FLAG statusFlag, TSS_BOOL fTpmState ); extern TSS_RESULT Tspi_TPM_GetStatus ( TSS_HTPM hTPM, TSS_FLAG statusFlag, TSS_BOOL* pfTpmState ); extern TSS_RESULT Tspi_TPM_GetCapability ( TSS_HTPM hTPM, TSS_FLAG capArea, UINT32 ulSubCapLength, BYTE* rgbSubCap, UINT32* pulRespDataLength, BYTE** prgbRespData ); extern TSS_RESULT Tspi_TPM_GetCapabilitySigned ( TSS_HTPM hTPM, TSS_HKEY hKey, TSS_FLAG capArea, UINT32 ulSubCapLength, BYTE* rgbSubCap, TSS_VALIDATION* pValidationData, UINT32* pulRespDataLength, BYTE** prgbRespData ); extern TSS_RESULT Tspi_TPM_SelfTestFull ( TSS_HTPM hTPM ); extern TSS_RESULT Tspi_TPM_CertifySelfTest ( TSS_HTPM hTPM, TSS_HKEY hKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_TPM_GetTestResult ( TSS_HTPM hTPM, UINT32* pulTestResultLength, BYTE** prgbTestResult ); extern TSS_RESULT Tspi_TPM_GetRandom ( TSS_HTPM hTPM, UINT32 ulRandomDataLength, BYTE**
prgbRandomData ); extern TSS_RESULT Tspi_TPM_StirRandom ( TSS_HTPM hTPM, UINT32 ulEntropyDataLength, BYTE* rgbEntropyData ); extern TSS_RESULT Tspi_TPM_GetEvent ( TSS_HTPM hTPM, UINT32 ulPcrIndex, UINT32 ulEventNumber, TSS_PCR_EVENT* pPcrEvent ); extern TSS_RESULT Tspi_TPM_GetEvents ( TSS_HTPM hTPM, UINT32 ulPcrIndex, UINT32 ulStartNumber, UINT32* pulEventNumber, TSS_PCR_EVENT** prgPcrEvents ); extern TSS_RESULT Tspi_TPM_GetEventLog ( TSS_HTPM hTPM, UINT32* pulEventNumber, TSS_PCR_EVENT** prgPcrEvents ); extern TSS_RESULT Tspi_TPM_Quote ( TSS_HTPM hTPM, TSS_HKEY hIdentKey, TSS_HPCRS hPcrComposite, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_TPM_Quote2 ( TSS_HTPM hTPM, TSS_HKEY hIdentKey, TSS_BOOL fAddVersion, TSS_HPCRS hPcrComposite, TSS_VALIDATION* pValidationData, UINT32* versionInfoSize, BYTE** versionInfo ); extern TSS_RESULT Tspi_TPM_PcrExtend ( TSS_HTPM hTPM, UINT32 ulPcrIndex, UINT32 ulPcrDataLength, BYTE* pbPcrData, TSS_PCR_EVENT* pPcrEvent, UINT32* pulPcrValueLength, BYTE** prgbPcrValue ); extern TSS_RESULT Tspi_TPM_PcrRead ( TSS_HTPM hTPM, UINT32 ulPcrIndex, UINT32* pulPcrValueLength, BYTE** prgbPcrValue ); extern TSS_RESULT Tspi_TPM_PcrReset ( TSS_HTPM hTPM, TSS_HPCRS hPcrComposite ); extern TSS_RESULT Tspi_TPM_AuthorizeMigrationTicket ( TSS_HTPM hTPM, TSS_HKEY hMigrationKey, TSS_MIGRATE_SCHEME migrationScheme, UINT32* pulMigTicketLength, BYTE** prgbMigTicket ); extern TSS_RESULT Tspi_TPM_CMKSetRestrictions ( TSS_HTPM hTPM, TSS_CMK_DELEGATE CmkDelegate ); extern TSS_RESULT Tspi_TPM_CMKApproveMA ( TSS_HTPM hTPM, TSS_HMIGDATA hMaAuthData ); extern TSS_RESULT Tspi_TPM_CMKCreateTicket ( TSS_HTPM hTPM, TSS_HKEY hVerifyKey, TSS_HMIGDATA hSigData ); extern TSS_RESULT Tspi_TPM_ReadCounter ( TSS_HTPM hTPM, UINT32* counterValue ); extern TSS_RESULT Tspi_TPM_ReadCurrentTicks ( TSS_HTPM hTPM, TPM_CURRENT_TICKS* tickCount ); extern TSS_RESULT Tspi_TPM_DirWrite ( TSS_HTPM hTPM, UINT32 ulDirIndex, UINT32 ulDirDataLength, BYTE* rgbDirData ); extern 
TSS_RESULT Tspi_TPM_DirRead ( TSS_HTPM hTPM, UINT32 ulDirIndex, UINT32* pulDirDataLength, BYTE** prgbDirData ); extern TSS_RESULT Tspi_TPM_Delegate_AddFamily ( TSS_HTPM hTPM, BYTE bLabel, TSS_HDELFAMILY* phFamily ); extern TSS_RESULT Tspi_TPM_Delegate_GetFamily ( TSS_HTPM hTPM, UINT32 ulFamilyID, TSS_HDELFAMILY* phFamily ); extern TSS_RESULT Tspi_TPM_Delegate_InvalidateFamily ( TSS_HTPM hTPM, TSS_HDELFAMILY hFamily ); extern TSS_RESULT Tspi_TPM_Delegate_CreateDelegation ( TSS_HOBJECT hObject, BYTE bLabel, UINT32 ulFlags, TSS_HPCRS hPcr, TSS_HDELFAMILY hFamily, TSS_HPOLICY hDelegation ); extern TSS_RESULT Tspi_TPM_Delegate_CacheOwnerDelegation ( TSS_HTPM hTPM, TSS_HPOLICY hDelegation, UINT32 ulIndex, UINT32 ulFlags ); extern TSS_RESULT Tspi_TPM_Delegate_UpdateVerificationCount ( TSS_HTPM hTPM, TSS_HPOLICY hDelegation ); extern TSS_RESULT Tspi_TPM_Delegate_VerifyDelegation ( TSS_HPOLICY hDelegation ); extern TSS_RESULT Tspi_TPM_Delegate_ReadTables ( TSS_HCONTEXT hContext, UINT32* pulFamilyTableSize, TSS_FAMILY_TABLE_ENTRY** ppFamilyTable, UINT32* pulDelegateTableSize, TSS_DELEGATION_TABLE_ENTRY** ppDelegateTable ); extern TSS_RESULT Tspi_TPM_GetAuditDigest ( TSS_HTPM hTPM, TSS_HKEY hKey, TSS_BOOL closeAudit, UINT32* pulAuditDigestSize, BYTE** prgbAuditDigest, TPM_COUNTER_VALUE* pCounterValue, TSS_VALIDATION* pValidationData, UINT32* ordSize, UINT32** ordList ); extern TSS_RESULT Tspi_PcrComposite_SelectPcrIndex ( TSS_HPCRS hPcrComposite, UINT32 ulPcrIndex ); extern TSS_RESULT Tspi_PcrComposite_SelectPcrIndexEx ( TSS_HPCRS hPcrComposite, UINT32 ulPcrIndex, UINT32 direction ); extern TSS_RESULT Tspi_PcrComposite_SetPcrValue ( TSS_HPCRS hPcrComposite, UINT32 ulPcrIndex, UINT32 ulPcrValueLength, BYTE* rgbPcrValue ); extern TSS_RESULT Tspi_PcrComposite_GetPcrValue ( TSS_HPCRS hPcrComposite, UINT32 ulPcrIndex, UINT32* pulPcrValueLength, BYTE** prgbPcrValue ); extern TSS_RESULT Tspi_PcrComposite_SetPcrLocality ( TSS_HPCRS hPcrComposite, UINT32 LocalityValue ); extern 
TSS_RESULT Tspi_PcrComposite_GetPcrLocality ( TSS_HPCRS hPcrComposite, UINT32* pLocalityValue ); extern TSS_RESULT Tspi_PcrComposite_GetCompositeHash ( TSS_HPCRS hPcrComposite, UINT32* pLen, BYTE** ppbHashData ); extern TSS_RESULT Tspi_Key_LoadKey ( TSS_HKEY hKey, TSS_HKEY hUnwrappingKey ); extern TSS_RESULT Tspi_Key_UnloadKey ( TSS_HKEY hKey ); extern TSS_RESULT Tspi_Key_GetPubKey ( TSS_HKEY hKey, UINT32* pulPubKeyLength, BYTE** prgbPubKey ); extern TSS_RESULT Tspi_Key_CertifyKey ( TSS_HKEY hKey, TSS_HKEY hCertifyingKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_Key_CreateKey ( TSS_HKEY hKey, TSS_HKEY hWrappingKey, TSS_HPCRS hPcrComposite ); extern TSS_RESULT Tspi_Key_WrapKey ( TSS_HKEY hKey, TSS_HKEY hWrappingKey, TSS_HPCRS hPcrComposite ); extern TSS_RESULT Tspi_Key_CreateMigrationBlob ( TSS_HKEY hKeyToMigrate, TSS_HKEY hParentKey, UINT32 ulMigTicketLength, BYTE* rgbMigTicket, UINT32* pulRandomLength, BYTE** prgbRandom, UINT32* pulMigrationBlobLength, BYTE** prgbMigrationBlob ); extern TSS_RESULT Tspi_Key_ConvertMigrationBlob ( TSS_HKEY hKeyToMigrate, TSS_HKEY hParentKey, UINT32 ulRandomLength, BYTE* rgbRandom, UINT32 ulMigrationBlobLength, BYTE* rgbMigrationBlob ); extern TSS_RESULT Tspi_Key_CMKCreateBlob ( TSS_HKEY hKeyToMigrate, TSS_HKEY hParentKey, TSS_HMIGDATA hMigrationData, UINT32* pulRandomLength, BYTE** prgbRandom ); extern TSS_RESULT Tspi_Key_CMKConvertMigration ( TSS_HKEY hKeyToMigrate, TSS_HKEY hParentKey, TSS_HMIGDATA hMigrationData, UINT32 ulRandomLength, BYTE* rgbRandom ); extern TSS_RESULT Tspi_Hash_Sign ( TSS_HHASH hHash, TSS_HKEY hKey, UINT32* pulSignatureLength, BYTE** prgbSignature ); extern TSS_RESULT Tspi_Hash_VerifySignature ( TSS_HHASH hHash, TSS_HKEY hKey, UINT32 ulSignatureLength, BYTE* rgbSignature ); extern TSS_RESULT Tspi_Hash_SetHashValue ( TSS_HHASH hHash, UINT32 ulHashValueLength, BYTE* rgbHashValue ); extern TSS_RESULT Tspi_Hash_GetHashValue ( TSS_HHASH hHash, UINT32* pulHashValueLength, BYTE** prgbHashValue ); 
extern TSS_RESULT Tspi_Hash_UpdateHashValue ( TSS_HHASH hHash, UINT32 ulDataLength, BYTE* rgbData ); extern TSS_RESULT Tspi_Hash_TickStampBlob ( TSS_HHASH hHash, TSS_HKEY hIdentKey, TSS_VALIDATION* pValidationData ); extern TSS_RESULT Tspi_Data_Bind ( TSS_HENCDATA hEncData, TSS_HKEY hEncKey, UINT32 ulDataLength, BYTE* rgbDataToBind ); extern TSS_RESULT Tspi_Data_Unbind ( TSS_HENCDATA hEncData, TSS_HKEY hKey, UINT32* pulUnboundDataLength, BYTE** prgbUnboundData ); extern TSS_RESULT Tspi_Data_Seal ( TSS_HENCDATA hEncData, TSS_HKEY hEncKey, UINT32 ulDataLength, BYTE* rgbDataToSeal, TSS_HPCRS hPcrComposite ); extern TSS_RESULT Tspi_Data_Unseal ( TSS_HENCDATA hEncData, TSS_HKEY hKey, UINT32* pulUnsealedDataLength, BYTE** prgbUnsealedData ); extern TSS_RESULT Tspi_NV_DefineSpace ( TSS_HNVSTORE hNVStore, TSS_HPCRS hReadPcrComposite, TSS_HPCRS hWritePcrComposite ); extern TSS_RESULT Tspi_NV_ReleaseSpace ( TSS_HNVSTORE hNVStore ); extern TSS_RESULT Tspi_NV_WriteValue ( TSS_HNVSTORE hNVStore, UINT32 offset, UINT32 ulDataLength, BYTE* rgbDataToWrite ); extern TSS_RESULT Tspi_NV_ReadValue ( TSS_HNVSTORE hNVStore, UINT32 offset, UINT32* ulDataLength, BYTE** rgbDataRead ); typedef TSS_RESULT (*Tspicb_CallbackHMACAuth) ( PVOID lpAppData, TSS_HOBJECT hAuthorizedObject, TSS_BOOL ReturnOrVerify, UINT32 ulPendingFunction, TSS_BOOL ContinueUse, UINT32 ulSizeNonces, BYTE* rgbNonceEven, BYTE* rgbNonceOdd, BYTE* rgbNonceEvenOSAP, BYTE* rgbNonceOddOSAP, UINT32 ulSizeDigestHmac, BYTE* rgbParamDigest, BYTE* rgbHmacData ); typedef TSS_RESULT (*Tspicb_CallbackXorEnc) ( PVOID lpAppData, TSS_HOBJECT hOSAPObject, TSS_HOBJECT hObject, TSS_FLAG PurposeSecret, UINT32 ulSizeNonces, BYTE* rgbNonceEven, BYTE* rgbNonceOdd, BYTE* rgbNonceEvenOSAP, BYTE* rgbNonceOddOSAP, UINT32 ulSizeEncAuth, BYTE* rgbEncAuthUsage, BYTE* rgbEncAuthMigration ); typedef TSS_RESULT (*Tspicb_CallbackTakeOwnership) ( PVOID lpAppData, TSS_HOBJECT hObject, TSS_HKEY hObjectPubKey, UINT32 ulSizeEncAuth, BYTE* rgbEncAuth ); 
typedef TSS_RESULT (*Tspicb_CallbackSealxMask) ( PVOID lpAppData, TSS_HKEY hKey, TSS_HENCDATA hEncData, TSS_ALGORITHM_ID algID, UINT32 ulSizeNonces, BYTE* rgbNonceEven, BYTE* rgbNonceOdd, BYTE* rgbNonceEvenOSAP, BYTE* rgbNonceOddOSAP, UINT32 ulDataLength, BYTE* rgbDataToMask, BYTE* rgbMaskedData ); typedef TSS_RESULT (*Tspicb_CallbackChangeAuthAsym) ( PVOID lpAppData, TSS_HOBJECT hObject, TSS_HKEY hObjectPubKey, UINT32 ulSizeEncAuth, UINT32 ulSizeAuthLink, BYTE* rgbEncAuth, BYTE* rgbAuthLink ); typedef TSS_RESULT (*Tspicb_CollateIdentity) ( PVOID lpAppData, UINT32 ulTCPAPlainIdentityProofLength, BYTE* rgbTCPAPlainIdentityProof, TSS_ALGORITHM_ID algID, UINT32 ulSessionKeyLength, BYTE* rgbSessionKey, UINT32* pulTCPAIdentityProofLength, BYTE* rgbTCPAIdentityProof ); typedef TSS_RESULT (*Tspicb_ActivateIdentity) ( PVOID lpAppData, UINT32 ulSessionKeyLength, BYTE* rgbSessionKey, UINT32 ulSymCAAttestationBlobLength, BYTE* rgbSymCAAttestationBlob, UINT32* pulCredentialLength, BYTE* rgbCredential ); typedef TSS_RESULT (*Tspicb_DAA_Sign) ( PVOID lpAppData, TSS_HDAA_ISSUER_KEY daaPublicKey, UINT32 gammasLength, BYTE** gammas, UINT32 attributesLength, BYTE** attributes, UINT32 randomAttributesLength, BYTE** randomAttributes, UINT32 attributeCommitmentsLength, TSS_DAA_ATTRIB_COMMIT* attributeCommitments, TSS_DAA_ATTRIB_COMMIT* attributeCommitmentsProof, TSS_DAA_PSEUDONYM_PLAIN* pseudonym, TSS_DAA_PSEUDONYM_PLAIN* pseudonymTilde, TSS_DAA_PSEUDONYM_ENCRYPTED* pseudonymEncrypted, TSS_DAA_PSEUDONYM_ENCRYPTED* pseudonymEncProof, TSS_DAA_SIGN_CALLBACK** additionalProof ); typedef TSS_RESULT (*Tspicb_DAA_VerifySignature) ( PVOID lpAppData, UINT32 challengeLength, BYTE* challenge, TSS_DAA_SIGN_CALLBACK* additionalProof, TSS_HDAA_ISSUER_KEY daaPublicKey, UINT32 gammasLength, BYTE** gammas, UINT32 sAttributesLength, BYTE** sAttributes, UINT32 attributeCommitmentsLength, TSS_DAA_ATTRIB_COMMIT* attributeCommitments, TSS_DAA_ATTRIB_COMMIT* attributeCommitmentsProof, UINT32 zetaLength, 
BYTE* zeta, UINT32 sFLength, BYTE* sF, TSS_DAA_PSEUDONYM* pseudonym, TSS_DAA_PSEUDONYM* pseudonymProof, TSS_BOOL* isCorrect ); #define TSS_LEVEL_SUCCESS ... #define TSS_LEVEL_INFO ... #define TSS_LEVEL_WARNING ... #define TSS_LEVEL_ERROR ... #define FACILITY_TSS ... #define FACILITY_TSS_CODEPOS ... #define TSS_CUSTOM_CODEFLAG ... #define TSS_E_BASE ... #define TSS_W_BASE ... #define TSS_I_BASE ... #define TSS_SUCCESS ... #define TSS_E_FAIL ... #define TSS_E_BAD_PARAMETER ... #define TSS_E_INTERNAL_ERROR ... #define TSS_E_OUTOFMEMORY ... #define TSS_E_NOTIMPL ... #define TSS_E_KEY_ALREADY_REGISTERED ... #define TSS_E_TPM_UNEXPECTED ... #define TSS_E_COMM_FAILURE ... #define TSS_E_TIMEOUT ... #define TSS_E_TPM_UNSUPPORTED_FEATURE ... #define TSS_E_CANCELED ... #define TSS_E_PS_KEY_NOTFOUND ... #define TSS_E_PS_KEY_EXISTS ... #define TSS_E_PS_BAD_KEY_STATE ... #define TSS_E_INVALID_OBJECT_TYPE ... #define TSS_E_NO_CONNECTION ... #define TSS_E_CONNECTION_FAILED ... #define TSS_E_CONNECTION_BROKEN ... #define TSS_E_HASH_INVALID_ALG ... #define TSS_E_HASH_INVALID_LENGTH ... #define TSS_E_HASH_NO_DATA ... #define TSS_E_INVALID_ATTRIB_FLAG ... #define TSS_E_INVALID_ATTRIB_SUBFLAG ... #define TSS_E_INVALID_ATTRIB_DATA ... #define TSS_E_INVALID_OBJECT_INIT_FLAG ... #define TSS_E_INVALID_OBJECT_INITFLAG ... #define TSS_E_NO_PCRS_SET ... #define TSS_E_KEY_NOT_LOADED ... #define TSS_E_KEY_NOT_SET ... #define TSS_E_VALIDATION_FAILED ... #define TSS_E_TSP_AUTHREQUIRED ... #define TSS_E_TSP_AUTH2REQUIRED ... #define TSS_E_TSP_AUTHFAIL ... #define TSS_E_TSP_AUTH2FAIL ... #define TSS_E_KEY_NO_MIGRATION_POLICY ... #define TSS_E_POLICY_NO_SECRET ... #define TSS_E_INVALID_OBJ_ACCESS ... #define TSS_E_INVALID_ENCSCHEME ... #define TSS_E_INVALID_SIGSCHEME ... #define TSS_E_ENC_INVALID_LENGTH ... #define TSS_E_ENC_NO_DATA ... #define TSS_E_ENC_INVALID_TYPE ... #define TSS_E_INVALID_KEYUSAGE ... #define TSS_E_VERIFICATION_FAILED ... #define TSS_E_HASH_NO_IDENTIFIER ... 
#define TSS_E_INVALID_HANDLE ... #define TSS_E_SILENT_CONTEXT ... #define TSS_E_EK_CHECKSUM ... #define TSS_E_DELEGATION_NOTSET ... #define TSS_E_DELFAMILY_NOTFOUND ... #define TSS_E_DELFAMILY_ROWEXISTS ... #define TSS_E_VERSION_MISMATCH ... #define TSS_E_DAA_AR_DECRYPTION_ERROR ... #define TSS_E_DAA_AUTHENTICATION_ERROR ... #define TSS_E_DAA_CHALLENGE_RESPONSE_ERROR ... #define TSS_E_DAA_CREDENTIAL_PROOF_ERROR ... #define TSS_E_DAA_CREDENTIAL_REQUEST_PROOF_ERROR ... #define TSS_E_DAA_ISSUER_KEY_ERROR ... #define TSS_E_DAA_PSEUDONYM_ERROR ... #define TSS_E_INVALID_RESOURCE ... #define TSS_E_NV_AREA_EXIST ... #define TSS_E_NV_AREA_NOT_EXIST ... #define TSS_E_TSP_TRANS_AUTHFAIL ... #define TSS_E_TSP_TRANS_AUTHREQUIRED ... #define TSS_E_TSP_TRANS_NOTEXCLUSIVE ... #define TSS_E_TSP_TRANS_FAIL ... #define TSS_E_TSP_TRANS_NO_PUBKEY ... #define TSS_E_NO_ACTIVE_COUNTER ... #define TSS_OBJECT_TYPE_POLICY ... #define TSS_OBJECT_TYPE_RSAKEY ... #define TSS_OBJECT_TYPE_ENCDATA ... #define TSS_OBJECT_TYPE_PCRS ... #define TSS_OBJECT_TYPE_HASH ... #define TSS_OBJECT_TYPE_DELFAMILY ... #define TSS_OBJECT_TYPE_NV ... #define TSS_OBJECT_TYPE_MIGDATA ... #define TSS_OBJECT_TYPE_DAA_CERTIFICATE ... #define TSS_OBJECT_TYPE_DAA_ISSUER_KEY ... #define TSS_OBJECT_TYPE_DAA_ARA_KEY ... #define TSS_KEY_NO_AUTHORIZATION ... #define TSS_KEY_AUTHORIZATION ... #define TSS_KEY_AUTHORIZATION_PRIV_USE_ONLY ... #define TSS_KEY_NON_VOLATILE ... #define TSS_KEY_VOLATILE ... #define TSS_KEY_NOT_MIGRATABLE ... #define TSS_KEY_MIGRATABLE ... #define TSS_KEY_TYPE_DEFAULT ... #define TSS_KEY_TYPE_SIGNING ... #define TSS_KEY_TYPE_STORAGE ... #define TSS_KEY_TYPE_IDENTITY ... #define TSS_KEY_TYPE_AUTHCHANGE ... #define TSS_KEY_TYPE_BIND ... #define TSS_KEY_TYPE_LEGACY ... #define TSS_KEY_TYPE_MIGRATE ... #define TSS_KEY_TYPE_BITMASK ... #define TSS_KEY_SIZE_DEFAULT ... #define TSS_KEY_SIZE_512 ... #define TSS_KEY_SIZE_1024 ... #define TSS_KEY_SIZE_2048 ... #define TSS_KEY_SIZE_4096 ... 
#define TSS_KEY_SIZE_8192 ... #define TSS_KEY_SIZE_16384 ... #define TSS_KEY_SIZE_BITMASK ... #define TSS_KEY_NOT_CERTIFIED_MIGRATABLE ... #define TSS_KEY_CERTIFIED_MIGRATABLE ... #define TSS_KEY_STRUCT_DEFAULT ... #define TSS_KEY_STRUCT_KEY ... #define TSS_KEY_STRUCT_KEY12 ... #define TSS_KEY_STRUCT_BITMASK ... #define TSS_KEY_EMPTY_KEY ... #define TSS_KEY_TSP_SRK ... #define TSS_KEY_TEMPLATE_BITMASK ... #define TSS_ENCDATA_SEAL ... #define TSS_ENCDATA_BIND ... #define TSS_ENCDATA_LEGACY ... #define TSS_HASH_DEFAULT ... #define TSS_HASH_SHA1 ... #define TSS_HASH_OTHER ... #define TSS_POLICY_USAGE ... #define TSS_POLICY_MIGRATION ... #define TSS_POLICY_OPERATOR ... #define TSS_PCRS_STRUCT_DEFAULT ... #define TSS_PCRS_STRUCT_INFO ... #define TSS_PCRS_STRUCT_INFO_LONG ... #define TSS_PCRS_STRUCT_INFO_SHORT ... #define TSS_TSPATTRIB_CONTEXT_SILENT_MODE ... #define TSS_TSPATTRIB_CONTEXT_MACHINE_NAME ... #define TSS_TSPATTRIB_CONTEXT_VERSION_MODE ... #define TSS_TSPATTRIB_CONTEXT_TRANSPORT ... #define TSS_TSPATTRIB_CONTEXT_CONNECTION_VERSION ... #define TSS_TSPATTRIB_SECRET_HASH_MODE ... #define TSS_TSPATTRIB_CONTEXTTRANS_CONTROL ... #define TSS_TSPATTRIB_CONTEXTTRANS_MODE ... #define TSS_TSPATTRIB_CONTEXT_NOT_SILENT ... #define TSS_TSPATTRIB_CONTEXT_SILENT ... #define TSS_TSPATTRIB_CONTEXT_VERSION_AUTO ... #define TSS_TSPATTRIB_CONTEXT_VERSION_V1_1 ... #define TSS_TSPATTRIB_CONTEXT_VERSION_V1_2 ... #define TSS_TSPATTRIB_DISABLE_TRANSPORT ... #define TSS_TSPATTRIB_ENABLE_TRANSPORT ... #define TSS_TSPATTRIB_TRANSPORT_NO_DEFAULT_ENCRYPTION ... #define TSS_TSPATTRIB_TRANSPORT_DEFAULT_ENCRYPTION ... #define TSS_TSPATTRIB_TRANSPORT_AUTHENTIC_CHANNEL ... #define TSS_TSPATTRIB_TRANSPORT_EXCLUSIVE ... #define TSS_TSPATTRIB_TRANSPORT_STATIC_AUTH ... #define TSS_CONNECTION_VERSION_1_1 ... #define TSS_CONNECTION_VERSION_1_2 ... #define TSS_TSPATTRIB_SECRET_HASH_MODE_POPUP ... #define TSS_TSPATTRIB_HASH_MODE_NOT_NULL ... #define TSS_TSPATTRIB_HASH_MODE_NULL ... 
#define TSS_TSPATTRIB_TPM_CALLBACK_COLLATEIDENTITY ... #define TSS_TSPATTRIB_TPM_CALLBACK_ACTIVATEIDENTITY ... #define TSS_TSPATTRIB_TPM_ORDINAL_AUDIT_STATUS ... #define TSS_TSPATTRIB_TPM_CREDENTIAL ... #define TPM_CAP_PROP_TPM_CLEAR_ORDINAL_AUDIT ... #define TPM_CAP_PROP_TPM_SET_ORDINAL_AUDIT ... #define TSS_TPMATTRIB_EKCERT ... #define TSS_TPMATTRIB_TPM_CC ... #define TSS_TPMATTRIB_PLATFORMCERT ... #define TSS_TPMATTRIB_PLATFORM_CC ... #define TSS_TSPATTRIB_POLICY_CALLBACK_HMAC ... #define TSS_TSPATTRIB_POLICY_CALLBACK_XOR_ENC ... #define TSS_TSPATTRIB_POLICY_CALLBACK_TAKEOWNERSHIP ... #define TSS_TSPATTRIB_POLICY_CALLBACK_CHANGEAUTHASYM ... #define TSS_TSPATTRIB_POLICY_SECRET_LIFETIME ... #define TSS_TSPATTRIB_POLICY_POPUPSTRING ... #define TSS_TSPATTRIB_POLICY_CALLBACK_SEALX_MASK ... #define TSS_TSPATTRIB_SECRET_HASH_MODE ... #define TSS_TSPATTRIB_POLICY_DELEGATION_INFO ... #define TSS_TSPATTRIB_POLICY_DELEGATION_PCR ... #define TSS_SECRET_LIFETIME_ALWAYS ... #define TSS_SECRET_LIFETIME_COUNTER ... #define TSS_SECRET_LIFETIME_TIMER ... #define TSS_TSPATTRIB_POLSECRET_LIFETIME_ALWAYS ... #define TSS_TSPATTRIB_POLSECRET_LIFETIME_COUNTER ... #define TSS_TSPATTRIB_POLSECRET_LIFETIME_TIMER ... #define TSS_TSPATTRIB_POLICYSECRET_LIFETIME_ALWAYS ... #define TSS_TSPATTRIB_POLICYSECRET_LIFETIME_COUNTER ... #define TSS_TSPATTRIB_POLICYSECRET_LIFETIME_TIMER ... #define TSS_TSPATTRIB_POLDEL_TYPE ... #define TSS_TSPATTRIB_POLDEL_INDEX ... #define TSS_TSPATTRIB_POLDEL_PER1 ... #define TSS_TSPATTRIB_POLDEL_PER2 ... #define TSS_TSPATTRIB_POLDEL_LABEL ... #define TSS_TSPATTRIB_POLDEL_FAMILYID ... #define TSS_TSPATTRIB_POLDEL_VERCOUNT ... #define TSS_TSPATTRIB_POLDEL_OWNERBLOB ... #define TSS_TSPATTRIB_POLDEL_KEYBLOB ... #define TSS_TSPATTRIB_POLDELPCR_LOCALITY ... #define TSS_TSPATTRIB_POLDELPCR_DIGESTATRELEASE ... #define TSS_TSPATTRIB_POLDELPCR_SELECTION ... #define TSS_DELEGATIONTYPE_NONE ... #define TSS_DELEGATIONTYPE_OWNER ... #define TSS_DELEGATIONTYPE_KEY ... 
#define TSS_SECRET_MODE_NONE ... #define TSS_SECRET_MODE_SHA1 ... #define TSS_SECRET_MODE_PLAIN ... #define TSS_SECRET_MODE_POPUP ... #define TSS_SECRET_MODE_CALLBACK ... #define TSS_TSPATTRIB_ENCDATA_BLOB ... #define TSS_TSPATTRIB_ENCDATA_PCR ... #define TSS_TSPATTRIB_ENCDATA_PCR_LONG ... #define TSS_TSPATTRIB_ENCDATA_SEAL ... #define TSS_TSPATTRIB_ENCDATABLOB_BLOB ... #define TSS_TSPATTRIB_ENCDATAPCR_DIGEST_ATCREATION ... #define TSS_TSPATTRIB_ENCDATAPCR_DIGEST_ATRELEASE ... #define TSS_TSPATTRIB_ENCDATAPCR_SELECTION ... #define TSS_TSPATTRIB_ENCDATAPCR_DIGEST_RELEASE ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_LOCALITY_ATCREATION ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_LOCALITY_ATRELEASE ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_CREATION_SELECTION ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_RELEASE_SELECTION ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_DIGEST_ATCREATION ... #define TSS_TSPATTRIB_ENCDATAPCRLONG_DIGEST_ATRELEASE ... #define TSS_TSPATTRIB_ENCDATASEAL_PROTECT_MODE ... #define TSS_TSPATTRIB_ENCDATASEAL_NOPROTECT ... #define TSS_TSPATTRIB_ENCDATASEAL_PROTECT ... #define TSS_TSPATTRIB_ENCDATASEAL_NO_PROTECT ... #define TSS_TSPATTRIB_NV_INDEX ... #define TSS_TSPATTRIB_NV_PERMISSIONS ... #define TSS_TSPATTRIB_NV_STATE ... #define TSS_TSPATTRIB_NV_DATASIZE ... #define TSS_TSPATTRIB_NV_PCR ... #define TSS_TSPATTRIB_NVSTATE_READSTCLEAR ... #define TSS_TSPATTRIB_NVSTATE_WRITESTCLEAR ... #define TSS_TSPATTRIB_NVSTATE_WRITEDEFINE ... #define TSS_TSPATTRIB_NVPCR_READPCRSELECTION ... #define TSS_TSPATTRIB_NVPCR_READDIGESTATRELEASE ... #define TSS_TSPATTRIB_NVPCR_READLOCALITYATRELEASE ... #define TSS_TSPATTRIB_NVPCR_WRITEPCRSELECTION ... #define TSS_TSPATTRIB_NVPCR_WRITEDIGESTATRELEASE ... #define TSS_TSPATTRIB_NVPCR_WRITELOCALITYATRELEASE ... #define TSS_NV_TPM ... #define TSS_NV_PLATFORM ... #define TSS_NV_USER ... #define TSS_NV_DEFINED ... #define TSS_NV_MASK_TPM ... #define TSS_NV_MASK_PLATFORM ... #define TSS_NV_MASK_USER ... #define TSS_NV_MASK_DEFINED ... 
#define TSS_NV_MASK_RESERVED ... #define TSS_NV_MASK_PURVIEW ... #define TSS_NV_MASK_INDEX ... #define TSS_NV_INDEX_SESSIONS ... #define TSS_MIGATTRIB_MIGRATIONBLOB ... #define TSS_MIGATTRIB_MIGRATIONTICKET ... #define TSS_MIGATTRIB_AUTHORITY_DATA ... #define TSS_MIGATTRIB_MIG_AUTH_DATA ... #define TSS_MIGATTRIB_TICKET_DATA ... #define TSS_MIGATTRIB_PAYLOAD_TYPE ... #define TSS_MIGATTRIB_MIGRATION_XOR_BLOB ... #define TSS_MIGATTRIB_MIGRATION_REWRAPPED_BLOB ... #define TSS_MIGATTRIB_MIG_MSALIST_PUBKEY_BLOB ... #define TSS_MIGATTRIB_MIG_AUTHORITY_PUBKEY_BLOB ... #define TSS_MIGATTRIB_MIG_DESTINATION_PUBKEY_BLOB ... #define TSS_MIGATTRIB_MIG_SOURCE_PUBKEY_BLOB ... #define TSS_MIGATTRIB_MIG_REWRAPPED_BLOB ... #define TSS_MIGATTRIB_MIG_XOR_BLOB ... #define TSS_MIGATTRIB_AUTHORITY_DIGEST ... #define TSS_MIGATTRIB_AUTHORITY_APPROVAL_HMAC ... #define TSS_MIGATTRIB_AUTHORITY_MSALIST ... #define TSS_MIGATTRIB_MIG_AUTH_AUTHORITY_DIGEST ... #define TSS_MIGATTRIB_MIG_AUTH_DESTINATION_DIGEST ... #define TSS_MIGATTRIB_MIG_AUTH_SOURCE_DIGEST ... #define TSS_MIGATTRIB_TICKET_SIG_DIGEST ... #define TSS_MIGATTRIB_TICKET_SIG_VALUE ... #define TSS_MIGATTRIB_TICKET_SIG_TICKET ... #define TSS_MIGATTRIB_TICKET_RESTRICT_TICKET ... #define TSS_MIGATTRIB_PT_MIGRATE_RESTRICTED ... #define TSS_MIGATTRIB_PT_MIGRATE_EXTERNAL ... #define TSS_TSPATTRIB_HASH_IDENTIFIER ... #define TSS_TSPATTRIB_ALG_IDENTIFIER ... #define TSS_TSPATTRIB_PCRS_INFO ... #define TSS_TSPATTRIB_PCRSINFO_PCRSTRUCT ... #define TSS_TSPATTRIB_DELFAMILY_STATE ... #define TSS_TSPATTRIB_DELFAMILY_INFO ... #define TSS_TSPATTRIB_DELFAMILYSTATE_LOCKED ... #define TSS_TSPATTRIB_DELFAMILYSTATE_ENABLED ... #define TSS_TSPATTRIB_DELFAMILYINFO_LABEL ... #define TSS_TSPATTRIB_DELFAMILYINFO_VERCOUNT ... #define TSS_TSPATTRIB_DELFAMILYINFO_FAMILYID ... #define TSS_DELEGATE_INCREMENTVERIFICATIONCOUNT ... #define TSS_DELEGATE_CACHEOWNERDELEGATION_OVERWRITEEXISTING ... #define TSS_TSPATTRIB_DAACRED_COMMIT ... 
#define TSS_TSPATTRIB_DAACRED_ATTRIB_GAMMAS ... #define TSS_TSPATTRIB_DAACRED_CREDENTIAL_BLOB ... #define TSS_TSPATTRIB_DAACRED_CALLBACK_SIGN ... #define TSS_TSPATTRIB_DAACRED_CALLBACK_VERIFYSIGNATURE ... #define TSS_TSPATTRIB_DAACOMMIT_NUMBER ... #define TSS_TSPATTRIB_DAACOMMIT_SELECTION ... #define TSS_TSPATTRIB_DAACOMMIT_COMMITMENTS ... #define TSS_TSPATTRIB_DAAATTRIBGAMMAS_BLOB ... #define TSS_TSPATTRIB_DAAISSUERKEY_BLOB ... #define TSS_TSPATTRIB_DAAISSUERKEY_PUBKEY ... #define TSS_TSPATTRIB_DAAISSUERKEYBLOB_PUBLIC_KEY ... #define TSS_TSPATTRIB_DAAISSUERKEYBLOB_SECRET_KEY ... #define TSS_TSPATTRIB_DAAISSUERKEYBLOB_KEYBLOB ... #define TSS_TSPATTRIB_DAAISSUERKEYBLOB_PROOF ... #define TSS_TSPATTRIB_DAAISSUERKEYPUBKEY_NUM_ATTRIBS ... #define TSS_TSPATTRIB_DAAISSUERKEYPUBKEY_NUM_PLATFORM_ATTRIBS ... #define TSS_TSPATTRIB_DAAISSUERKEYPUBKEY_NUM_ISSUER_ATTRIBS ... #define TSS_TSPATTRIB_DAAARAKEY_BLOB ... #define TSS_TSPATTRIB_DAAARAKEYBLOB_PUBLIC_KEY ... #define TSS_TSPATTRIB_DAAARAKEYBLOB_SECRET_KEY ... #define TSS_TSPATTRIB_DAAARAKEYBLOB_KEYBLOB ... #define TSS_FLAG_DAA_PSEUDONYM_PLAIN ... #define TSS_FLAG_DAA_PSEUDONYM_ENCRYPTED ... #define TSS_TSPATTRIB_KEY_BLOB ... #define TSS_TSPATTRIB_KEY_INFO ... #define TSS_TSPATTRIB_KEY_UUID ... #define TSS_TSPATTRIB_KEY_PCR ... #define TSS_TSPATTRIB_RSAKEY_INFO ... #define TSS_TSPATTRIB_KEY_REGISTER ... #define TSS_TSPATTRIB_KEY_PCR_LONG ... #define TSS_TSPATTRIB_KEY_CONTROLBIT ... #define TSS_TSPATTRIB_KEY_CMKINFO ... #define TSS_TSPATTRIB_KEYBLOB_BLOB ... #define TSS_TSPATTRIB_KEYBLOB_PUBLIC_KEY ... #define TSS_TSPATTRIB_KEYBLOB_PRIVATE_KEY ... #define TSS_TSPATTRIB_KEYINFO_SIZE ... #define TSS_TSPATTRIB_KEYINFO_USAGE ... #define TSS_TSPATTRIB_KEYINFO_KEYFLAGS ... #define TSS_TSPATTRIB_KEYINFO_AUTHUSAGE ... #define TSS_TSPATTRIB_KEYINFO_ALGORITHM ... #define TSS_TSPATTRIB_KEYINFO_SIGSCHEME ... #define TSS_TSPATTRIB_KEYINFO_ENCSCHEME ... #define TSS_TSPATTRIB_KEYINFO_MIGRATABLE ... 
#define TSS_TSPATTRIB_KEYINFO_REDIRECTED ... #define TSS_TSPATTRIB_KEYINFO_VOLATILE ... #define TSS_TSPATTRIB_KEYINFO_AUTHDATAUSAGE ... #define TSS_TSPATTRIB_KEYINFO_VERSION ... #define TSS_TSPATTRIB_KEYINFO_CMK ... #define TSS_TSPATTRIB_KEYINFO_KEYSTRUCT ... #define TSS_TSPATTRIB_KEYCONTROL_OWNEREVICT ... #define TSS_TSPATTRIB_KEYINFO_RSA_EXPONENT ... #define TSS_TSPATTRIB_KEYINFO_RSA_MODULUS ... #define TSS_TSPATTRIB_KEYINFO_RSA_KEYSIZE ... #define TSS_TSPATTRIB_KEYINFO_RSA_PRIMES ... #define TSS_TSPATTRIB_KEYPCR_DIGEST_ATCREATION ... #define TSS_TSPATTRIB_KEYPCR_DIGEST_ATRELEASE ... #define TSS_TSPATTRIB_KEYPCR_SELECTION ... #define TSS_TSPATTRIB_KEYREGISTER_USER ... #define TSS_TSPATTRIB_KEYREGISTER_SYSTEM ... #define TSS_TSPATTRIB_KEYREGISTER_NO ... #define TSS_TSPATTRIB_KEYPCRLONG_LOCALITY_ATCREATION ... #define TSS_TSPATTRIB_KEYPCRLONG_LOCALITY_ATRELEASE ... #define TSS_TSPATTRIB_KEYPCRLONG_CREATION_SELECTION ... #define TSS_TSPATTRIB_KEYPCRLONG_RELEASE_SELECTION ... #define TSS_TSPATTRIB_KEYPCRLONG_DIGEST_ATCREATION ... #define TSS_TSPATTRIB_KEYPCRLONG_DIGEST_ATRELEASE ... #define TSS_TSPATTRIB_KEYINFO_CMK_MA_APPROVAL ... #define TSS_TSPATTRIB_KEYINFO_CMK_MA_DIGEST ... #define TSS_KEY_SIZEVAL_512BIT ... #define TSS_KEY_SIZEVAL_1024BIT ... #define TSS_KEY_SIZEVAL_2048BIT ... #define TSS_KEY_SIZEVAL_4096BIT ... #define TSS_KEY_SIZEVAL_8192BIT ... #define TSS_KEY_SIZEVAL_16384BIT ... #define TSS_KEYUSAGE_BIND ... #define TSS_KEYUSAGE_IDENTITY ... #define TSS_KEYUSAGE_LEGACY ... #define TSS_KEYUSAGE_SIGN ... #define TSS_KEYUSAGE_STORAGE ... #define TSS_KEYUSAGE_AUTHCHANGE ... #define TSS_KEYUSAGE_MIGRATE ... #define TSS_KEYFLAG_REDIRECTION ... #define TSS_KEYFLAG_MIGRATABLE ... #define TSS_KEYFLAG_VOLATILEKEY ... #define TSS_KEYFLAG_CERTIFIED_MIGRATABLE ... #define TSS_ALG_RSA ... #define TSS_ALG_DES ... #define TSS_ALG_3DES ... #define TSS_ALG_SHA ... #define TSS_ALG_HMAC ... #define TSS_ALG_AES128 ... #define TSS_ALG_AES192 ... #define TSS_ALG_AES256 ... 
#define TSS_ALG_XOR ... #define TSS_ALG_MGF1 ... #define TSS_ALG_AES ... #define TSS_ALG_DEFAULT ... #define TSS_ALG_DEFAULT_SIZE ... #define TSS_SS_NONE ... #define TSS_SS_RSASSAPKCS1V15_SHA1 ... #define TSS_SS_RSASSAPKCS1V15_DER ... #define TSS_SS_RSASSAPKCS1V15_INFO ... #define TSS_ES_NONE ... #define TSS_ES_RSAESPKCSV15 ... #define TSS_ES_RSAESOAEP_SHA1_MGF1 ... #define TSS_ES_SYM_CNT ... #define TSS_ES_SYM_OFB ... #define TSS_ES_SYM_CBC_PKCS5PAD ... #define TSS_PS_TYPE_USER ... #define TSS_PS_TYPE_SYSTEM ... #define TSS_MS_MIGRATE ... #define TSS_MS_REWRAP ... #define TSS_MS_MAINT ... #define TSS_MS_RESTRICT_MIGRATE ... #define TSS_MS_RESTRICT_APPROVE_DOUBLE ... #define TSS_MS_RESTRICT_MIGRATE_EXTERNAL ... #define TSS_KEYAUTH_AUTH_NEVER ... #define TSS_KEYAUTH_AUTH_ALWAYS ... #define TSS_KEYAUTH_AUTH_PRIV_USE_ONLY ... #define TSS_TPMSTATUS_DISABLEOWNERCLEAR ... #define TSS_TPMSTATUS_DISABLEFORCECLEAR ... #define TSS_TPMSTATUS_DISABLED ... #define TSS_TPMSTATUS_DEACTIVATED ... #define TSS_TPMSTATUS_OWNERSETDISABLE ... #define TSS_TPMSTATUS_SETOWNERINSTALL ... #define TSS_TPMSTATUS_DISABLEPUBEKREAD ... #define TSS_TPMSTATUS_ALLOWMAINTENANCE ... #define TSS_TPMSTATUS_PHYSPRES_LIFETIMELOCK ... #define TSS_TPMSTATUS_PHYSPRES_HWENABLE ... #define TSS_TPMSTATUS_PHYSPRES_CMDENABLE ... #define TSS_TPMSTATUS_PHYSPRES_LOCK ... #define TSS_TPMSTATUS_PHYSPRESENCE ... #define TSS_TPMSTATUS_PHYSICALDISABLE ... #define TSS_TPMSTATUS_CEKP_USED ... #define TSS_TPMSTATUS_PHYSICALSETDEACTIVATED ... #define TSS_TPMSTATUS_SETTEMPDEACTIVATED ... #define TSS_TPMSTATUS_POSTINITIALISE ... #define TSS_TPMSTATUS_TPMPOST ... #define TSS_TPMSTATUS_TPMPOSTLOCK ... #define TSS_TPMSTATUS_DISABLEPUBSRKREAD ... #define TSS_TPMSTATUS_MAINTENANCEUSED ... #define TSS_TPMSTATUS_OPERATORINSTALLED ... #define TSS_TPMSTATUS_OPERATOR_INSTALLED ... #define TSS_TPMSTATUS_FIPS ... #define TSS_TPMSTATUS_ENABLEREVOKEEK ... #define TSS_TPMSTATUS_ENABLE_REVOKEEK ... #define TSS_TPMSTATUS_NV_LOCK ... 
#define TSS_TPMSTATUS_TPM_ESTABLISHED ... #define TSS_TPMSTATUS_RESETLOCK ... #define TSS_TPMSTATUS_DISABLE_FULL_DA_LOGIC_INFO ... #define TSS_TPMCAP_ORD ... #define TSS_TPMCAP_ALG ... #define TSS_TPMCAP_FLAG ... #define TSS_TPMCAP_PROPERTY ... #define TSS_TPMCAP_VERSION ... #define TSS_TPMCAP_VERSION_VAL ... #define TSS_TPMCAP_NV_LIST ... #define TSS_TPMCAP_NV_INDEX ... #define TSS_TPMCAP_MFR ... #define TSS_TPMCAP_SYM_MODE ... #define TSS_TPMCAP_HANDLE ... #define TSS_TPMCAP_TRANS_ES ... #define TSS_TPMCAP_AUTH_ENCRYPT ... #define TSS_TPMCAP_SET_PERM_FLAGS ... #define TSS_TPMCAP_SET_VENDOR ... #define TSS_TPMCAP_DA_LOGIC ... #define TSS_TPMCAP_PROP_PCR ... #define TSS_TPMCAP_PROP_DIR ... #define TSS_TPMCAP_PROP_MANUFACTURER ... #define TSS_TPMCAP_PROP_SLOTS ... #define TSS_TPMCAP_PROP_KEYS ... #define TSS_TPMCAP_PROP_FAMILYROWS ... #define TSS_TPMCAP_PROP_DELEGATEROWS ... #define TSS_TPMCAP_PROP_OWNER ... #define TSS_TPMCAP_PROP_MAXKEYS ... #define TSS_TPMCAP_PROP_AUTHSESSIONS ... #define TSS_TPMCAP_PROP_MAXAUTHSESSIONS ... #define TSS_TPMCAP_PROP_TRANSESSIONS ... #define TSS_TPMCAP_PROP_MAXTRANSESSIONS ... #define TSS_TPMCAP_PROP_SESSIONS ... #define TSS_TPMCAP_PROP_MAXSESSIONS ... #define TSS_TPMCAP_PROP_CONTEXTS ... #define TSS_TPMCAP_PROP_MAXCONTEXTS ... #define TSS_TPMCAP_PROP_DAASESSIONS ... #define TSS_TPMCAP_PROP_MAXDAASESSIONS ... #define TSS_TPMCAP_PROP_DAA_INTERRUPT ... #define TSS_TPMCAP_PROP_COUNTERS ... #define TSS_TPMCAP_PROP_MAXCOUNTERS ... #define TSS_TPMCAP_PROP_ACTIVECOUNTER ... #define TSS_TPMCAP_PROP_MIN_COUNTER ... #define TSS_TPMCAP_PROP_TISTIMEOUTS ... #define TSS_TPMCAP_PROP_STARTUPEFFECTS ... #define TSS_TPMCAP_PROP_MAXCONTEXTCOUNTDIST ... #define TSS_TPMCAP_PROP_CMKRESTRICTION ... #define TSS_TPMCAP_PROP_DURATION ... #define TSS_TPMCAP_PROP_MAXNVAVAILABLE ... #define TSS_TPMCAP_PROP_INPUTBUFFERSIZE ... #define TSS_TPMCAP_PROP_REVISION ... #define TSS_TPMCAP_PROP_LOCALITIES_AVAIL ... #define TSS_RT_KEY ... #define TSS_RT_AUTH ... 
#define TSS_RT_TRANS ... #define TSS_RT_COUNTER ... #define TSS_TCSCAP_ALG ... #define TSS_TCSCAP_VERSION ... #define TSS_TCSCAP_CACHING ... #define TSS_TCSCAP_PERSSTORAGE ... #define TSS_TCSCAP_MANUFACTURER ... #define TSS_TCSCAP_PLATFORM_CLASS ... #define TSS_TCSCAP_TRANSPORT ... #define TSS_TCSCAP_PLATFORM_INFO ... #define TSS_TCSCAP_PROP_KEYCACHE ... #define TSS_TCSCAP_PROP_AUTHCACHE ... #define TSS_TCSCAP_PROP_MANUFACTURER_STR ... #define TSS_TCSCAP_PROP_MANUFACTURER_ID ... #define TSS_TCSCAP_PLATFORM_VERSION ... #define TSS_TCSCAP_PLATFORM_TYPE ... #define TSS_TCSCAP_TRANS_EXCLUSIVE ... #define TSS_TCSCAP_PROP_HOST_PLATFORM ... #define TSS_TCSCAP_PROP_ALL_PLATFORMS ... #define TSS_TSPCAP_ALG ... #define TSS_TSPCAP_VERSION ... #define TSS_TSPCAP_PERSSTORAGE ... #define TSS_TSPCAP_MANUFACTURER ... #define TSS_TSPCAP_RETURNVALUE_INFO ... #define TSS_TSPCAP_PLATFORM_INFO ... #define TSS_TSPCAP_PROP_MANUFACTURER_STR ... #define TSS_TSPCAP_PROP_MANUFACTURER_ID ... #define TSS_TSPCAP_PLATFORM_TYPE ... #define TSS_TSPCAP_PLATFORM_VERSION ... #define TSS_TSPCAP_PROP_RETURNVALUE_INFO ... #define TSS_EV_CODE_CERT ... #define TSS_EV_CODE_NOCERT ... #define TSS_EV_XML_CONFIG ... #define TSS_EV_NO_ACTION ... #define TSS_EV_SEPARATOR ... #define TSS_EV_ACTION ... #define TSS_EV_PLATFORM_SPECIFIC ... #define TSS_TSPCAP_RANDOMLIMIT ... #define TSS_PCRS_DIRECTION_CREATION ... #define TSS_PCRS_DIRECTION_RELEASE ... #define TSS_BLOB_STRUCT_VERSION ... #define TSS_BLOB_TYPE_KEY ... #define TSS_BLOB_TYPE_PUBKEY ... #define TSS_BLOB_TYPE_MIGKEY ... #define TSS_BLOB_TYPE_SEALEDDATA ... #define TSS_BLOB_TYPE_BOUNDDATA ... #define TSS_BLOB_TYPE_MIGTICKET ... #define TSS_BLOB_TYPE_PRIVATEKEY ... #define TSS_BLOB_TYPE_PRIVATEKEY_MOD1 ... #define TSS_BLOB_TYPE_RANDOM_XOR ... #define TSS_BLOB_TYPE_CERTIFY_INFO ... #define TSS_BLOB_TYPE_KEY_1_2 ... #define TSS_BLOB_TYPE_CERTIFY_INFO_2 ... #define TSS_BLOB_TYPE_CMK_MIG_KEY ... #define TSS_BLOB_TYPE_CMK_BYTE_STREAM ... 
#define TSS_CMK_DELEGATE_SIGNING ... #define TSS_CMK_DELEGATE_STORAGE ... #define TSS_CMK_DELEGATE_BIND ... #define TSS_CMK_DELEGATE_LEGACY ... #define TSS_CMK_DELEGATE_MIGRATE ... #define TSS_DAA_LENGTH_N ... #define TSS_DAA_LENGTH_F ... #define TSS_DAA_LENGTH_E ... #define TSS_DAA_LENGTH_E_PRIME ... #define TSS_DAA_LENGTH_V ... #define TSS_DAA_LENGTH_SAFETY ... #define TSS_DAA_LENGTH_HASH ... #define TSS_DAA_LENGTH_S ... #define TSS_DAA_LENGTH_GAMMA ... #define TSS_DAA_LENGTH_RHO ... #define TSS_DAA_LENGTH_MFG1_GAMMA ... #define TSS_DAA_LENGTH_MGF1_AR ... #define TPM_Vendor_Specific32 ... #define TPM_Vendor_Specific8 ... #define TPM_TAG_CONTEXTBLOB ... #define TPM_TAG_CONTEXT_SENSITIVE ... #define TPM_TAG_CONTEXTPOINTER ... #define TPM_TAG_CONTEXTLIST ... #define TPM_TAG_SIGNINFO ... #define TPM_TAG_PCR_INFO_LONG ... #define TPM_TAG_PERSISTENT_FLAGS ... #define TPM_TAG_VOLATILE_FLAGS ... #define TPM_TAG_PERSISTENT_DATA ... #define TPM_TAG_VOLATILE_DATA ... #define TPM_TAG_SV_DATA ... #define TPM_TAG_EK_BLOB ... #define TPM_TAG_EK_BLOB_AUTH ... #define TPM_TAG_COUNTER_VALUE ... #define TPM_TAG_TRANSPORT_INTERNAL ... #define TPM_TAG_TRANSPORT_LOG_IN ... #define TPM_TAG_TRANSPORT_LOG_OUT ... #define TPM_TAG_AUDIT_EVENT_IN ... #define TPM_TAG_AUDIT_EVENT_OUT ... #define TPM_TAG_CURRENT_TICKS ... #define TPM_TAG_KEY ... #define TPM_TAG_STORED_DATA12 ... #define TPM_TAG_NV_ATTRIBUTES ... #define TPM_TAG_NV_DATA_PUBLIC ... #define TPM_TAG_NV_DATA_SENSITIVE ... #define TPM_TAG_DELEGATIONS ... #define TPM_TAG_DELEGATE_PUBLIC ... #define TPM_TAG_DELEGATE_TABLE_ROW ... #define TPM_TAG_TRANSPORT_AUTH ... #define TPM_TAG_TRANSPORT_PUBLIC ... #define TPM_TAG_PERMANENT_FLAGS ... #define TPM_TAG_STCLEAR_FLAGS ... #define TPM_TAG_STANY_FLAGS ... #define TPM_TAG_PERMANENT_DATA ... #define TPM_TAG_STCLEAR_DATA ... #define TPM_TAG_STANY_DATA ... #define TPM_TAG_FAMILY_TABLE_ENTRY ... #define TPM_TAG_DELEGATE_SENSITIVE ... #define TPM_TAG_DELG_KEY_BLOB ... #define TPM_TAG_KEY12 ... 
#define TPM_TAG_CERTIFY_INFO2 ... #define TPM_TAG_DELEGATE_OWNER_BLOB ... #define TPM_TAG_EK_BLOB_ACTIVATE ... #define TPM_TAG_DAA_BLOB ... #define TPM_TAG_DAA_CONTEXT ... #define TPM_TAG_DAA_ENFORCE ... #define TPM_TAG_DAA_ISSUER ... #define TPM_TAG_CAP_VERSION_INFO ... #define TPM_TAG_DAA_SENSITIVE ... #define TPM_TAG_DAA_TPM ... #define TPM_TAG_CMK_MIGAUTH ... #define TPM_TAG_CMK_SIGTICKET ... #define TPM_TAG_CMK_MA_APPROVAL ... #define TPM_TAG_QUOTE_INFO2 ... #define TPM_TAG_DA_INFO ... #define TPM_TAG_DA_INFO_LIMITED ... #define TPM_TAG_DA_ACTION_TYPE ... #define TPM_RT_KEY ... #define TPM_RT_AUTH ... #define TPM_RT_HASH ... #define TPM_RT_TRANS ... #define TPM_RT_CONTEXT ... #define TPM_RT_COUNTER ... #define TPM_RT_DELEGATE ... #define TPM_RT_DAA_TPM ... #define TPM_RT_DAA_V0 ... #define TPM_RT_DAA_V1 ... #define TPM_PT_ASYM ... #define TPM_PT_BIND ... #define TPM_PT_MIGRATE ... #define TPM_PT_MAINT ... #define TPM_PT_SEAL ... #define TPM_PT_MIGRATE_RESTRICTED ... #define TPM_PT_MIGRATE_EXTERNAL ... #define TPM_PT_CMK_MIGRATE ... #define TPM_ET_KEYHANDLE ... #define TPM_ET_OWNER ... #define TPM_ET_DATA ... #define TPM_ET_SRK ... #define TPM_ET_KEY ... #define TPM_ET_REVOKE ... #define TPM_ET_DEL_OWNER_BLOB ... #define TPM_ET_DEL_ROW ... #define TPM_ET_DEL_KEY_BLOB ... #define TPM_ET_COUNTER ... #define TPM_ET_NV ... #define TPM_ET_OPERATOR ... #define TPM_ET_RESERVED_HANDLE ... #define TPM_ET_XOR ... #define TPM_ET_AES ... #define TPM_KH_SRK ... #define TPM_KH_OWNER ... #define TPM_KH_REVOKE ... #define TPM_KH_TRANSPORT ... #define TPM_KH_OPERATOR ... #define TPM_KH_ADMIN ... #define TPM_KH_EK ... #define TPM_KEYHND_SRK ... #define TPM_KEYHND_OWNER ... #define TPM_ST_CLEAR ... #define TPM_ST_STATE ... #define TPM_ST_DEACTIVATED ... #define TPM_PID_OIAP ... #define TPM_PID_OSAP ... #define TPM_PID_ADIP ... #define TPM_PID_ADCP ... #define TPM_PID_OWNER ... #define TPM_PID_DSAP ... #define TPM_PID_TRANSPORT ... #define TPM_ALG_RSA ... #define TPM_ALG_DES ... 
#define TPM_ALG_3DES ... #define TPM_ALG_SHA ... #define TPM_ALG_HMAC ... #define TPM_ALG_AES ... #define TPM_ALG_AES128 ... #define TPM_ALG_MGF1 ... #define TPM_ALG_AES192 ... #define TPM_ALG_AES256 ... #define TPM_ALG_XOR ... #define TPM_PHYSICAL_PRESENCE_LOCK ... #define TPM_PHYSICAL_PRESENCE_PRESENT ... #define TPM_PHYSICAL_PRESENCE_NOTPRESENT ... #define TPM_PHYSICAL_PRESENCE_CMD_ENABLE ... #define TPM_PHYSICAL_PRESENCE_HW_ENABLE ... #define TPM_PHYSICAL_PRESENCE_LIFETIME_LOCK ... #define TPM_PHYSICAL_PRESENCE_CMD_DISABLE ... #define TPM_PHYSICAL_PRESENCE_HW_DISABLE ... #define TPM_MS_MIGRATE ... #define TPM_MS_REWRAP ... #define TPM_MS_MAINT ... #define TPM_MS_RESTRICT_MIGRATE ... #define TPM_MS_RESTRICT_APPROVE_DOUBLE ... #define TPM_EK_TYPE_ACTIVATE ... #define TPM_EK_TYPE_AUTH ... #define TPM_PS_PC_11 ... #define TPM_PS_PC_12 ... #define TPM_PS_PDA_12 ... #define TPM_PS_Server_12 ... #define TPM_PS_Mobile_12 ... #define TPM_SHA1_160_HASH_LEN ... #define TPM_SHA1BASED_NONCE_LEN ... #define TPM_KEY_SIGNING ... #define TPM_KEY_STORAGE ... #define TPM_KEY_IDENTITY ... #define TPM_KEY_AUTHCHANGE ... #define TPM_KEY_BIND ... #define TPM_KEY_LEGACY ... #define TPM_KEY_MIGRATE ... #define TPM_SS_NONE ... #define TPM_SS_RSASSAPKCS1v15_SHA1 ... #define TPM_SS_RSASSAPKCS1v15_DER ... #define TPM_SS_RSASSAPKCS1v15_INFO ... #define TPM_ES_NONE ... #define TPM_ES_RSAESPKCSv15 ... #define TPM_ES_RSAESOAEP_SHA1_MGF1 ... #define TPM_ES_SYM_CNT ... #define TPM_ES_SYM_CTR ... #define TPM_ES_SYM_OFB ... #define TPM_ES_SYM_CBC_PKCS5PAD ... #define TPM_AUTH_NEVER ... #define TPM_AUTH_ALWAYS ... #define TPM_AUTH_PRIV_USE_ONLY ... #define TPM_REDIRECTION ... #define TPM_MIGRATABLE ... #define TPM_VOLATILE ... #define TPM_PCRIGNOREDONREAD ... #define TPM_MIGRATEAUTHORITY ... #define TPM_CMK_DELEGATE_SIGNING ... #define TPM_CMK_DELEGATE_STORAGE ... #define TPM_CMK_DELEGATE_BIND ... #define TPM_CMK_DELEGATE_LEGACY ... #define TPM_CMK_DELEGATE_MIGRATE ... 
#define TPM_TAG_RQU_COMMAND ... #define TPM_TAG_RQU_AUTH1_COMMAND ... #define TPM_TAG_RQU_AUTH2_COMMAND ... #define TPM_TAG_RSP_COMMAND ... #define TPM_TAG_RSP_AUTH1_COMMAND ... #define TPM_TAG_RSP_AUTH2_COMMAND ... #define TPM_PF_DISABLE ... #define TPM_PF_OWNERSHIP ... #define TPM_PF_DEACTIVATED ... #define TPM_PF_READPUBEK ... #define TPM_PF_DISABLEOWNERCLEAR ... #define TPM_PF_ALLOWMAINTENANCE ... #define TPM_PF_PHYSICALPRESENCELIFETIMELOCK ... #define TPM_PF_PHYSICALPRESENCEHWENABLE ... #define TPM_PF_PHYSICALPRESENCECMDENABLE ... #define TPM_PF_CEKPUSED ... #define TPM_PF_TPMPOST ... #define TPM_PF_TPMPOSTLOCK ... #define TPM_PF_FIPS ... #define TPM_PF_OPERATOR ... #define TPM_PF_ENABLEREVOKEEK ... #define TPM_PF_NV_LOCKED ... #define TPM_PF_READSRKPUB ... #define TPM_PF_RESETESTABLISHMENTBIT ... #define TPM_PF_MAINTENANCEDONE ... #define TPM_PF_DISABLEFULLDALOGICINFO ... #define TPM_SF_DEACTIVATED ... #define TPM_SF_DISABLEFORCECLEAR ... #define TPM_SF_PHYSICALPRESENCE ... #define TPM_SF_PHYSICALPRESENCELOCK ... #define TPM_SF_GLOBALLOCK ... #define TPM_AF_POSTINITIALIZE ... #define TPM_AF_LOCALITYMODIFIER ... #define TPM_AF_TRANSPORTEXCLUSIVE ... #define TPM_AF_TOSPRESENT ... //#define TPM_MIN_COUNTERS ... //#define TPM_NUM_PCR ... //#define TPM_MAX_NV_WRITE_NOOWNER ... #define TPM_LOC_FOUR ... #define TPM_LOC_THREE ... #define TPM_LOC_TWO ... #define TPM_LOC_ONE ... #define TPM_LOC_ZERO ... #define TPM_KEY_CONTROL_OWNER_EVICT ... #define TPM_TRANSPORT_ENCRYPT ... #define TPM_TRANSPORT_LOG ... #define TPM_TRANSPORT_EXCLUSIVE ... #define TPM_NV_INDEX_LOCK ... #define TPM_NV_INDEX0 ... #define TPM_NV_INDEX_DIR ... #define TPM_NV_INDEX_EKCert ... #define TPM_NV_INDEX_TPM_CC ... #define TPM_NV_INDEX_PlatformCert ... #define TPM_NV_INDEX_Platform_CC ... #define TPM_NV_INDEX_TSS_BASE ... #define TPM_NV_INDEX_PC_BASE ... #define TPM_NV_INDEX_SERVER_BASE ... #define TPM_NV_INDEX_MOBILE_BASE ... #define TPM_NV_INDEX_PERIPHERAL_BASE ... 
#define TPM_NV_INDEX_GROUP_RESV_BASE ... #define TPM_NV_PER_READ_STCLEAR ... #define TPM_NV_PER_AUTHREAD ... #define TPM_NV_PER_OWNERREAD ... #define TPM_NV_PER_PPREAD ... #define TPM_NV_PER_GLOBALLOCK ... #define TPM_NV_PER_WRITE_STCLEAR ... #define TPM_NV_PER_WRITEDEFINE ... #define TPM_NV_PER_WRITEALL ... #define TPM_NV_PER_AUTHWRITE ... #define TPM_NV_PER_OWNERWRITE ... #define TPM_NV_PER_PPWRITE ... #define TPM_DELEGATE_SetOrdinalAuditStatus ... #define TPM_DELEGATE_DirWriteAuth ... #define TPM_DELEGATE_CMK_ApproveMA ... #define TPM_DELEGATE_NV_WriteValue ... #define TPM_DELEGATE_CMK_CreateTicket ... #define TPM_DELEGATE_NV_ReadValue ... #define TPM_DELEGATE_Delegate_LoadOwnerDelegation ... #define TPM_DELEGATE_DAA_Join ... #define TPM_DELEGATE_AuthorizeMigrationKey ... #define TPM_DELEGATE_CreateMaintenanceArchive ... #define TPM_DELEGATE_LoadMaintenanceArchive ... #define TPM_DELEGATE_KillMaintenanceFeature ... #define TPM_DELEGATE_OwnerReadInternalPub ... #define TPM_DELEGATE_ResetLockValue ... #define TPM_DELEGATE_OwnerClear ... #define TPM_DELEGATE_DisableOwnerClear ... #define TPM_DELEGATE_NV_DefineSpace ... #define TPM_DELEGATE_OwnerSetDisable ... #define TPM_DELEGATE_SetCapability ... #define TPM_DELEGATE_MakeIdentity ... #define TPM_DELEGATE_ActivateIdentity ... #define TPM_DELEGATE_OwnerReadPubek ... #define TPM_DELEGATE_DisablePubekRead ... #define TPM_DELEGATE_SetRedirection ... #define TPM_DELEGATE_FieldUpgrade ... #define TPM_DELEGATE_Delegate_UpdateVerification ... #define TPM_DELEGATE_CreateCounter ... #define TPM_DELEGATE_ReleaseCounterOwner ... #define TPM_DELEGATE_DelegateManage ... #define TPM_DELEGATE_Delegate_CreateOwnerDelegation ... #define TPM_DELEGATE_DAA_Sign ... #define TPM_KEY_DELEGATE_CMK_ConvertMigration ... #define TPM_KEY_DELEGATE_TickStampBlob ... #define TPM_KEY_DELEGATE_ChangeAuthAsymStart ... #define TPM_KEY_DELEGATE_ChangeAuthAsymFinish ... #define TPM_KEY_DELEGATE_CMK_CreateKey ... #define TPM_KEY_DELEGATE_MigrateKey ... 
#define TPM_KEY_DELEGATE_LoadKey2 ... #define TPM_KEY_DELEGATE_EstablishTransport ... #define TPM_KEY_DELEGATE_ReleaseTransportSigned ... #define TPM_KEY_DELEGATE_Quote2 ... #define TPM_KEY_DELEGATE_Sealx ... #define TPM_KEY_DELEGATE_MakeIdentity ... #define TPM_KEY_DELEGATE_ActivateIdentity ... #define TPM_KEY_DELEGATE_GetAuditDigestSigned ... #define TPM_KEY_DELEGATE_Sign ... #define TPM_KEY_DELEGATE_CertifyKey2 ... #define TPM_KEY_DELEGATE_CertifyKey ... #define TPM_KEY_DELEGATE_CreateWrapKey ... #define TPM_KEY_DELEGATE_CMK_CreateBlob ... #define TPM_KEY_DELEGATE_CreateMigrationBlob ... #define TPM_KEY_DELEGATE_ConvertMigrationBlob ... #define TPM_KEY_DELEGATE_CreateKeyDelegation ... #define TPM_KEY_DELEGATE_ChangeAuth ... #define TPM_KEY_DELEGATE_GetPubKey ... #define TPM_KEY_DELEGATE_UnBind ... #define TPM_KEY_DELEGATE_Quote ... #define TPM_KEY_DELEGATE_Unseal ... #define TPM_KEY_DELEGATE_Seal ... #define TPM_KEY_DELEGATE_LoadKey ... #define TPM_FAMILY_CREATE ... #define TPM_FAMILY_ENABLE ... #define TPM_FAMILY_ADMIN ... #define TPM_FAMILY_INVALIDATE ... #define TPM_FAMFLAG_DELEGATE_ADMIN_LOCK ... #define TPM_FAMFLAG_ENABLE ... #define TPM_FAMILY_TABLE_ENTRY_MIN ... #define TPM_DEL_OWNER_BITS ... #define TPM_DEL_KEY_BITS ... #define TPM_NUM_DELEGATE_TABLE_ENTRY_MIN ... #define TPM_CAP_ORD ... #define TPM_CAP_ALG ... #define TPM_CAP_PID ... #define TPM_CAP_FLAG ... #define TPM_CAP_PROPERTY ... #define TPM_CAP_VERSION ... #define TPM_CAP_KEY_HANDLE ... #define TPM_CAP_CHECK_LOADED ... #define TPM_CAP_SYM_MODE ... #define TPM_CAP_KEY_STATUS ... #define TPM_CAP_NV_LIST ... #define TPM_CAP_MFR ... #define TPM_CAP_NV_INDEX ... #define TPM_CAP_TRANS_ALG ... #define TPM_CAP_HANDLE ... #define TPM_CAP_TRANS_ES ... #define TPM_CAP_AUTH_ENCRYPT ... #define TPM_CAP_SELECT_SIZE ... #define TPM_CAP_DA_LOGIC ... #define TPM_CAP_VERSION_VAL ... #define TPM_CAP_FLAG_PERMANENT ... #define TPM_CAP_FLAG_VOLATILE ... #define TPM_CAP_PROP_PCR ... #define TPM_CAP_PROP_DIR ... 
#define TPM_CAP_PROP_MANUFACTURER ... #define TPM_CAP_PROP_KEYS ... #define TPM_CAP_PROP_SLOTS ... #define TPM_CAP_PROP_MIN_COUNTER ... #define TPM_CAP_PROP_AUTHSESS ... #define TPM_CAP_PROP_TRANSSESS ... #define TPM_CAP_PROP_COUNTERS ... #define TPM_CAP_PROP_MAX_AUTHSESS ... #define TPM_CAP_PROP_MAX_TRANSSESS ... #define TPM_CAP_PROP_MAX_COUNTERS ... #define TPM_CAP_PROP_MAX_KEYS ... #define TPM_CAP_PROP_OWNER ... #define TPM_CAP_PROP_CONTEXT ... #define TPM_CAP_PROP_MAX_CONTEXT ... #define TPM_CAP_PROP_FAMILYROWS ... #define TPM_CAP_PROP_TIS_TIMEOUT ... #define TPM_CAP_PROP_STARTUP_EFFECT ... #define TPM_CAP_PROP_DELEGATE_ROW ... #define TPM_CAP_PROP_MAX_DAASESS ... #define TPM_CAP_PROP_DAA_MAX ... #define TPM_CAP_PROP_DAASESS ... #define TPM_CAP_PROP_SESSION_DAA ... #define TPM_CAP_PROP_CONTEXT_DIST ... #define TPM_CAP_PROP_DAA_INTERRUPT ... #define TPM_CAP_PROP_SESSIONS ... #define TPM_CAP_PROP_MAX_SESSIONS ... #define TPM_CAP_PROP_CMK_RESTRICTION ... #define TPM_CAP_PROP_DURATION ... #define TPM_CAP_PROP_ACTIVE_COUNTER ... #define TPM_CAP_PROP_NV_AVAILABLE ... #define TPM_CAP_PROP_INPUT_BUFFER ... #define TPM_SET_PERM_FLAGS ... #define TPM_SET_PERM_DATA ... #define TPM_SET_STCLEAR_FLAGS ... #define TPM_SET_STCLEAR_DATA ... #define TPM_SET_STANY_FLAGS ... #define TPM_SET_STANY_DATA ... #define TPM_SET_VENDOR ... #define TPM_DA_STATE_INACTIVE ... #define TPM_DA_STATE_ACTIVE ... #define TPM_DA_ACTION_TIMEOUT ... #define TPM_DA_ACTION_DISABLE ... #define TPM_DA_ACTION_DEACTIVATE ... #define TPM_DA_ACTION_FAILURE_MODE ... #define TPM_DAA_SIZE_r0 ... #define TPM_DAA_SIZE_r1 ... #define TPM_DAA_SIZE_r2 ... #define TPM_DAA_SIZE_r3 ... #define TPM_DAA_SIZE_r4 ... #define TPM_DAA_SIZE_NT ... #define TPM_DAA_SIZE_v0 ... #define TPM_DAA_SIZE_v1 ... #define TPM_DAA_SIZE_NE ... #define TPM_DAA_SIZE_w ... #define TPM_DAA_SIZE_issuerModulus ... #define TPM_DAA_power0 ... #define TPM_DAA_power1 ... #define TPM_REDIR_GPIO ... #define TPM_SYM_MODE_ECB ... 
#define TPM_SYM_MODE_CBC ... #define TPM_SYM_MODE_CFB ... #define TPM_E_BASE ... #define TPM_E_NON_FATAL ... #define TPM_SUCCESS ... #define TPM_E_AUTHFAIL ... #define TPM_E_BADINDEX ... #define TPM_E_BAD_PARAMETER ... #define TPM_E_AUDITFAILURE ... #define TPM_E_CLEAR_DISABLED ... #define TPM_E_DEACTIVATED ... #define TPM_E_DISABLED ... #define TPM_E_DISABLED_CMD ... #define TPM_E_FAIL ... #define TPM_E_BAD_ORDINAL ... #define TPM_E_INSTALL_DISABLED ... #define TPM_E_INVALID_KEYHANDLE ... #define TPM_E_KEYNOTFOUND ... #define TPM_E_INAPPROPRIATE_ENC ... #define TPM_E_MIGRATEFAIL ... #define TPM_E_INVALID_PCR_INFO ... #define TPM_E_NOSPACE ... #define TPM_E_NOSRK ... #define TPM_E_NOTSEALED_BLOB ... #define TPM_E_OWNER_SET ... #define TPM_E_RESOURCES ... #define TPM_E_SHORTRANDOM ... #define TPM_E_SIZE ... #define TPM_E_WRONGPCRVAL ... #define TPM_E_BAD_PARAM_SIZE ... #define TPM_E_SHA_THREAD ... #define TPM_E_SHA_ERROR ... #define TPM_E_FAILEDSELFTEST ... #define TPM_E_AUTH2FAIL ... #define TPM_E_BADTAG ... #define TPM_E_IOERROR ... #define TPM_E_ENCRYPT_ERROR ... #define TPM_E_DECRYPT_ERROR ... #define TPM_E_INVALID_AUTHHANDLE ... #define TPM_E_NO_ENDORSEMENT ... #define TPM_E_INVALID_KEYUSAGE ... #define TPM_E_WRONG_ENTITYTYPE ... #define TPM_E_INVALID_POSTINIT ... #define TPM_E_INAPPROPRIATE_SIG ... #define TPM_E_BAD_KEY_PROPERTY ... #define TPM_E_BAD_MIGRATION ... #define TPM_E_BAD_SCHEME ... #define TPM_E_BAD_DATASIZE ... #define TPM_E_BAD_MODE ... #define TPM_E_BAD_PRESENCE ... #define TPM_E_BAD_VERSION ... #define TPM_E_NO_WRAP_TRANSPORT ... #define TPM_E_AUDITFAIL_UNSUCCESSFUL ... #define TPM_E_AUDITFAIL_SUCCESSFUL ... #define TPM_E_NOTRESETABLE ... #define TPM_E_NOTLOCAL ... #define TPM_E_BAD_TYPE ... #define TPM_E_INVALID_RESOURCE ... #define TPM_E_NOTFIPS ... #define TPM_E_INVALID_FAMILY ... #define TPM_E_NO_NV_PERMISSION ... #define TPM_E_REQUIRES_SIGN ... #define TPM_E_KEY_NOTSUPPORTED ... #define TPM_E_AUTH_CONFLICT ... #define TPM_E_AREA_LOCKED ... 
#define TPM_E_BAD_LOCALITY ... #define TPM_E_READ_ONLY ... #define TPM_E_PER_NOWRITE ... #define TPM_E_FAMILYCOUNT ... #define TPM_E_WRITE_LOCKED ... #define TPM_E_BAD_ATTRIBUTES ... #define TPM_E_INVALID_STRUCTURE ... #define TPM_E_KEY_OWNER_CONTROL ... #define TPM_E_BAD_COUNTER ... #define TPM_E_NOT_FULLWRITE ... #define TPM_E_CONTEXT_GAP ... #define TPM_E_MAXNVWRITES ... #define TPM_E_NOOPERATOR ... #define TPM_E_RESOURCEMISSING ... #define TPM_E_DELEGATE_LOCK ... #define TPM_E_DELEGATE_FAMILY ... #define TPM_E_DELEGATE_ADMIN ... #define TPM_E_TRANSPORT_NOTEXCLUSIVE ... #define TPM_E_OWNER_CONTROL ... #define TPM_E_DAA_RESOURCES ... #define TPM_E_DAA_INPUT_DATA0 ... #define TPM_E_DAA_INPUT_DATA1 ... #define TPM_E_DAA_ISSUER_SETTINGS ... #define TPM_E_DAA_TPM_SETTINGS ... #define TPM_E_DAA_STAGE ... #define TPM_E_DAA_ISSUER_VALIDITY ... #define TPM_E_DAA_WRONG_W ... #define TPM_E_BAD_HANDLE ... #define TPM_E_BAD_DELEGATE ... #define TPM_E_BADCONTEXT ... #define TPM_E_TOOMANYCONTEXTS ... #define TPM_E_MA_TICKET_SIGNATURE ... #define TPM_E_MA_DESTINATION ... #define TPM_E_MA_SOURCE ... #define TPM_E_MA_AUTHORITY ... #define TPM_E_PERMANENTEK ... #define TPM_E_BAD_SIGNATURE ... #define TPM_E_NOCONTEXTSPACE ... #define TPM_E_RETRY ... #define TPM_E_NEEDS_SELFTEST ... #define TPM_E_DOING_SELFTEST ... #define TPM_E_DEFEND_LOCK_RUNNING ...
/**
 * Load the initial stock levels from {@code InventoryFile}.
 *
 * <p>Each line of the file is expected to be {@code name,quantity}; one
 * {@link IngredientItems} entry is added to {@code inventory} per line.
 * NOTE(review): the previous doc said "Add 10 stock for every inventory
 * item", but the code reads the quantity from the file — confirm which is
 * intended.
 *
 * <p>I/O problems are reported to stdout and otherwise ignored, preserving
 * the original best-effort behavior.
 */
private void initStock(){
    System.out.println("initStock");
    // try-with-resources guarantees the reader is closed even when a read
    // fails part-way through (the previous code leaked it on IOException).
    try (BufferedReader bufferedReader = new BufferedReader(new FileReader(InventoryFile))) {
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            String[] comp = line.split(",");
            inventory.addInventory(new IngredientItems(comp[0], Integer.parseInt(comp[1])));
        }
    }
    catch (FileNotFoundException ex) {
        System.out.println("Unable to Find file '" + InventoryFile + "'");
    }
    catch (IOException ex) {
        System.out.println("Error reading file '" + InventoryFile + "'");
    }
}
<filename>JavaCustomToolsFromLorenWang/src/javabase/lorenwang/tools/dataConversion/JtlwCodeConversionUtil.java<gh_stars>0 package javabase.lorenwang.tools.dataConversion; import org.jetbrains.annotations.NotNull; import javabase.lorenwang.tools.JtlwMatchesRegularCommon; /** * 功能作用:编码转换 * 创建时间:2020-12-12 9:34 下午 * 创建人:王亮(Loren) * 思路: * 方法: * 中文转unicode编码(chineseToUnicode) * unicode编码转中文(unicodeToChinese) * 注意: * 修改人: * 修改时间: * 备注: * * @author 王亮(Loren) */ public class JtlwCodeConversionUtil { private final String TAG = getClass().getName(); private static volatile JtlwCodeConversionUtil optionsInstance; private JtlwCodeConversionUtil() { } public static JtlwCodeConversionUtil getInstance() { if (optionsInstance == null) { synchronized (JtlwCodeConversionUtil.class) { if (optionsInstance == null) { optionsInstance = new JtlwCodeConversionUtil(); } } } return optionsInstance; } /** * 中文转unicode编码 * * @param dataStr 原始数据 * @return 转换后数据 */ public String chineseToUnicode(@NotNull String dataStr) { StringBuilder result = new StringBuilder(); String item; for (char cha : dataStr.toCharArray()) { item = Integer.toHexString(cha); result.append("\\u"); if (item.length() <= 2) { result.append("00"); } result.append(item); } return result.toString(); } /** * unicode编码转中文 * * @param dataStr 原始数据 * @return 编码后数据 */ public String unicodeToChinese(@NotNull String dataStr) { int start = 0; int end; for (String code : JtlwMatchesRegularCommon.getRegexResultList(dataStr, JtlwMatchesRegularCommon.EXP_CODE_CONVERSION_UNICODE, false)) { //16进制parse整形字符串 dataStr = dataStr.replace(code, String.valueOf((char) Integer.parseInt(code.substring(2), 16))); } return dataStr; } }
<gh_stars>10-100 /* * Copyright 2009-2017 Red Hat Inc. * * Licensed under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.pcp.parfait; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; public class MonitoringViewPropertiesTest { private String originalNameValue; private String originalClusterValue; private String originalIntervalValue; private String originalStartupValue; @Before public void setup() { // setProperty returns the old value of that property. 
originalNameValue = System.setProperty(MonitoringViewProperties.PARFAIT_NAME, "true"); originalClusterValue = System.setProperty(MonitoringViewProperties.PARFAIT_CLUSTER, "true"); originalIntervalValue = System.setProperty(MonitoringViewProperties.PARFAIT_INTERVAL, "true"); originalStartupValue = System.setProperty(MonitoringViewProperties.PARFAIT_STARTUP, "true"); } private void reset(String name, String value) { if (value == null) { System.clearProperty(name); } else { System.setProperty(name, value); } } @After public void teardown() { reset(MonitoringViewProperties.PARFAIT_NAME, originalNameValue); reset(MonitoringViewProperties.PARFAIT_CLUSTER, originalClusterValue); reset(MonitoringViewProperties.PARFAIT_INTERVAL, originalIntervalValue); reset(MonitoringViewProperties.PARFAIT_STARTUP, originalStartupValue); } @Test public void checkValidClusterSetting() { System.setProperty(MonitoringViewProperties.PARFAIT_CLUSTER, "123"); assertEquals("123", MonitoringViewProperties.getDefaultCluster("anyname")); } @Test public void checkDefaultClusterSetting() { System.clearProperty(MonitoringViewProperties.PARFAIT_CLUSTER); assertNotEquals("123", MonitoringViewProperties.getDefaultCluster("somename")); } @Test public void checkValidIntervalSetting() { System.clearProperty(MonitoringViewProperties.PARFAIT_INTERVAL); String interval = MonitoringViewProperties.getDefaultInterval(); System.setProperty(MonitoringViewProperties.PARFAIT_INTERVAL, "bad-do-not-modify"); assertEquals(interval, MonitoringViewProperties.getDefaultInterval()); } @Test public void checkInvalidIntervalSetting() { System.setProperty(MonitoringViewProperties.PARFAIT_INTERVAL, "13000"); assertEquals("13000", MonitoringViewProperties.getDefaultInterval()); System.clearProperty(MonitoringViewProperties.PARFAIT_INTERVAL); } @Test public void checkValidStartupSetting() { System.clearProperty(MonitoringViewProperties.PARFAIT_STARTUP); String startup = MonitoringViewProperties.getDefaultStartup(); 
System.setProperty(MonitoringViewProperties.PARFAIT_STARTUP, "bad-do-not-modify"); assertEquals(startup, MonitoringViewProperties.getDefaultStartup()); } @Test public void checkInvalidStartupSetting() { System.setProperty(MonitoringViewProperties.PARFAIT_STARTUP, "15000"); assertEquals("15000", MonitoringViewProperties.getDefaultStartup()); System.clearProperty(MonitoringViewProperties.PARFAIT_STARTUP); } }
from datetime import datetime
from quickbats.config import CONFIG
from quickbats.line_items import stripe_fee_line_item
from quickbats.line_items import transaction_line_item
from quickbats.line_items import vendor_unit_fee_line_item
from quickbats.qbo import CreateReceipt
from quickbats.shared import csv_rows
from quickbats.shared import data_file
from quickbats.shared import to_dec
import json
import logging

logger = logging.getLogger("quickbats")


def vbo_customer(qbo, row):
    """Find or create the QBO customer matching a VBO export row.

    Rows with no name/email collapse to a single anonymous placeholder
    customer with no contact attributes.
    """
    display_name = "%s %s (%s)" % (row['First Name'], row['Last Name'],
            row['Email Address'])
    if display_name == " ()":
        display_name = "<NAME>"
        customer_attrs = {}
    else:
        customer_attrs = {
            "PrimaryEmailAddr" : row['Email Address'],
            "GivenName" : row['First Name'],
            "FamilyName" : row['Last Name'],
            "PrimaryPhone" : row['Phone']
        }
    return qbo.find_or_create_customer(display_name, customer_attrs)


def parse_vbo_transactions(qbo, payments):
    """Import VBO ticket transactions into QBO as sales receipts.

    Skips comps, exchanges, rows predating the configured start date,
    already-processed docs, and credit-card rows whose Stripe fee data is
    not yet available in ``payments``.
    """
    vbo_transactions_file = data_file("vbo", "transactions_file")

    advance_sale_item = qbo.get_item_by_name("General Admission - Advance")
    door_sale_item = qbo.get_item_by_name("General Admission - Door")
    subscription_sale_item = qbo.get_item_by_name("Ticket Subscription")
    vbo_fees_item = qbo.get_item_by_name("VBO Fees")
    stripe_fees_item = qbo.get_item_by_name("Stripe Fees (Ticketing)")
    credit_card_receivables_account = qbo.get_account_by_name("Stripe Receivables")
    qbo_class = qbo.get_class_by_name("3 Shows")

    for row in csv_rows(vbo_transactions_file, skip_rows=CONFIG['vbo']['header_row']):
        if row['ItemDescription'] == "Total:":
            logger.debug("reached end of records")
            break

        doc_number = "VBO-%s" % row['OrderID']
        order_date = datetime.strptime(row['Orders'], "%m/%d/%Y %I:%M:%S %p")
        total = to_dec(row['Total'])
        price = to_dec(row['Price'])
        qty = to_dec(row['Qty'])
        vbo_fee = to_dec(row['VBOFee'])
        is_credit_card = to_dec(row['CreditCard']) > 0 or to_dec(row['Amex']) > 0
        is_exchange = row['Other Name'] == 'Exchange'

        try:
            event_date = datetime.strptime(row['Event Date'],
                    "%m/%d/%Y %I:%M:%S %p")
        except ValueError:
            logger.debug("invalid event date %s" % row['Event Date'])
            event_date = None

        logger.debug("doc number is %s" % doc_number)

        if order_date < CONFIG['app']['start_date']:
            logger.debug("skipping because before %s" % CONFIG['app']['start_date'])
            continue
        elif qbo.already_processed(doc_number):
            continue
        elif total == 0.0:
            logger.debug("skipping because comp")
            continue
        elif is_exchange:
            logger.debug("skipping because exchange")
            # BUG FIX: this branch previously fell through and processed
            # the exchange anyway, despite logging that it was skipped.
            continue
        elif is_credit_card and (doc_number not in payments):
            msg = "fee information not available yet, skipping %s" % doc_number
            logger.info(msg)
            continue

        customer = vbo_customer(qbo, row)

        with CreateReceipt(qbo) as receipt:
            receipt.CustomerRef = customer.to_ref()
            receipt.DocNumber = doc_number
            receipt.TxnDate = order_date.strftime("%Y-%m-%d")

            notes = ["Imported via QBO API from VBO export."]
            if row['Notes']:
                notes.append(row['Notes'])
            note = u"\n".join(notes)
            receipt.PrivateNote = note

            is_door_sale = ("Door" in row['ItemName'])
            if is_door_sale:
                item = door_sale_item
            elif ("Advance" in row['ItemName']) or ("Admission" in row['ItemName']):
                item = advance_sale_item
            elif "Pack" in row['ItemName']:
                item = subscription_sale_item
            else:
                logger.error("\n%s" % json.dumps(row, sort_keys=True, indent=4))
                raise Exception("can't identify product '%s'" % row['ItemName'])

            description = u"%s; %s" % (row['ItemDescription'], row['Event Name'])
            receipt.Line.append(transaction_line_item(price, description, qty,
                item, event_date, qbo_class=qbo_class))

            if is_door_sale:
                # VBO fees are not added on top
                pass
            else:
                receipt.Line.append(vendor_unit_fee_line_item(-1 * vbo_fee, qty,
                    vbo_fees_item, order_date, qbo_class=qbo_class))

            if is_credit_card:
                stripe_fees_line = stripe_fee_line_item(payments, total,
                        doc_number, stripe_fees_item, order_date,
                        qbo_class=qbo_class)
                receipt.Line.append(stripe_fees_line)
                receipt.DepositToAccountRef = credit_card_receivables_account.to_ref()
/**
 * Decrypts {@code encBytes} with the supplied stream cipher, returning the
 * plaintext in a freshly allocated array of the same length.
 */
protected static byte[] decryptBytes(StreamCipherCompat cipher, byte[] encBytes) {
    final int length = encBytes.length;
    final byte[] plain = new byte[length];
    cipher.processStreamBytes(encBytes, 0, length, plain, 0);
    return plain;
}
/* ==================================================================
 * Converser.java - Jun 27, 2011 1:06:01 PM
 *
 * Copyright 2007-2011 SolarNetwork.net Dev Team
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
 * 02111-1307 USA
 * ==================================================================
 * $Id: Converser.java 1703 2011-07-12 01:43:35Z shauryab $
 * ==================================================================
 */

package net.solarnetwork.node.control.jf2.lata;

import java.io.IOException;
import net.solarnetwork.node.control.jf2.lata.command.Command;
import net.solarnetwork.node.control.jf2.lata.command.CommandInterface;
import net.solarnetwork.node.io.serial.SerialConnection;
import net.solarnetwork.node.io.serial.SerialConnectionAction;
import net.solarnetwork.node.io.serial.SerialUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link SerialConnectionAction} that initializes the LATA Bus prior to
 * sending a command, then optionally reads back the command's response.
 *
 * <p>
 * Serial parameters known to work on Linux using <code>/dev/USB</code>:
 * <p>
 *
 * <pre>
 * baud                 4800
 * data bits            8
 * stop bits            1
 * parity               0
 * flow control         -1
 * receive threshold    -1
 * receive timeout      -1
 * receive framing      -1
 * dtr                  -1
 * rts                  -1
 * response timeout     60000
 * </pre>
 *
 * @author shauryab
 */
public class LATABusConverser implements SerialConnectionAction<String> {

    private static final Logger LOG = LoggerFactory.getLogger(LATABusConverser.class);

    /** Marker byte that prefixes every bus response message. */
    private static final byte[] MAGIC = new byte[] { 'T' };

    /** Fixed response length, e.g. {@code T100000BD26464}. */
    private static final int READ_LENGTH = 14;

    private final CommandInterface command;

    /**
     * Construct with a specific command.
     *
     * @param command
     *        the command
     */
    public LATABusConverser(CommandInterface command) {
        super();
        this.command = command;
    }

    @Override
    public String doWithConnection(SerialConnection conn) throws IOException {
        // sets the Reset Mode in the LATA Bus
        speakAndWait(conn, Command.StartResetMode);

        // sets the speed in the LATA Bus
        speakAndWait(conn, Command.SetSpeed);

        // sets the Operational Mode in the LATA Bus
        speakAndWait(conn, Command.StartOperationalMode);

        // drain the input buffer... the bus sometimes has stuff waiting around
        // (the old log calls passed arguments without {} placeholders, so the
        // values were silently dropped by SLF4J)
        LOG.trace("Draining the input buffer for command {}", getCommand());
        byte[] data = conn.drainInputBuffer();
        LOG.trace("Drained buffer of {} bytes", data.length);

        LOG.trace("Sending command {}: {}", getCommand(), getCommand().getData());
        conn.writeMessage(getCommand().getCommandData());

        if ( getCommand().includesResponse() ) {
            LOG.trace("Waiting for response to command {}: {}", getCommand(), getCommand().getData());
            data = conn.readMarkedMessage(MAGIC, READ_LENGTH);
            return (data == null ? null : new String(data, SerialUtils.ASCII_CHARSET));
        }

        return null;
    }

    /**
     * Write a command to the bus, then pause ~500ms to give the bus time to
     * settle before the next command.
     */
    private void speakAndWait(SerialConnection conn, CommandInterface command) throws IOException {
        LOG.trace("Sending command {}: {}", command, command.getData());
        conn.writeMessage(command.getCommandData());
        synchronized ( this ) {
            try {
                this.wait(500);
            } catch ( InterruptedException e ) {
                // ignore
            }
        }
    }

    public CommandInterface getCommand() {
        return command;
    }

}
/** * @file * @brief Driver to interface with the camera using OpenCV. * @copyright 2020, <NAME> Gesellschaft. All rights reserved. * @license BSD 3-clause */ #pragma once #include <robot_interfaces/sensors/sensor_driver.hpp> #include <trifinger_cameras/camera_observation.hpp> namespace trifinger_cameras { /** * @brief Driver for interacting with any camera using OpenCV. */ class OpenCVDriver : public robot_interfaces::SensorDriver<CameraObservation> { public: OpenCVDriver(int device_id); /** * @brief Grab a single frame along with its timestamp. * * @return Image frame consisting of an image matrix and the time at * which it was grabbed. */ CameraObservation get_observation(); private: cv::VideoCapture video_capture_; }; } // namespace trifinger_cameras
#ifndef STACK_H #define STACK_H #include <Windows.h> extern const void * STACK; struct Stack{ void *internals; void (*Push)(struct Stack *__ptrStack, void * __newValue); void *(*Pop)(struct Stack *__ptrStack); //read top item without popint void *(*Read)(struct Stack *__ptrStack); struct Stack *(*init)(struct Stack *__ptrInput, size_t __size); }; #endif STACK_H
“Congressman Dennis Kucinich (D-OH) today challenged the notion that removing ‘combat brigades’ but leaving 50,000 U.S. troops in Iraq constitutes an end to combat operations, let alone an end to the war,” a press release sent to RAW STORY on Thursday stated. The press release continues: “Who is in charge of our operations in Iraq , now? George Orwell? A war based on lies continues to be a war based on lies. Today, we have a war that is not a war, with combat troops who are not combat troops. In 2003, President Bush said ‘ Mission Accomplished ‘ . In 2010, the White House says combat operations are over in Iraq , but will leave 50,000 troops, many of whom will inevitably be involved in combat-related activities. “Just seven days ago, General Babaker Shawkat Zebari, the commander of Iraq ’s military, said that Iraq ’s security forces will not be trained and ready to take over security for another 10 years. One story is being told to the military on the ground in Iraq and another story is being told to their families back home. “You can’t be in and out at the same time. “This is not the end of the war; this is simply a new stage in the campaign to lull the American people into accepting an open-ended presence in Iraq . This is not an honest accounting to the American people and it diminishes the role of the troops who will put their lives on the line. This is not fair to the troops, their families or the American people. “The Administration and the Pentagon would be wise to level with the American people about our long-term commitment to Iraq . “The cost of the wars has been estimated to be around $1 million per soldier per year. Each year the troop levels stay at 50,000 means another $50 billion is wasted. I object to spending billions of dollars to maintain a charade in Iraq while our own economy is failing and over 15 million Americans are out of work. I object to keeping any level troops in Iraq to maintain a war based on lies. 
It is time that Congress sees through the manipulation and finally acts to truly end the war by stopping its funding,” said Kucinich. Kucinich’s statement doesn’t mention President Obama’s name once, but the president also didn’t don a military jumpsuit and fly a plane onto a carrier with a gigantic “Mission Accomplished” banner. Many of the top liberal blogs who have criticized Obama the past year went silent on the Iraq “exit” coverage (perhaps some are on August vacation). Aside from Kucinich, RAW STORY was only able to find a scathing editorial on the World Socialist Web Site. The White House and the Pentagon, assisted by a servile media, have hyped Thursday’s exit of a single Stryker brigade from Iraq as the end of the “combat mission” in that country, echoing the ill-fated claim made by George W. Bush seven years ago. Obama is more skillful in packaging false propaganda than Bush, and no doubt has learned something from the glaring mistakes of his predecessor. Bush landed on the deck of the US aircraft carrier Abraham Lincoln on May 1, 2003 to proclaim—under a banner reading “Mission Accomplished”—that “major combat operations” in Iraq were over. A captive audience of naval enlisted personnel was assembled on deck as cheering extras. Obama wisely did not fly to Kuwait to deliver a similar address from atop an armored vehicle. He merely issued a statement from the White House, while leaving the heavy lifting to the television networks and their “embedded” reporters, who accompanied the brigade across the border into Kuwait and repeated the propaganda line fashioned by the administration and the military brass. Three years after former President George W. Bush declared “Mission Accomplished” on an aircraft carrier, MSNBC anchor Keith Olbermann proceeded to mock the early propagandistic call by announcing each successive night on his Countdown show that it has been “one thousand and blank” days since the the war in Iraq “ended,” RAW STORY noted yesterday. 
Chances are, three years from now, even if US troops are still caught up in a quagmire in Iraq, Olbermann won’t be doing a similar signoff schtick to mock the coverage that ran on NBC and MSNBC Wednesday evening. At The New York Times Media Decoder blog,, Brian Stelter reported, “The combat mission in Iraq doesn’t officially end until Aug. 31 but viewers and readers could be forgiven for thinking it ended tonight.” In a broadcast that Brian Williams said constituted an “official Pentagon announcement,” NBC showed live pictures Wednesday night as members of the last combat brigade in Iraq drove toward the Kuwait border, symbolizing an end to fighting in the country. “We are with the last combat troops” in Iraq, the NBC correspondent Richard Engel said at 6:30 p.m. Eastern, the same time that the military lifted an embargo that had been placed on the reporters traveling with the 440 troops, a part of the 4/2 Stryker Brigade. The Associated Press, Fox News, The Los Angeles Times, The Washington Post, Al Jazeera and other news media outlets also reported Wednesday evening that the last combat troops were crossing into Kuwait. Only NBC broadcast it live, in asymmetrical image to the invasion that captured the nation’s attention on television seven years ago. Coverage by most media outlets on the “last combat brigade” leaving Iraq paint an almost rosy picture with their headlines, which suggest that not only will the close to 60,000 troops left behind not be fighting anyone, but that there is no chance of any future surge. “As the United States military prepares to leave Iraq by the end of 2011, the Obama administration is planning a remarkable civilian effort, buttressed by a small army of contractors, to fill the void,” the New York Times reports. However, the Associated Press and many liberal blogs instead chose to criticize Fox News for not covering the “exit” with the same gusto. 
Perhaps another network could instead have scrutinized the extensive access the Pentagon provided to MSNBC.
// readHeader reads a block of SMTP or MIME headers and returns a textproto.MIMEHeader.
// Header parse warnings & errors will be added to p.Errors, io errors will be returned directly.
//
// The loop below copies raw header lines into buf, unfolding continuation
// lines and repairing common malformations, then hands the normalized block
// to textproto.ReadMIMEHeader for the actual parse.
func readHeader(r *bufio.Reader, p *Part) (textproto.MIMEHeader, error) {
	// buf accumulates the normalized (CRLF-delimited) header block.
	buf := &bytes.Buffer{}
	tp := textproto.NewReader(r)
	firstHeader := true
	for {
		// Read the next line, stripping the trailing newline.
		s, err := tp.ReadLineBytes()
		if err != nil {
			cause := errors.Cause(err)
			if cause == io.ErrUnexpectedEOF && buf.Len() == 0 {
				// Input ended before any header content was seen.
				return nil, errors.WithStack(errEmptyHeaderBlock)
			} else if cause == io.EOF {
				// Normal end of input: terminate the block and parse what we have.
				buf.Write([]byte{'\r', '\n'})
				break
			}
			return nil, err
		}
		firstColon := bytes.IndexByte(s, ':')
		firstSpace := bytes.IndexAny(s, " \t\n\r")
		if firstSpace == 0 {
			// Line starts with whitespace: a folded continuation of the
			// previous header; unfold it onto the same buffered line.
			buf.WriteByte(' ')
			buf.Write(textproto.TrimBytes(s))
			continue
		}
		if firstColon == 0 {
			// A header with an empty name is malformed; record and skip it.
			p.addError(ErrorMalformedHeader, "Header line %q started with a colon", s)
			continue
		}
		if firstColon > 0 {
			// A new "Name: value" header. Emit the CRLF that ends the
			// previous header first (deferred so unfolding works above).
			if !firstHeader {
				buf.Write([]byte{'\r', '\n'})
			}
			s = textproto.TrimBytes(s)
			buf.Write(s)
			firstHeader = false
		} else {
			if len(s) > 0 {
				// No colon and not indented: treat as an improperly folded
				// continuation, but warn about it.
				buf.WriteByte(' ')
				buf.Write(s)
				p.addWarning(ErrorMalformedHeader, "Continued line %q was not indented", s)
			} else {
				// Blank line marks the end of the header block.
				buf.Write([]byte{'\r', '\n'})
				break
			}
		}
	}
	// Second CRLF produces the empty line ReadMIMEHeader needs to stop.
	buf.Write([]byte{'\r', '\n'})
	tr := textproto.NewReader(bufio.NewReader(buf))
	header, err := tr.ReadMIMEHeader()
	return header, errors.WithStack(err)
}
Bile accelerates carcinogenic processes in pancreatic ductal adenocarcinoma cells through the overexpression of MUC4 Pancreatic cancer (PC) is one of the leading causes of mortality rate globally and is usually associated with obstructive jaundice (OJ). Up to date, there is no clear consensus on whether biliary decompression should be performed prior to surgery and how high levels of serum bile affects the outcome of PC. Therefore, our study aims were to characterise the effect of bile acids (BAs) on carcinogenic processes using pancreatic ductal adenocarcinoma (PDAC) cell lines and to investigate the underlying mechanisms. Liquid chromatography-mass spectrometry was used to determine the serum concentrations of BAs. The effects of BAs on tumour progression were investigated using different assays. Mucin expressions were studied in normal and PDAC cell lines and in human samples at gene and protein levels and results were validated with gene silencing. The levels of BAs were significantly higher in the PDAC + OJ group compared to the healthy control. Treating PDAC cells with different BAs or with human serum obtained from PDAC + OJ patients enhanced the rate of proliferation, migration, adhesion, colony forming, and the expression of MUC4. In PDAC + OJ patients, MUC4 expression was higher and the 4-year survival rate was lower compare to PDAC patients. Silencing of MUC4 decreased BAs-induced carcinogenic processes in PDAC cells. Our results show that BAs promote carcinogenic process in PDAC cells, in which the increased expression of MUC4 plays an important role. Based on these results, we assume that in PC patients, where the disease is associated with OJ, the early treatment of biliary obstruction improves life expectancy. Scientific Reports | (2020) 10 Pancreatic cancer (PC) is associated with extremely poor survival and high mortality rate. Currently, PC is the seventh leading cause of cancer-related deaths worldwide 1 . 
One of the most common reasons for the poor clinical outcome is the lack of specific symptoms; as a result, approximately 80% of patients are diagnosed at an advanced stage, when most of them are inoperable . The most common form of PC is pancreatic ductal adenocarcinoma (PDAC), which is responsible for approximately 90% of cases 6 . Most of the PDAC arises from ductal cells in the head of the pancreas. As tumour grows, it prevents the flow of bile and, as a result, obstructive jaundice (OJ) develops. Elevated serum levels of bile acids (BAs) influence the function of several organs; they have also proved to have tumorigenic potential in both gastrointestinal and breast cancer 7 . Although surgical intervention is widely regarded as the most effective way for the treatment of PC 3,8 , the use of preoperative biliary stenting is often the basis for debate and it usually takes time to make a decision . Moreover, there is no consensus regarding the role of BAs in the initiation and progression of PC 12 . Some studies indicate that BAs inhibit the proliferation of PC cells due to their cytotoxic properties 13,14 , while others found that BAs promote tumour development and progression by increasing the expression of COX-2 or mucins . In recent years, considerable attention has been paid to the diagnostic use of mucins in PC. Twenty-one mucin genes have been identified in humans and, among them, MUC1, -4 and -5AC proved to be potential biomarkers to assess the progression of PC. These genes are mainly overexpressed in PC, play role in tumour cell growth and associate with a poor outcome for PC patients 16, . Several studies indicate that BAs play an extensive role in tumour progression by altering the expression of mucins 17, . In the oesophagus, BAs upregulate mucin expression, in which phosphatidylinositol 3-kinase and nuclear factor-κB (NK-κB) signalling pathways play a role 17,23,24 . 
The role of NF-κB in bile-induced mucin expression has also been implicated in gastric epithelial cells 25 . In contrast, there has not been in-depth study pertaining to the pancreas; thus, this study aims (i) to investigate how BA treatment affects the behaviour of PDAC cells and (ii) to identify the mechanisms that mediate the effects of BAs.
Next, we added 100 µl of cell suspension (10 5 cells/ml) to each of the coated wells and incubated the cells at 37 °C for 20 min to allow them to adhere to the surface. After washing, the cells were incubated with BAs and 10 µl of MTT substrate was added to each well. MTT-treated cells were then lysed in DMSO and absorbance was measured using a FLUOstar OPTIMA Spectrophotometer (BMG Labtech, Ortenberg, Germany) at 560 nm with background subtraction at 620 nm. Proliferation. For proliferation, 100 µl of cell suspension was seeded into a 96-well plate (5 × 10 3 cells/well), then the cells were incubated with BAs. After the treatments, 10 µl of CCK8 solution was added to each well and the cells were incubated for further 3 h. We measured absorbance at 450 nm using a FLUOstar OPTIMA Spectrophotometer (BMG Labtech, Ortenberg, Germany). Cytotoxicity assay. For cytotoxicity assay, 100 µl of cell suspension was seeded into a 96-well plate (2 × 10 4 cells/well) and allowed to adhere overnight. On the following day, the cells were incubated with BAs then 100 µl supernatant from each of the wells was carefully transferred into a new 96-well plate containing 100 µl reaction mixture. We then measured lactate dehydrogenase (LDH) activity at 490 nm using a FLUOstar OPTIMA Spectrophotometer (BMG Labtech, Ortenberg, Germany). For background controls, we measured 200 µl assay medium, without cells. For low controls, we used 100 µl cell suspension and 100 µl assay medium. In the case of high controls, the mixture of 100 µl cell suspension and 100 µl Triton-X 100 (0.1%) solution was measured. The LDH release induced by Triton-X 100 was assigned to 100%. The average absorbance values of each of the triplicates were calculated and the average value of the background control (LDH activity contained in the assay medium) was subtracted from each of the samples to reduce background noises. 
We then calculated the percentage of cytotoxicity
200 µl cells (~ 2.5 × 10 5 / ml in serum-free medium) were added into the inserts whereas the lower chambers contained 750 µl complete medium with or without BAs. Cells were than incubated at 37 °C for 24-72 h in 5% CO 2 in a humidified incubator. Cells that migrated to the bottom surface were fixed in formaldehyde (3.7% in PBS) for 5 min, permeabilized with 100% methanol and stained with Giemsa dye for 30 min. The non-invading cells on the upper surface of the membrane were gently scraped off using a cotton swab. Invasion was quantified by counting the average number of invaded cells in five different microscopic fields in each treatment. Percent invasion was calculated from the mean of the average number of invaded cells obtained from 3 independent experiments. siRNA silencing. MUC4 expression was silenced transiently, using MUC4-targeted siRNA oligonucleotides. Transfection was performed with Oligofectamine™ Transfection Reagent following the manufacturer's instructions. We then plated 2 × 10 5 cells per well onto 6-well plates a day before the transfection. At 50-60% confluency, the MUC4-targeted siRNAs were transfected and the cells were incubated for 72 h. MUC4 mRNA and protein levels were assessed by RT-PCR and immunocytochemistry, respectively. RT-PCR. The total RNA was isolated from the cells using the NucleoSpin RNA Kit (Macherey-Nagel, Düren, Germany). Two micrograms of RNA were reverse-transcripted using the High-Capacity cDNA Reverse Transcription Kit (Applied Biosystems, Foster City, USA). Real-time PCR reactions of samples were performed with TaqMan RT-PCR assays (Supplementary Table S1) from Thermo Fisher Scientific (Darmstadt, Germany). Reactions were carried out with ABI PRISM 7000 Sequence Detection System (Applied Biosystems, Foster City, CA, USA) platform with the following conditions: 10 min initial denaturation at 95 °C, followed by 40 steps cycles: 15 s at 95 °C and 1 min at 60 °C. 
Fluorescein dye (FAM) intensity was detected after each cycle. All the samples were run in triplicates and non-template control sample was used for each PCR run to check the primer-dimer formation. The expression level of the gene of interest was normalised to the human β-actin (Actb) housekeeping gene (ΔCT), and then relative gene expression ratios were calculated using the ΔΔC T method as previously described 26,27 . The results were expressed as fold changes (2 −ΔΔCT ). Genes with expression values less than or equal to 0.5 were considered to be down-regulated, whereas the values higher than or equal to 2 were considered to be upregulated. Values ranging from 0.51 to 1.99 were not considered to be significant. Immunostainings. Immunocytochemistry (ICC) was performed using cytospin preparation during which 100 µl (2 × 10 6 cells/ml) of cell suspension was added to 100 µl of neutral formalin buffer and incubated for 5 min. After the incubation, 100 µl from this mixture were spin (Shandon Cytospin3, Marshall Scientific, Cambridge, MA, US) to an Ultra Plus Microscope Slide (Thermo Fisher Scientific, Darmstadt, Germany). Pre-treatment was carried out with heat-induced epitope retrieval procedure using PT Link (Autostainer Link 48 www.nature.com/scientificreports/ serum to reduce non-specific binding than anti-vimentin primary antibody (1:100 dilution) was added to the chambers and slides were incubated overnight in moist atmosphere at 4 °C. Chamber slides were then washed with PBS and incubated with TexasRed-conjugated anti-mouse secondary antibody (1:400 dilution) for 60 min at RT. Nuclei were counterstained with Hoechst 33342. Slides were then mounted and observed by a Fluowiew 10i-W confocal microscopy (Olympus, Budapest, Hungary). In the human pancreatic samples, MUC4 expression was analysed using formalin-fixed, and paraffin-embedded tissue specimens were obtained from patients. 
Control tissues (n = 4) were collected from the tumour-free region of the pancreas of patients with NE tumour. Briefly, 3 to 4 µm thick sections of section specimens were deparaffinised in xylene and rehydrated in graded ethanol. The diagnosis was assessed by a pathologist after staining the sections with haematoxylin-eosin-saffron. Immunohistochemistry (IHC) was performed as described above, but the slices were incubated with the primary MUC4 antibody for 60 min. Quantification of MUC4 expression was evaluated using the method described by Rachagani et al. 28 . Statistical analysis. Quantitative variables were described as means ± SE. Significant differences between groups were performed by ANOVA, p ≤ 0.05 were accepted as significant. Survival curves were prepared using the method of Kaplan and Meier, and differences in survival were studied by the Log-rank test. Results Serum levels of bile acids in PDAC patients. The total serum bile acid (TSBA) concentration in healthy controls was 401.3 ± 35.38 ng/ml, whereas in PDAC + OJ patients it increased tremendously (36,055.7 ± 2182.2 ng/ ml; Fig. 1A). Analysis of individual BAs has shown higher concentrations of GCA, TCA, GCDCA and TCDCA in the serum of PDAC + OJ patients. Interestingly, TCA was completely absent in healthy control, but increased dramatically in PDAC + OJ. Serum levels of TDCA were low in controls and could not be detected in PDAC + OJ patients (Fig. 1A). In PDAC patients without OJ, the TSBA concentration was 733.9 ± 118.7 ng/ml. Table 1 shows the clinicopathological characteristics and the level of BAs in human serum. Bile acids play a key role in the progression of PC. In the next step, we treated Capan-1 cells for 24, 48 and 72 h with serum obtained from PDAC patients (with or without OJ) and healthy volunteers (normal). 
Treatment with human serum induced a changed morphology and growth characteristic of the cells, therefore, we examined whether this altered morphology is associated with epithelial-mesenchymal transition (EMT). Vimentin is a structural protein that is expressed in mesenchymal cells but not in epithelial cells. In the case of PDAC + OJ a strong positive staining for vimentin was detected (Fig. 1B). In the PDAC group, only a slight staining was observed, whereas the control and the normal groups were completely negative for vimentin. These data indicate that BAs have a prominent role in the progression of cancer. To confirm this hypothesis, we supplemented PDAC serum with 0.5 mM BAs cocktail (BAC). The concentration and composition of BAC were selected on the basis of serum BAs measurements. Supplementation of PDAC serum with BAC resulted in similarly strong vimentin staining as observed for PDAC + OJ. As a positive control gastric myofibroblast were used. Moreover, we investigated proliferation, viability, and adhesion capability of the cells. As expected, serum from PDAC patients increased the rate of proliferation, adhesion and survival of Capan-1 cells compared to the normal serum (Fig. 1C-E.) Importantly, there was also a significant difference between the effect of serum from PDAC patients and that of PDAC + OJ patients, suggesting a specific role of BAs in PC pathogenesis. (Fig. 2A) Incubation of Capan-1 and BxPC-3 cells with BAs, increased the rate of proliferation almost in all treated groups (Fig. 2B). Among BAs, the effect of TDCA was dose-dependent especially at 24 h; it suppressed proliferation of the cells (0.83 ± 0.06) at a low concentration (100 µM), and increased it (1.64 ± 0.02) at a high concentration (500 µM), depending on time. Binding of cells to extracellular matrix plays an important role in survival of cells and determines the progression and outcome of PC. 
Subsequently, we have investigated the effect of BAs treatment on the adhesion of Capan-1 and BxPC-3 cells to collagen 1. As shown in Fig. 2C, adhesion of cells increased with the incubation time, mainly at high doses of TCDCA-treated group. BAs treatment also promoted the invasion of Capan-1 and BxPC-3 cells, as demonstrated on Fig. 2D,E. We have also investigated the metastatic potential of cancer cells using the wound healing assay. Treatment with BAs, especially high concentration of TCDCA (500 µM), significantly increased the migration rate of both Capan-1 and BxPC-3 cells (Fig. 2F). Next, we have investigated the ability of Capan-1 and BxPC-3 cells to form colonies using the clonogenic assay. Figure 3A shows a representative picture regarding the effect of TCDCA at high (500 µM) concentration. These pictures and the summary bar chart (Fig. 3B) clearly show that high concentration of BAs increase the colony forming ability of the cells especially at 72 h. We have also investigated the size of the colony in differently treated groups (Fig. 3C-E). Furthermore, we have distinguished the colonies according to the following criteria: small: 1000-10,000 µm 2 , medium: 10,000-20,000 µm 2 , large: 20,000-30,000 µm 2 and extra-large: 30,000-100,000 µm 2 . In the case of small colonies, a number of colonies were significantly higher in the non-treated, control group, compared to the BA-treated groups. Medium-sized colonies did not show any difference between the BA-treated and control groups. In contrast, BA treatment significantly increased the number of colonies in the large and www.nature.com/scientificreports/ extra-large groups, indicating that BAs induce the formation of large and extra-large colonies both in the Capan-1 (Fig. 3D) and BxPC-3 ( Fig. 3E) cells, an action that promotes larger tumour tissue development. Expression of mucin genes in pancreatic ductal cell lines. 
Our results clearly demonstrate that BAs accelerate tumour processes; thus, we aimed to identify the mechanism that mediates the effects of BAs. Mucins are glycoproteins whose significance has been identified in many cancer types. To examine whether BAs are acting through the altered mucin expression, we investigated the effect of BAs on mucin expression. First, we studied the mRNA expression of mucin genes in HPDEC, Capan-1 and BxPC-3 cells using RT-PCR and TaqMan primer-probe sets, specific for mucin genes (Suppl . Table S1). We have investigated those genes (MUC1, -2, -4, -5AC, -5B, -12, -13, - 15 -17, -19 and -20), which are proved to play a central role in gastrointestinal tumours, and TaqMan probe sets were available for them. In the normal cell line, the presence of MUC1, -2, -17 and -20 was shown (Table 3). In the case of Capan-1, expressions of MUC1, -4, -5AC, -5B, -13, -17 and -20 were observed, whereas in the BxPC-3 cells the presence of MUC1, -2, -4, -5AC, -5B and -13 was detected. Mucin expressions were also tested in two other PDAC cell lines, PANC-1 and MIAPaCa-2. Interestingly, much less mucin genes were detected in these cell lines ( Table 3). The mucin genes used as a biomarker in PC, such as MUC4, -5AC and -5B, are expressed only in Capan-1 and BxPC-3 cells. The expression of mucin genes is summarised in Table 3. Effect of bile acids on mucin expression. In the next step, we pre-treated the cells with various BAs (100 and 500 µM) for 24, 48 and 72 h and the mRNA expression of mucin genes was investigated by RT-PCR. In the normal cell line, long time incubation with the BAs decreased the expression of MUC1 and -2 in most of the treated groups (Suppl. Fig. S1A). In contrast, all of the investigated BAs dose-dependently increased the expression of MUC20 (Suppl. Fig. S1A). Treatment with BAs did not affect the expression of the other genes (data not shown). 
In the Capan-1 cell line, BAs treatment dose-and time-dependently upregulated the expression of MUC4 (Fig. 4A). Among the BAs, the highest effect has the conjugated forms of DC and CDC acids. In contrast, GCA and TCA induced significant increase only at higher concentrations. Interestingly, TCDCA induced a robust increase (approx. fivefold compared to the control) in the expression of MUC17 at a high concentration (500 µM), at all three incubation times (Supp. Fig. S1B). The expression of the other genes did not change www.nature.com/scientificreports/ significantly in most of the groups (Supp. Fig. S1B). Similarly to the Capan-1 cells, BAs treatment increased the expression of MUC4 in the BxPC-3 cell line (Fig. 4A), however it did not or hardly affect the expression of the other genes (Suppl. Fig. S1C). MUC4 has been shown to be aberrantly expressed in PC; it promotes metastasis, and it is used as a prognostic factor; thus, we investigated the expression of this gene also at a protein level. Using immunostaining, we have shown that, similar to the RT-PCR data, pre-treatment with BAs time-and dosedependently increased the protein expression of MUC4 in both PDAC cell lines (Fig. 4B,C). Hierarchical clustering of genes showed that TDCA, TCDCA, GCDCA and GCA (in high concentration) initiated similar MUC gene expression level changes in both cell lines and formed a separated cluster from the other BAs. The expression pattern of MUC2, -4 and -17 has changed more pronouncedly than the other genes Table 3. mRNA expression of mucin genes in the different pancreatic ductal cell lines. Isoforms Capan-1 BxPC-3 MiaPaca-2 Panc-1 HPDEC www.nature.com/scientificreports/ upon BAs treatments, which suggest that these genes are more sensitive to BAs. Deeper analysis focusing on just the Capan-1 cell line showed that MUC4 pattern changed only after 48 h of the BAs treatment (Fig. 4D). Expression of MUC4 in human pancreatic samples. 
The presence of MUC4 has also been investigated in human pancreatic samples by IHC. In the normal pancreas and in NE, there was no detectable staining for MUC4 (Fig. 5A). In contrast, in the case of PDAC (with or without OJ), we observed a strong expression of MUC4 in the intra-and interlobular ducts. Interestingly, in those patients where PDAC was diagnosed without OJ, the expression of MUC4 was significantly low compared to the PDAC + OJ group. (Fig. 5B) There was no significant difference in gender, age, location of primary tumour, histological type, stage, lymphatic invasion or metastasis between the PDAC and PDAC + OJ groups (Table 2). In addition, in the PDAC + OJ group the expression of MUC4 increased with the progression of the disease, whereas in the PDAC group, there was no difference in the expression of MUC4 between the early and advanced stages. Quantification of the staining has been shown in Fig. 5B. We also examined how high serum levels of bile affects the outcome of PC. The 4-year overall survival rate of the PDAC + OJ group was significantly lower than that of the PDAC group (p = 0.0191) (Fig. 6). Knockdown of MUC4 decreases the carcinogenic effect of BAs. Next, we have investigated the effect of MUC4 knockdown on the proliferation of Capan-1 and BxPC-3 cells. MUC4 was silenced by MUC4specific siRNA. The efficiency of MUC4 knockdown was confirmed by RT-PCR (Fig. 7A) and ICC (Fig. 7B). We found that knockdown of MUC4 significantly increased cell death and decreased the rate of proliferation, adhesion, migration and colony forming in a time-dependent manner. (Fig. 7C-G) These results indicate that MUC4 is key mucoprotein in the growth of PDAC cells. In the next step we tested the effect of BAs on the MUC4-silenced cells. Among BAs, the effect of TCDCA was investigated, as this BA showed the greatest effect on both Capan-1 and BxPC-3 cells. 
When TCDCA was added in the absence of MUC4, an increase in the abovementioned parameters has been observed, although it was still significantly lower than in the presence of MUC4, indicating that the effect of BAs is mediated by MUC4, although other factors also play a role in it (Fig. 7C-G and Suppl. Fig. S2.). www.nature.com/scientificreports/ Discussion Since most of the PCs develop in the head of the pancreas, PDAC is frequently associated with increased levels of BAs in the serum; however, the effect of bile on PC progression has not been evaluated yet. In this study, we used two PDAC cell lines to show that BAs promote carcinogenic processes in which expression of MUC4 plays a huge role. We have shown that the serum levels of BAs extremely increase in PDAC + OJ patients and the most abundant BAs are GCA, TCA, GCDCA and TCDCA. In order to investigate how elevated serum bile influences carcinogenic processes, a PDAC cell line, Capan-1 was treated with serum obtained from PDAC patients. Capan-1 is www.nature.com/scientificreports/ one of the most aggressive commercially available cell line; therefore, it proved to be a good model for the characterisation of PC progression 30 . High concentration of bile in the serum enhanced the tumorigenic potential of Capan-1 cells and also promoted EMT, indicating that BAs play a prominent role in the pathomechanism of PC. Previous studies indicated that the structure (number of -OH groups or the conjugation with glycine or taurine) of individual BAs determines their carcinogenic effect 31 . Moreover, the studies show that hydrophobic bile acids are mostly toxic to cells, by generating oxidative stress and DNA damage, while hydrophilic bile acids play a protective role 32 . In this study, we focused on those BAs that we detected in the serum of PDAC patients and literature data also confirm their altered concentrations in both the serum and pancreatic juice of PC patients 12,15,16 . 
In terms of cell survival, the normal and PC cells reacted differently to BAs. In normal cells, the higher rate of cell death was observed, especially after 48 h of BAs treatment, which indicates that, under normal conditions, the ductal cells respond by cell death to this noxious agent. Similar results have been shown on isolated, guinea pig pancreatic ducts, where the treatment of ducts with high dose (1 mM) of CDCA damaged the mitochondria and induced apoptosis in the ductal cells 32 . The apoptotic effect of BAs on normal epithelial cells has also been demonstrated in hepatocytes and in oesophageal and nasopharyngeal epithelial cells . We hypothesised that the bile-induced cell death in the normal cells is an anti-cancer defence, by which the malignant transformation of the cells can be avoided. In contrast, cancer cells were more resistant to BAs treatment. Long-term incubation of Capan-1 and BxPC-3 cells with BAs increased their survival, which was consistent with the increased proliferation rate of these cells. The different response of normal and PDAC cells to BAs treatment can be explained by the fact that BAs are more likely to induce DNA damage than apoptosis in cancerous cells. Since gene mutations are more frequent in the damaged DNA, this favours the tumour progression 36 . In contrast, some studies have found that BAs treatment inhibit the proliferation of pancreatic cancer cell lines (PANC-1 and MIAPaCa-2) due to the cytotoxic effects of BAs 13,14 . In these studies, relatively low concentrations (< 50 µM) of BAs were used and that might cause the difference. This is also proved by the fact that, among the BAs we investigated, the effect of TDCA was dose-dependent. High concentration of this BA promoted proliferation, and low concentration strongly inhibited it. The dose-dependent effect of the unconjugated form of TDCA has also been shown on colonocyte's 37 and in gastric and oesophageal carcinoma 38,39 ; however, the exact explanation is unknown. 
In addition, we have found that the adhesion, invasion, migration and colony forming ability of Capan-1 and BxPC-3 cells increased due to the BAs treatment, indicating that BAs enhance both the migratory and cell growth potential of PDAC cells. In the following, we wanted to identify the mechanism by which BAs exert their effects. Mucins can be found throughout the whole body, where they provide the hydration and lubrication of the mucosal surfaces and their pivotal role in different cancer types is generally known 40 . Depending on the tissue type, some of the genes act as a tumour suppressor and some of them promote tumour development 43,44 . MUC5AC, -5B and -13 are absent in normal pancreas, but can be detected in pancreatic intraepithelial neoplasia and PDAC 45 . The role of MUC17 is controversial. Some data indicate that MUC17 decreases the tumorigenic potential of PDAC cells 46 , whereas others have found that this gene is aberrantly expressed in PC 47,48 . In the normal cell line, BAs treatment decreased the expression of MUC2, and upregulated MUC20. Since MUC2 is a tumour suppressor, whereas overexpression of MUC20 favours tumour progression, these data indicate that BAs facilitate tumour development under normal conditions, by altering the expression of these mucins. In contrast, the expression of other, oncogenic mucins, such as MUC4, did not change due to the BAs treatment. In the Capan-1 and BxPC3 cell lines, BAs induced changes in the expression of MUC4 and at least two days of BAs treatment were needed to detect changes in its expression pattern. The expression of MUC17 was only affected by high concentration of TCDCA in the Capan-1 cells and it could be detected 24 h after the treatment. Using human pancreatic samples, we showed that MUC4 was completely abolished from the normal pancreatic tissue and also in NE. In contrast, strong expression was detected in PDAC, which further increased in PDAC + OJ. 
To exclude that elevated MUC4 levels can be explained by the more advanced stages of PDAC + OJ patients, we compared MUC4 expressions both at the early and late stages of PC. Expression of MUC4 increased with the disease progression in the PDAC + OJ group, but not in the PDAC group, indicating that the elevated level of MUC4 is due to the specific action of BAs. We also found that the presence of biliary obstruction was related to poor survival of the PDAC + OJ patients. Several studies have revealed that overexpression of MUC4 is associated with a poor clinical outcome, and this gene has been reported to be an independent prognostic factor in PC. In order to clarify the role of MUC4 in the bile-induced cancer progression, we down-regulated MUC4 by siRNA transfection and found that MUC4 acts as an oncogenic mucin. The oncogenic potential of MUC4 is not surprising, since silencing of MUC4 decreases the proliferation of many cancer cells. Li et al. have shown that, 96 h after transfection with a shRNA lentivector targeting MUC4, the cell growth of BxPC-3 cells decreased, both under in vitro and in vivo conditions 42 . Similar results have been found in other pancreatic cancer cell lines 18,41,53,54 . We have also demonstrated that inhibition of MUC4 expression significantly decreased the effect of TCDCA, one of the most effective BAs, indicating that the tumorigenic effect of bile is mediated by MUC4. Figure 8 shows a hypothetical schematic figure regarding the role of BAs in PC progression. BAs induce cell death in normal pancreatic ductal cells, which is probably an anti-cancer defensive mechanism. In contrast, elevated serum BAs levels increase MUC4 expression in PC, which presumably accelerates tumour progression.
// // WARNING: // I have not gotten this to work as a useful offscreen buffer. // class GlRenderableBuffer { public: GlRenderableBuffer(const GlSize& size); void bind(); private: void initialize(); bool initialized() const { return initialized_; } int frame_buffer_id_ = -1; int texture_target_ = -1; int depth_buffer_ = -1; bool initialized_ = false; GlSize size_; }
Dow Jones Files Idiotic 'Hot News' Lawsuit Against A Service That Sends News Alerts from the opening-a-can-of-worms dept How does Ransquawk provide such a popular service? Its business model is as simple as it is illegal: Ransquawk's audio and text services are based on the systematic unauthorized reproduction and redistribution of news content published by Dow Jones, and undoubtedly other news content providers as well. For a few years now, we've been covering the troubling return of the "hot news" doctrine. This is a non-copyright concept that was mostly considered dead and buried, but was suddenly revived a few years ago. Technically, it's still considered "law" in New York, and it involves the idea that there's some sort of "protection" in news, such that others can't re-report the news that others have reported if it's "too soon." Under basic copyright, of course, facts are not copyrightable, so it's always been considered fair game to repeat factual news information (so long as you're not copying specific expression). The whole hot news concept had basically become defunct before the Associated Press brought it back up in a lawsuit about five years ago. Of course, there are all sorts of troubling implications of creating a new form of intellectual property such as "hot news" -- especially in an age of Twitter, Facebook and other methods of sharing news and information. Already, some have sought to stretch the definition of hot news. So far, thankfully, most recent hot news lawsuits have failed in court , though many seem to end in "settlements."One of the "settled" cases was brought by Dow Jones a few years ago, and apparently the company has decided to try again. 
It has filed a hot news lawsuit against a company almost no one has heard of (until Dow Jones just gave them a ton of free publicity), Ransquawk. In Dow Jones' initial cease and desist letter, it claimed that Ransquawk violated its copyrights, but apparently the lawyers at Dow Jones finally figured out how copyright works and realized that wasn't true. The lawsuit only makes use of the hot news concept. In its defense, Ransquawk explained to Dow Jones that it does not have an account from Dow Jones' DJX service, which Dow Jones says Ransquawk is illegally copying, but rather that it finds the information from Twitter, Dow Jones reporters themselves and various other services who often share the same headlines. Since Ransquawk is a UK company, it also rejects the copyright claims, pointing out that news reporting is considered fair dealing under UK law. The actual lawsuit makes some brazen claims. But, again, what is described amounts to repeating a news headline. The complaint also insists that Ransquawk is lying in claiming that it's obtaining the news from other sources, noting that sometimes Ransquawk is repeating the DJX news within five seconds, which suggests it has access to a direct feed, despite denying it. It's entirely possible that someone is violating DJX's terms of service to allow Ransquawk to have access to the feed, but that's a completely different matter than hot news. While Ransquawk may follow in the footsteps of others and settle this case to be done with it, this remains a really stupid move by Dow Jones -- a company that quite frequently has its own staff repeating and sharing news first reported elsewhere. It's not difficult to see how any precedent Dow Jones might set with this lawsuit will almost certainly come back to bite them when others realize that Dow Jones does the same exact thing. News is news: it's factual and sharing the news is just a part of how the world works today. 
Rather than freaking out about it, Dow Jones should focus on adding the kind of additional value that it claims to add, such that mere headlines from Ransquawk won't make a difference. Seriously, if the only value that Dow Jones provides is somehow "misappropriated" by Ransquawk then it makes me think that Dow Jones really doesn't provide much value at all. Filed Under: hot news, intellectual property, news alerts Companies: dow jones, news corp., ransquawk
/**
 * Non thread-safe implementation of withdraw, using no locking.
 *
 * Reads the current balance and, if it covers the requested amount,
 * debits the account, adds the amount to the running withdrawal total
 * and dispenses the cash. Because balance is read, checked and written
 * back without any lock, concurrent callers can race and over-withdraw.
 *
 * @param account  account to debit (mutated on success)
 * @param amount   amount requested
 * @return true if the withdrawal was approved and performed
 */
static bool withdraw_unsafe( struct account *account,
                             unsigned int amount )
{
	bool success = false;
	/* Snapshot of the balance; under concurrency this can go stale
	   before the write-back below. */
	const int balance = account->current_balance;
	/* Widen to long so the signed balance is not converted to an
	   unsigned type during the comparison. */
	if ( balance >= (long) amount )
	{
		success = true;
		printf("Withdrawal approved\n"); /* fixed typo: "Withdrawl" */
		account->current_balance = balance - amount;
		account->withdrawl_total += amount; /* field name kept as declared elsewhere */
		disburse_money(amount);
	}
	return success;
}
"""Unit tests for the Numbrix puzzle solver."""
import unittest

from Numbrix import Numbrix
from Numbrix_Cell import Numbrix_Cell
from Chain_Endpoint import Chain_Endpoint

# 9x9 puzzles in row-major order; None marks a cell to be solved.
beginner_puzzle = [5, 6, 7, 8, 9, 24, 25, 30, 31,
                   4, None, None, None, None, None, None, None, 32,
                   15, None, None, None, None, None, None, None, 33,
                   16, None, None, None, None, None, None, None, 34,
                   65, None, None, None, None, None, None, None, 39,
                   66, None, None, None, None, None, None, None, 40,
                   69, None, None, None, None, None, None, None, 45,
                   70, None, None, None, None, None, None, None, 46,
                   71, 72, 81, 80, 79, 52, 51, 48, 47]

dec_27_2020 = [3, None, 9, None, 17, None, 21, None, 23,
               None, None, None, None, None, None, None, None, None,
               1, None, None, None, None, None, None, None, 27,
               None, None, None, None, None, None, None, None, None,
               59, None, None, None, None, None, None, None, 43,
               None, None, None, None, None, None, None, None, None,
               63, None, None, None, None, None, None, None, 81,
               None, None, None, None, None, None, None, None, None,
               67, None, 69, None, 73, None, 77, None, 79]

very_hard_puzzle = [55, None, 61, None, 69, None, 79, None, 77,
                    None, None, None, None, None, None, None, None, None,
                    53, None, None, None, None, None, None, None, 75,
                    None, None, None, None, None, None, None, None, None,
                    47, None, None, None, None, None, None, None, 31,
                    None, None, None, None, None, None, None, None, None,
                    45, None, None, None, None, None, None, None, 17,
                    None, None, None, None, None, None, None, None, None,
                    5, None, 7, None, 9, None, 13, None, 15]


# noinspection PyPep8Naming
class TestNumbrix(unittest.TestCase):

    def test_creation(self):
        """A puzzle list can be turned into a Numbrix instance."""
        numbrix = Numbrix(beginner_puzzle)
        self.assertIsNotNone(numbrix)
        numbrix.display()

    def test_get_neighbors(self):
        """Corner cells have 2 neighbors, edge cells 3, interior cells 4."""
        numbrix = Numbrix(beginner_puzzle)
        cell_A1 = numbrix.get_cell('A1')
        neighbors_of_cell_A1 = numbrix.get_cell_neighbors(cell_A1)
        print('Neighbors of cell A1:', neighbors_of_cell_A1)
        self.assertTrue(cell_A1 not in neighbors_of_cell_A1)
        self.assertEqual(2, len(neighbors_of_cell_A1))
        cell_I9 = numbrix.get_cell('I9')
        neighbors_of_cell_I9 = numbrix.get_cell_neighbors(cell_I9)
        print('Neighbors of cell I9:', neighbors_of_cell_I9)
        self.assertTrue(cell_I9 not in neighbors_of_cell_I9)
        self.assertEqual(2, len(neighbors_of_cell_I9))
        cell_A2 = numbrix.get_cell('A2')
        neighbors_of_cell_A2 = numbrix.get_cell_neighbors(cell_A2)
        print('Neighbors of cell A2:', neighbors_of_cell_A2)
        self.assertTrue(cell_A2 not in neighbors_of_cell_A2)
        self.assertEqual(3, len(neighbors_of_cell_A2))
        cell_B2 = numbrix.get_cell('B2')
        neighbors_of_cell_B2 = numbrix.get_cell_neighbors(cell_B2)
        print('Neighbors of cell B2:', neighbors_of_cell_B2)
        self.assertTrue(cell_B2 not in neighbors_of_cell_B2)
        self.assertEqual(4, len(neighbors_of_cell_B2))

    def test_cell_reduction(self):
        """Reducing B1's neighbors forces B2 to take the value 3."""
        numbrix = Numbrix(beginner_puzzle)
        cell_B1 = numbrix.get_cell('B1')
        neighbors_of_cell_B1 = numbrix.get_cell_neighbors(cell_B1)
        print('Neighbors of cell B1:', neighbors_of_cell_B1)
        cell_B1.reduce_neighbors(neighbors_of_cell_B1, numbrix.get_all_values())
        print('Neighbors of cell B2:', neighbors_of_cell_B1)
        cell_B2 = numbrix.get_cell('B2')
        self.assertEqual(3, cell_B2.get_value())

    def test_beginner_puzzle_reduction(self):
        """After reduction, link-endpoint detection agrees with the board."""
        numbrix = Numbrix(beginner_puzzle)
        numbrix.display()
        numbrix.reduce()
        numbrix.display()
        cell_E7 = numbrix.get_cell('E7')
        neighbors_of_cell_E7 = numbrix.get_cell_neighbors(cell_E7)
        self.assertFalse(cell_E7.is_link_endpoint(neighbors_of_cell_E7,
                                                  numbrix.get_all_values()))
        cell_G6 = numbrix.get_cell('G6')
        neighbors_of_cell_G6 = numbrix.get_cell_neighbors(cell_G6)
        self.assertTrue(cell_G6.is_link_endpoint(neighbors_of_cell_G6,
                                                 numbrix.get_all_values()))

    def test_find_chain_endpoints(self):
        numbrix = Numbrix(beginner_puzzle)
        numbrix.reduce()
        numbrix.display()
        # By looking at the puzzle produced above, we know that
        # the chain endpoints are: D4, D6, E3, G6, H3, H5
        chain_endpoints = numbrix.get_chain_endpoints()
        print(chain_endpoints)
        chain_endpoint_addresses = [cell.address for cell in chain_endpoints]
        self.assertTrue('E3' in chain_endpoint_addresses)
        self.assertTrue('G6' in chain_endpoint_addresses)
        self.assertTrue('H3' in chain_endpoint_addresses)
        self.assertTrue('H5' in chain_endpoint_addresses)

    def test_get_cell_between(self):
        """Test getting the cell between two cells that are 2 cells apart"""
        numbrix = Numbrix(beginner_puzzle)
        cell_C1 = numbrix.get_cell('C1')
        cell_C2 = numbrix.get_cell('C2')
        cell_C3 = numbrix.get_cell('C3')
        cell_C4 = numbrix.get_cell('C4')
        cell_C5 = numbrix.get_cell('C5')
        cell_A3 = numbrix.get_cell('A3')
        cell_B3 = numbrix.get_cell('B3')
        cell_D3 = numbrix.get_cell('D3')
        cell_E3 = numbrix.get_cell('E3')
        self.assertEqual([cell_C2], numbrix.get_empty_cells_between(cell_C3, cell_C1))
        self.assertEqual([cell_C4], numbrix.get_empty_cells_between(cell_C3, cell_C5))
        self.assertEqual([cell_B3], numbrix.get_empty_cells_between(cell_C3, cell_A3))
        self.assertEqual([cell_D3], numbrix.get_empty_cells_between(cell_C3, cell_E3))

    def test_another_reduction(self):
        """Reduction runs cleanly on the Dec 27, 2020 puzzle."""
        numbrix = Numbrix(dec_27_2020)
        numbrix.display()
        numbrix.reduce()
        numbrix.display()

    def test_hard_puzzle_reduction(self):
        """Reduction runs cleanly on the very hard puzzle."""
        numbrix = Numbrix(very_hard_puzzle)
        numbrix.display()
        numbrix.reduce()
        numbrix.display()

    def test_solve_one_cell_gaps(self):
        numbrix = Numbrix(beginner_puzzle)
        numbrix.reduce()
        numbrix.display()
        # By looking at the puzzle produced above, we know that
        # the chain endpoints are: D4, D6, E3, G6, H3, H5
        numbrix.fill_1_cell_gaps()
        numbrix.display()
        cell_D5 = numbrix.get_cell('D5')
        self.assertEqual(20, cell_D5.get_value())

    def test_create_endpoint_cell(self):
        """Endpoint discovery and guess selection on the beginner puzzle."""
        numbrix = Numbrix(beginner_puzzle)
        numbrix.reduce()
        numbrix.display()
        endpoint_cell_E3 = Numbrix_Cell('E3', [63])
        cell_F3 = Numbrix_Cell('F3', [])
        cell_E4 = Numbrix_Cell('E4', [])
        cell_D3 = Numbrix_Cell('D3', [18])
        cell_E2 = Numbrix_Cell('E2', [64])
        neighbors_of_E3 = [cell_E4, cell_F3, cell_E2, cell_D3]
        print("Neighbors of E3:", numbrix.get_cell_neighbors(endpoint_cell_E3))
        self.assertEqual(set(neighbors_of_E3),
                         set(numbrix.get_cell_neighbors(endpoint_cell_E3)))
        endpoint_cell_G6 = Numbrix_Cell('G6', [54])
        endpoint_cell_H3 = Numbrix_Cell('H3', [74])
        endpoint_cell_H5 = Numbrix_Cell('H5', [78])
        all_endpoints = [endpoint_cell_H5, endpoint_cell_H3,
                         endpoint_cell_G6, endpoint_cell_E3]
        chain_endpoints = numbrix.get_chain_endpoints()
        self.assertEqual(set(all_endpoints), set(chain_endpoints))
        # Filling 1-cell gaps must not change the set of chain endpoints.
        numbrix.fill_1_cell_gaps()
        all_endpoints = [endpoint_cell_H5, endpoint_cell_H3,
                         endpoint_cell_G6, endpoint_cell_E3]
        chain_endpoints = numbrix.get_chain_endpoints()
        self.assertEqual(set(all_endpoints), set(chain_endpoints))
        numbrix.display()
        cell_G3 = Numbrix_Cell('G3', [])
        cell_H4 = Numbrix_Cell('H4', [])
        expected_guessing_endpoint = Chain_Endpoint(endpoint_cell_H3,
                                                    [cell_G3, cell_H4],
                                                    [75], 4, 12)
        actual_guessing_endpoint = numbrix.get_guessing_cell()
        self.assertEqual(expected_guessing_endpoint, actual_guessing_endpoint)
        print("Chain Endpoint Guess is:", actual_guessing_endpoint)

    def test_solving_beginner_puzzle(self):
        numbrix = Numbrix(beginner_puzzle)
        numbrix.display()
        solved_puzzle = numbrix.search()
        solved_puzzle.display()
        self.assertTrue(solved_puzzle.is_solved())

    def test_solving_intermediate_puzzle(self):
        interactive_mode = False
        numbrix = Numbrix(dec_27_2020, interactive_mode)
        numbrix.display()
        solved_puzzle = numbrix.search()
        if solved_puzzle:
            solved_puzzle.display()
            self.assertTrue(solved_puzzle.is_solved())
        else:
            # self.fail() carries the message into the report; the original
            # print + assertTrue(False) hid it behind a bare "False is not true".
            self.fail("Solver finished, but no solution was found!")

    def test_solving_very_hard_puzzle(self):
        numbrix = Numbrix(very_hard_puzzle)
        numbrix.display()
        solved_puzzle = numbrix.search()
        solved_puzzle.display()
        self.assertTrue(solved_puzzle.is_solved())

    @unittest.skip("slow: re-solves all puzzles already covered individually")
    def test_solving_puzzles(self):
        puzzles = [beginner_puzzle, dec_27_2020, very_hard_puzzle]
        for puzzle in puzzles:
            numbrix = Numbrix(puzzle)
            numbrix.display()
            solved_puzzle = numbrix.search()
            solved_puzzle.display()
            self.assertTrue(solved_puzzle.is_solved())


if __name__ == '__main__':
    unittest.main()
/** * Used to authenticate and execute actions when Kerberos is enabled and a keytab is being used. * * Some of the functionality in this class is adapted from Hadoop's UserGroupInformation. */ public class KerberosKeytabUser extends AbstractKerberosUser { private final String keytabFile; public KerberosKeytabUser(final String principal, final String keytabFile) { super(principal); this.keytabFile = keytabFile; Validate.notBlank(keytabFile); } @Override protected LoginContext createLoginContext(Subject subject) throws LoginException { final Configuration config = new KeytabConfiguration(principal, keytabFile); return new LoginContext("KeytabConf", subject, null, config); } /** * @return the keytab file for this user */ public String getKeytabFile() { return keytabFile; } // Visible for testing Subject getSubject() { return this.subject; } }
/*----------------------------------------------------------------------------
   Seventh step. Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (bool global_done)
{
  unsigned int i;
  group_info_t group;
  basic_block bb;

  /* Free the per-group offset maps and bitmaps built by earlier steps.  */
  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      free (group->offset_map_n);
      free (group->offset_map_p);
      BITMAP_FREE (group->store1_n);
      BITMAP_FREE (group->store1_p);
      BITMAP_FREE (group->store2_n);
      BITMAP_FREE (group->store2_p);
      BITMAP_FREE (group->escaped_n);
      BITMAP_FREE (group->escaped_p);
      BITMAP_FREE (group->group_kill);
    }

  /* The per-basic-block dataflow sets only exist when the global pass
     ran; gen is always present then, the others may be NULL.  */
  if (global_done)
    FOR_ALL_BB (bb)
      {
	bb_info_t bb_info = bb_table[bb->index];

	BITMAP_FREE (bb_info->gen);
	if (bb_info->kill)
	  BITMAP_FREE (bb_info->kill);
	if (bb_info->in)
	  BITMAP_FREE (bb_info->in);
	if (bb_info->out)
	  BITMAP_FREE (bb_info->out);
      }

  /* Alias-set tracking structures are allocated lazily; clear_alias_sets
     being non-NULL implies the other three exist as well.  */
  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);
  BITMAP_FREE (kill_on_calls);

  /* Drop the allocation pools last: the records they back were freed
     (or abandoned) above.  */
  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}
"""A wrapper env that handles multiple tasks from different envs. Useful while training multi-task reinforcement learning algorithms. It provides observations augmented with one-hot representation of tasks. """ import random import akro import numpy as np from garage import EnvSpec, EnvStep, Wrapper def round_robin_strategy(num_tasks, last_task=None): """A function for sampling tasks in round robin fashion. Args: num_tasks (int): Total number of tasks. last_task (int): Previously sampled task. Returns: int: task id. """ if last_task is None: return 0 return (last_task + 1) % num_tasks def uniform_random_strategy(num_tasks, _): """A function for sampling tasks uniformly at random. Args: num_tasks (int): Total number of tasks. _ (object): Ignored by this sampling strategy. Returns: int: task id. """ return random.randint(0, num_tasks - 1) class MultiEnvWrapper(Wrapper): """A wrapper class to handle multiple environments. This wrapper adds an integer 'task_id' to env_info every timestep. Args: envs (list(Environment)): A list of objects implementing Environment. sample_strategy (function(int, int)): Sample strategy to be used when sampling a new task. mode (str): A string from 'vanilla`, 'add-onehot' and 'del-onehot'. The type of observation to use. - 'vanilla' provides the observation as it is. Use case: metaworld environments with MT* algorithms, gym environments with Task Embedding. - 'add-onehot' will append an one-hot task id to observation. Use case: gym environments with MT* algorithms. - 'del-onehot' assumes an one-hot task id is appended to observation, and it excludes that. Use case: metaworld environments with Task Embedding. env_names (list(str)): The names of the environments corresponding to envs. The index of an env_name must correspond to the index of the corresponding env in envs. An env_name in env_names must be unique. 
""" def __init__(self, envs, sample_strategy=uniform_random_strategy, mode='add-onehot', env_names=None): assert mode in ['vanilla', 'add-onehot', 'del-onehot'] self._sample_strategy = sample_strategy self._num_tasks = len(envs) self._active_task_index = None self._mode = mode super().__init__(envs[0]) if env_names is not None: assert isinstance(env_names, list), 'env_names must be a list' msg = ('env_names are not unique or there is not an env_name', 'corresponding to each env in envs') assert len(set(env_names)) == len(envs), msg self._env_names = env_names self._task_envs = [] for env in envs: if (env.observation_space.shape != self._env.observation_space.shape): raise ValueError( 'Observation space of all envs should be same.') if env.action_space.shape != self._env.action_space.shape: raise ValueError('Action space of all envs should be same.') self._task_envs.append(env) @property def observation_space(self): """Observation space. Returns: akro.Box: Observation space. """ if self._mode == 'vanilla': return self._env.observation_space elif self._mode == 'add-onehot': task_lb, task_ub = self.task_space.bounds env_lb, env_ub = self._env.observation_space.bounds return akro.Box(np.concatenate([env_lb, task_lb]), np.concatenate([env_ub, task_ub])) else: # self._mode == 'del-onehot' env_lb, env_ub = self._env.observation_space.bounds num_tasks = self._num_tasks return akro.Box(env_lb[:-num_tasks], env_ub[:-num_tasks]) @property def spec(self): """Describes the action and observation spaces of the wrapped envs. Returns: EnvSpec: the action and observation spaces of the wrapped environments. """ return EnvSpec(action_space=self.action_space, observation_space=self.observation_space, max_episode_length=self._env.spec.max_episode_length) @property def num_tasks(self): """Total number of tasks. Returns: int: number of tasks. """ return len(self._task_envs) @property def task_space(self): """Task Space. Returns: akro.Box: Task space. 
""" one_hot_ub = np.ones(self.num_tasks) one_hot_lb = np.zeros(self.num_tasks) return akro.Box(one_hot_lb, one_hot_ub) @property def active_task_index(self): """Index of active task env. Returns: int: Index of active task. """ if hasattr(self._env, 'active_task_index'): return self._env.active_task_index else: return self._active_task_index def reset(self): """Sample new task and call reset on new task env. Returns: numpy.ndarray: The first observation conforming to `observation_space`. dict: The episode-level information. Note that this is not part of `env_info` provided in `step()`. It contains information of he entire episode, which could be needed to determine the first action (e.g. in the case of goal-conditioned or MTRL.) """ self._active_task_index = self._sample_strategy( self._num_tasks, self._active_task_index) self._env = self._task_envs[self._active_task_index] obs, episode_info = self._env.reset() if self._mode == 'vanilla': pass elif self._mode == 'add-onehot': obs = np.concatenate([obs, self._active_task_one_hot()]) else: # self._mode == 'del-onehot' obs = obs[:-self._num_tasks] return obs, episode_info def step(self, action): """Step the active task env. Args: action (object): object to be passed in Environment.reset(action) Returns: EnvStep: The environment step resulting from the action. 
""" if type(action) == tuple: action = action[0] es = self._env.step(action) if self._mode == 'add-onehot': obs = np.concatenate([es.observation, self._active_task_one_hot()]) elif self._mode == 'del-onehot': obs = es.observation[:-self._num_tasks] else: # self._mode == 'vanilla' obs = es.observation env_info = es.env_info if 'task_id' not in es.env_info: env_info['task_id'] = self._active_task_index if self._env_names is not None: env_info['task_name'] = self._env_names[self._active_task_index] return EnvStep(env_spec=self.spec, action=action, reward=es.reward, observation=obs, env_info=env_info, step_type=es.step_type) def close(self): """Close all task envs.""" for env in self._task_envs: env.close() def _active_task_one_hot(self): """One-hot representation of active task. Returns: numpy.ndarray: one-hot representation of active task """ one_hot = np.zeros(self.task_space.shape) index = self.active_task_index or 0 one_hot[index] = self.task_space.high[index] return one_hot
An active disturbance rejection controller based on Kalman observer for PMSM In this paper, we propose a novel active disturbance rejection controller (ADRC) based on a Kalman observer for a permanent magnet synchronous motor (PMSM) speed-control system. The performance of the ADRC controller mainly depends on the capacity of the extended state observer (ESO). However, the bandwidth of the ESO is usually limited by the existence of noise, and the amplitude of the total disturbance is very large in most cases. Therefore, the ESO usually cannot track the total disturbance in real time. We design a Kalman observer to improve the performance of the ESO, which can weaken the effects of noise and estimate the load torque disturbance (the main component of the total disturbance) simultaneously. Combining the Kalman observer with the ESO, the performance of the ADRC controller can be greatly improved.
View photos (Photo: Getty Images) The World Health Organization has convened an emergency committee to discuss the “explosive” spread of the Zika virus, which has been linked to thousands of birth defects in Latin America. WHO’s Director General says the virus is “spreading explosively” and “the level of alarm is extremely high.” She explained that the relationship between Zika and birth defects has not yet been fully established but is “strongly suspected.” “The possible links, only recently suspected, have rapidly changed the risk profile of Zika, from a mild threat to one of alarming proportions,” she said Thursday. A post on the WHO website reveals that the emergency committee will meet to determine if the outbreak is a Public Health Emergency of International Concern. WHO plans to prioritize the development of vaccines and new tools to control mosquito populations, as well as improving diagnostic test and will convene experts to address critical gaps in scientific knowledge about the virus and its potential effects on fetuses, children and adults. For most people, the Zika virus causes only a brief, mild flu-like illness. But in pregnant women it has been linked to an alarming increase in the rate of the birth defect known as microcephaly — a debilitatingly small head and brain size. Microcephaly may cause mental retardation, as well as delays in speech, movement, and growth, according to the Mayo Clinic. We’ll update this breaking story as needed. More on the Zika virus on Yahoo Health: Infectious Disease Specialist Dr. Kent Sepkowitz explains the risks of the Zika virus in the U.S. and abroad Read This Next: Zika Virus Expected To Spread Throughout Americas
def install(ctx, **kwargs):
    """Install the New Relic AWS Lambda integration.

    Runs, in order: credential validation, IAM role creation, account
    linking, Lambda integration enablement, and (optionally) the managed
    license-key secret and the log-ingestion function. Step failures are
    accumulated in ``install_success`` rather than aborting immediately,
    so as much of the install as possible is attempted before reporting.
    """
    # Renamed from `input` to avoid shadowing the Python builtin.
    params = IntegrationInstall(session=None, verbose=ctx.obj["VERBOSE"], **kwargs)
    params = params._replace(
        session=boto3.Session(
            profile_name=params.aws_profile, region_name=params.aws_region
        )
    )
    # Default the linked-account name to one derived from the AWS account id.
    if not params.linked_account_name:
        params = params._replace(
            linked_account_name=(
                "New Relic AWS Integration - %s"
                % integrations.get_aws_account_id(params.session)
            )
        )
    if params.aws_permissions_check:
        permissions.ensure_integration_install_permissions(params)
    click.echo("Validating New Relic credentials")
    gql_client = api.validate_gql_credentials(params)
    click.echo("Retrieving integration license key")
    nr_license_key = api.retrieve_license_key(gql_client)
    # Overall success flag; individual steps may flip it to False without
    # stopping the remaining steps.
    install_success = True
    click.echo("Creating the AWS role for the New Relic AWS Lambda Integration")
    role = integrations.create_integration_role(params)
    install_success = install_success and role
    if role:
        click.echo("Linking New Relic account to AWS account")
        res = api.create_integration_account(gql_client, params, role)
        install_success = res and install_success
        # Guard against a falsy `res`: previously `res.get("id")` would raise
        # AttributeError when account creation failed and returned None.
        linked_account_id = res.get("id") if res else None
        if linked_account_id:
            click.echo(
                "Enabling Lambda integration on the link between New Relic and AWS"
            )
            res = api.enable_lambda_integration(gql_client, params, linked_account_id)
            install_success = res and install_success
    if params.enable_license_key_secret:
        click.echo("Creating the managed secret for the New Relic License Key")
        res = integrations.install_license_key(params, nr_license_key)
        install_success = install_success and res
    if params.enable_cw_ingest:
        click.echo("Creating newrelic-log-ingestion Lambda function in AWS account")
        res = integrations.install_log_ingestion(params, nr_license_key)
        install_success = res and install_success
    if install_success:
        done("Install Complete")
        if params.verbose:
            click.echo(
                "\nNext steps: Add the New Relic layers to your Lambda functions with "
                "the below command.\n"
            )
            command = [
                "$",
                "newrelic-lambda",
                "layers",
                "install",
                "--function",
                "all",
                "--nr-account-id",
                str(params.nr_account_id),
            ]
            if params.aws_profile:
                command.append("--aws-profile %s" % params.aws_profile)
            if params.aws_region:
                command.append("--aws-region %s" % params.aws_region)
            click.echo(" ".join(command))
    else:
        failure("Install Incomplete. See messages above for details.", exit=True)
use crate::consts::BLAKE2B_IV;
use digest::generic_array::typenum::{U128, U64};

// Instantiate the BLAKE2b hash types via the shared `blake2_impl!` macro.
// The arguments are positional; annotations below are inferred from the
// BLAKE2b parameters (word size 64 bits, 64-byte digest, 128-byte block).
blake2_impl!(
    VarBlake2b, // variable-output-length hasher type to generate
    Blake2b,    // fixed-output-length hasher type to generate
    u64,        // word type: BLAKE2b operates on 64-bit words
    u64x4,      // four-word vector type used by the compression function
    U64,        // output size in bytes (64) — presumably; confirm against blake2_impl!
    U128,       // block size in bytes (128)
    32,         // NOTE(review): 32/24/16/63 match BLAKE2b's G-function
    24,         //   rotation constants R1..R4 from the BLAKE2 spec —
    16,         //   confirm against the macro body.
    63,
    BLAKE2B_IV, // initialization vector for BLAKE2b
    "Blake2b instance with a variable output.", // doc string for VarBlake2b
    "Blake2b instance with a fixed output.",    // doc string for Blake2b
);
/*! \brief Constructor
* \details Constructs the Info object by allocating a hashmap to store the
*          information objects, and initializes a link to the conceptual parent
*          of this hashmap. Any search that fails in this hashmap will proceed
*          to search the parent hashmap. This link may be null in which case all
*          searches are local.
* \param aParentInfo A pointer to the parent Info object of this Info object
*        which may be null.
* \param aOwnerName The name of the owner of this Info object, stored in
*        mOwnerName (presumably for diagnostics/lookup — confirm at call sites).
*/
Info::Info( const IInfo* aParentInfo, const string& aOwnerName )
: mOwnerName( aOwnerName ),
mInfoMap( new InfoMap( getInitialSize() ) ),
mParentInfo( aParentInfo )
{
}
import { Component, OnInit, ViewEncapsulation } from '@angular/core'; import { User, UserService, AuthenticationService } from 'ngx-login-client'; import { ActivatedRoute, Router } from '@angular/router'; import { ProviderService } from '../shared/account/provider.service'; import { Subscription } from 'rxjs'; @Component({ encapsulation: ViewEncapsulation.None, selector: 'alm-home', templateUrl: './home.component.html', }) export class HomeComponent implements OnInit { loggedInUser: User; openShiftLinked: boolean = false; subscriptions: Subscription[] = []; errorConnecting: boolean = false; loading: boolean = true; constructor( private readonly userService: UserService, private readonly auth: AuthenticationService, private readonly route: ActivatedRoute, private readonly providerService: ProviderService, private readonly router: Router, ) {} ngOnInit() { this.userService.loggedInUser.subscribe((user: User) => { this.loggedInUser = user; if (user.attributes && user.attributes.cluster) { this.linkOpenshiftAccounts(); } }); } linkOpenshiftAccounts() { this.loading = true; let connectionSub = this.auth .isOpenShiftConnected(this.loggedInUser.attributes.cluster) .subscribe((isConnected) => { this.openShiftLinked = isConnected; let wait = this.route.snapshot.queryParams['wait']; if (!isConnected && !wait) { // first time through and user isn't connected - automatically connect accounts this.providerService.linkOpenShift( this.loggedInUser.attributes.cluster, window.location.origin + '/_home?wait=true', ); } else if (!isConnected && wait) { // second time through - do not try again if wait is on URL and the user is still not connected // something happened, show error this.errorConnecting = true; this.loading = false; } else { // success - user is connected - send home this.loading = false; this.router.navigateByUrl('_home', { queryParams: {} }); } }); this.subscriptions.push(connectionSub); } ngOnDestroy(): void { this.subscriptions.forEach((sub) => { sub.unsubscribe(); 
}); } }
Get the biggest daily news stories by email Subscribe Thank you for subscribing We have more newsletters Show me See our privacy notice Could not subscribe, try again later Invalid Email Parents may have been entitled to free Calpol for years without knowing it under a hushed-up NHS scheme. A mum made the chance discovery after complaining to a Boots cashier about how expensive children’s medicine is. Christine Davidson, of Stornoway in the Outer Hebrides mentioned the NHS Minor Ailments scheme on Facebook – and her post went viral with over 100,000 shares. She wrote: “She told me if you register your details with them under the Minor Ailments Scheme all ­medicines for children are free. A scheme that has been going for eight years. “They are not allowed to advertise it but you can save a small fortune on Calpol, Piriton, Sudocrem, plasters, etc.” The Minor Ailments scheme actually started a decade ago. It lets patients get basic medication for things like coughs, colds, sore throats and earache free from some pharmacists without taking up GPs’ time. But there appears to have been little awareness of the service until now. To qualify, patients must be eligible for free prescriptions – which is normally the case if you are under 16, 16-18 and in full-time ­education or over 60. Women also qualify if they are pregnant or have had a baby in the past 12 months. All children, whatever family income, are therefore eligible. In theory, parents could save around £2.99 on a 100ml bottle of Calpol, £5 on a tub of Sudocrem and £10.79 on a children’s eczema cream. Tonight a spokeswoman for NHS England confirmed the existence of the scheme – which is run by local NHS clinical commissioning groups depending on that area’s needs. But she said it was “unlikely” patients would get “branded medicine, like Calpol”. She added: “The scheme was established to provide NHS-funded, over-the-counter ­medicines to people on lower incomes. 
"It prevents the need for patients to have medicines for minor illnesses prescribed by a GP, leaving the GP more time for patients with more complicated conditions.” The NHS was unable to provide a full list of participating pharmacies but around 70 services are believed to be operating in England alone.
/**
 * Connect to a server on a specific port number (the actual bind method).
 * Creates the control socket plus its writer/reader, prints a status line,
 * and consumes the server's greeting line.
 *
 * @param inetAddress host name or address to connect to
 * @param port        TCP port to connect to
 * @return true on success; false if the host is unknown or socket I/O
 *         setup fails (in which case any half-open socket is closed)
 */
public boolean bind(String inetAddress, int port) {
    try {
        controlSocket = new Socket(inetAddress, port);                  /* create socket */
        out = new PrintWriter(controlSocket.getOutputStream(), true);   /* write to socket */
        in = new BufferedReader(
                new InputStreamReader(controlSocket.getInputStream())); /* read from socket */
        System.out.println("Successfully connected to " + inetAddress + " on port " + port);
        // Consume the server's greeting line; its content is not used here.
        in.readLine();
        return true;
    } catch (UnknownHostException e) {
        System.err.println("Couldn't reach host:" + inetAddress);
        return false;
    } catch (IOException e) {
        System.err.println("IOException occurred when trying to create socket I/O");
        // Close the half-open socket so the descriptor isn't leaked when
        // stream creation or the greeting read fails.
        if (controlSocket != null) {
            try {
                controlSocket.close();
            } catch (IOException ignored) {
                // best effort
            }
        }
        return false;
    }
}
// Tree-query program: reads a rooted tree (root = 1) and m vertex-set queries.
// Each query is anchored at its deepest listed vertex `id`; during a second
// DFS the code checks whether every queried vertex c satisfies "c or pa[c] is
// an ancestor of id".  ans[i]==0 prints "YES", ans[i]==1 prints "NO".
// NOTE(review): this matches the classic "does a root path pass within
// distance 1 of every query vertex" problem — confirm against the statement.
#include<bits/stdc++.h>
#define NN 200100            // array bound: up to ~2e5 vertices/queries plus slack
using namespace std;
typedef long long INT;       // unused here; kept from the author's template

// adj: adjacency lists; q[i]: vertices of query i; df[v]: queries anchored at v.
vector<int>adj[NN],q[NN],df[NN];
// dep: depth (dep[root]=1); vst: "is an ancestor of the node being visited";
// ans: per-query result flag; pa: parent in the rooted tree.
int dep[NN],vst[NN],ans[NN],pa[NN];

// First pass: record parent and depth of every vertex.
void dfs(int u,int fa=0){
    pa[u]=fa;
    dep[u]=dep[fa]+1;
    for(int v:adj[u]){
        if(v==fa)continue;
        dfs(v,u);
    }
}

// Second pass: while standing at u, vst[] marks exactly the proper ancestors
// of the child v about to be visited (u and everything above it).  A query
// anchored at v fails (flag=1 -> "NO") iff some queried vertex c has neither
// c nor pa[c] on that root-to-u ancestor chain.
void DFS(int u){
    vst[u]=1;
    for(int v:adj[u]){
        if(v==pa[u])continue;
        for(int id:df[v]){
            int flag=0;
            for(int c:q[id]){
                if(vst[c]==0 and vst[pa[c]]==0){
                    flag=1;
                    break;
                }
            }
            ans[id]=flag;
        }
        DFS(v);
    }
    vst[u]=0;  // undo the mark on the way back up
}

int main(){
#ifndef ONLINE_JUDGE
    // Local-testing file redirection; compiled out on the judge.
    freopen("in.in","r",stdin);
    freopen("out.out","w",stdout);
#endif
    int n,m;
    cin>>n>>m;
    // Read the n-1 undirected tree edges.
    for(int i=1;i<n;i++){
        int u,v;
        scanf("%d %d",&u,&v);
        adj[u].push_back(v);
        adj[v].push_back(u);
    }
    dfs(1);
    // Read each query and anchor it at its deepest vertex.  `id` is always
    // assigned because every vertex has dep >= 1 > mx's initial 0.
    for(int i=1;i<=m;i++){
        int k;
        scanf("%d",&k);
        int mx=0,id;
        for(int j=1;j<=k;j++){
            int v;
            scanf("%d",&v);
            q[i].push_back(v);
            if(dep[v]>mx){
                mx=dep[v];
                id=v;
            }
        }
        df[id].push_back(i);
    }
    DFS(1);
    for(int i=1;i<=m;i++){
        if(ans[i]){
            puts("NO");
        }
        else puts("YES");
    }
    return 0;
}
// Parse parses a string value into a time during the week. The expected format // is like: "Thu 7:30PM". The local location is used. func Parse(val string) (Time, error) { if len(val) < 4 { return Time{}, errors.New("bad weekday") } day, ok := strToDay[val[:4]] if !ok { return Time{}, errors.New("bad weekday") } t, err := time.Parse(time.Kitchen, val[4:]) if err != nil { return Time{}, fmt.Errorf("bad time: %v", err) } return Time{day: day, hour: t.Hour(), min: t.Minute()}, nil }
This post is a transcript of 10 For The Producers – Episode 13, material that is the intellectual property of Cloud Imperium Games (CIG) and it’s subsidiaries. INN is a Star Citizen fan site and is not officially affiliated with CIG, but we reprint their materials with permission as a service to the community. INN edits our transcripts for the purpose of making the various show participants easier to understand in writing. Enjoy! 10 For The Producers – Episode 13 – Transcript Intro: JR: Hey guys welcome to another episode of 10 for the producers, texas edition! Yee haa!! [Gun sounds] JH: Niiiice… JR: Yeeah, so welcome to another special episode. We would like to thank subscribers first of all for making video content like this possible, we wouldn’t be here making this video if it weren’t for you guys so thanks for that. I’m Jake Ross by the way, associate producer of the Persistent Universe. JH: I am Jason Hutchins, Senior producer for Star Citizen. JR: And we’re here to answer questions from the forums. We got 10 questions here that we’ve kind of gone through thanks to those of you who have submitted questions. We can only get through 10 of them so apologies in advance but with that Jason. JH: I will take it away! JR: Take it away. JH: Our first question is from… (1:06) Graxas asks: With so many gameplay mechanics and system content needing to be developed for the PU, how do you make prioritisation decisions on what is developed first? JR: Yeah I’d say first and foremost, the priorities come from Chris and Tony. Those are the guys directing the project so they pave the way for how we are supposed to prioritise our features. So they decide “Okay we have an upcoming release, what do we want to show?” And then once they decide what we want to show we’ll break those features down into critical priority, high priority and moderate priority. 
And what that really means is that a critical feature is something that is an absolute must-have for that release; a high priority feature is a nice-to-have, it’s something that’s not essential but it’s something that we would like to have in the release; and then a moderate is a wishlist item, so it’s something if we get to it “great” if not “no sweat”. JR: So for example, for the first Social Module release we had ArcCorp, the environment itself, it was a pretty critical, must-have feature. Without ArcCorp there you wouldn’t have had an environment to run around in: so it had to be there. That’s a critical feature. We also had emote text descriptions, it was like a high feature. We wanted everyone to know when you did your emotes what was happening as you did them so the text would show up in the chat box. So that was a nice to have. And then moderate would be the critical, er, moderate would be the chat … JH: Opposite of critical! JR: Oh so critical! Moderate would be the chat, additional extraneous chat features like the ability to minimise windows, customise chat just a little bit and that didn’t end up making it in. JH: Stuff that we want to do eventually but we don’t have to do right this second for this release. JR: Exactly, and then those features that don’t make it into that release they’ll either waterfall into the next release, sometimes at higher priority, sometimes not at higher priority depending on what Tony and Chris want to focus on; or they’ll go into a backlog for later review. JH: A little peek behind the curtain too, we’re trying to standardise what we are doing on the Production side, as far as using the same kind of nomenclature for features that we do for bugs, so we have critical bugs; high priority bugs; and moderate bugs. If it is a critical bug we’re not going to ship a patch with it. JR: No. JH: Just like if it is a critical feature, or how that relates to task, we’re not going to ship the patch without that being done.
Also there is a focus shift to try and prioritise features going forward that are needed for both Persistent Universe and Squadron 42 and Star Citizen as a whole. So we’re trying to look at it holistically rather than being split up across various modules. (3:39) Foible asks: Chris Roberts has stated that it his intention that after the PU launches, his vision is to see a continued roll-out of content, as long as there is interest in the game. He sees these releases to be as frequent as a few times per week. Do you envision these releases to be a progression of the story arc or just a technical expansion of the universe? In other words, will there be a progressive story arc that will unravel with time in the PU? JH: That’s a really interesting question. The PU is first and foremost an open world style first person universe and it’s a sandbox really. There will be story yes, but it’s the same kind of story you have for settings, like world settings. Rather than a story arc, missions will have stories, characters you run into will have stories. But for big sweeping story arcs, you’ll get those with Squadron 42. The PU will support and can live around that story and that dovetails nicely with what you’ve got. JR: That’s actually a nice segue into this question.. (4:52) Nahema Darkstar asks: Will there be a storyline in Star Marine to be a little like Squadron 42 with some rendered cutscenes? Or is Star Marine just an FPS like unreal tournament, quake wars, etc? And will there be some FPS in Squadron 42 or maybe in the next part of the single player game? JH: There will be FPS in Squadron 42. The thing I want to talk about here though is; Star Marine is a simulation within the first person universe. Much like Arena Commander is a simulation within the first person universe. It’s a game within a game. Star Marine is going to be a test bed for new features, new game types, new mission objectives and maps. 
We even plan on rolling out a way to do white box testing in the environments that you expect to use in the PU or even a part of a level for Squadron 42. Perhaps maybe we’ll even roll out new enemy AI behaviour for certain monsters or other NPCs out there so we can test within that environment and get that working right and tweaked, get feedback from backers and get that into the game itself proper. The other questions will there be a story? Yes. Some modes will have a story and again it’s like the setting, rather than the story. So it’s like pirates are attacking the station. Fight them off. That’s a story but it’s not a big story. So it’s not a story like you see in Squadron 42 that’s where the big story arcs are going to come from. JR: And Star Marine is also used like Arena Commander is used like a simulation to practice your dog fighting skills. This is a simulation to practice your first person shooter skills if you don’t want to jump right into the game. JH: Yeah. Not only that not only first person shooter skills. Your first person maneuvering skills. Say a place to learn how the character interacts with the environment. (7:00) Mr. Nowak asks: Cheers to the PU team! With the recent ongoing restructuring of the company what has been the most challenging aspect this has brought to your work as producers? How do you cope with the organizational change when you have a system in the work place. JR: The most challenging part of any kind of restructuring is just keeping team morale up. Nobody likes to see friends leave the company or moved to other parts of the project; They like to kind of keep the status quo so to speak so when that shifts the team morale sort of takes a hit. Part of the job of a producer is the momma bear factor that you like to call it. It’s that support and encouragement that the producer can provide. When team morale is high, productivity is high and so it’s really crucial to our job to make sure that happens. 
Communicating what the new structure is to the entire team, making sure there are no questions and, if there are questions, making ourselves available for those developers who have questions about the new structure. Just being aware of the feel of the team and interacting with them whenever we need to update them on everything that’s going on. JH: Jake and I were also adjusting to our roles here. There’s a bit of a learning curve that we’re in the middle of so really, ask us again once we know what the new normal is. JR: The normal is always shifting [chuckles] JH: The next question is from… (8:36) Amontillado asks: What’s one of the accomplishments that your team has achieved thus far, that you’re most proud of? JR: It’s a tough one. There’s a couple of things that stand out. Namely the Social Module release in general. The first release was just a huge success story for our team. It was one of if not the only Star Citizen release that actually hit the deadline that we set out to hit. Partnership with Behaviour in Austin has been really great so far, and that I think has shown with the release of ArcCorp. The community can now see evidence of that partnership. The ArcCorp environment is amazing it’s really beautiful to look at even though it’s supposed to be a super grungy environment. It went through several design iterations over the past..I don’t know how long but they’ve been working on it for a while. We tried to get it just right to suit what our gameplay needs were and as those needs evolved we had to switch things up so Tony (Zurovec) and Mark Skelton went back and forth with Behaviour to make sure all those needs were taken care of and iterating and polishing and that kind of thing. The emotes were a fun addition as well getting to see. Some of our animators here in Austin actually captured a lot of those emotes with motion capture. So seeing them work on those, get those in the game, to tag team with the designers and stuff.
It was cool to see those in the communities hands as well JH: You know we’ve done so much in the past year, since i’ve been here, that it’s hard to choose just one. So i’m not going to answer this. This is a Rosemary’s Baby kind of question and i’ll have no part of this! (10:25) Drewcifer asks: How much time is spent on average by individual functional personnel (non-producers/project managers) providing content for all these regular updates? How do you all prevent “status updates” from interfering with personnel doing their jobs? Most of the status content we’re seeing looks like it takes a while to put together and that it would really get in the way of actually creating the universe. As such when I stop seeing something like Around the Verse’s “Ship Shape” during a given week or several weeks straight I’m assuming the personnel are refocusing on actually getting the ships out. Accurate or are those folks on vacation? JH: Your assumption is correct: they all go on holiday until the camera rolls really! JR: [chuckles] JH: No, they don’t. They do, in fact, get back to work on their respective pipelines. It certainly can be a big distraction. Not that we don’t love spending a lot of time with Thomas here, or writing letters for the community. It’s an important thing though. So the community updates that we do, they’re written as a partnership between the senior production staff, the developer leads that they work with closely, and the Community team. JH: The senior producers are in a position to have a big picture view of the project. Which makes them uniquely suited to write this kind of report. One mantra of Production is “protect your team” and that’s protect your team from distractions; protect your team from things that keep them doing their job. So a good producer will take this to heart and keep the impact of these kind of reports to a minimum. Try to keep it to no more than a few minutes per developer per week on these kind of reports. 
JH: It’s the collating of the report, and the gathering of the necessary materials, the writing it down and editing it that really takes the time. I’ve personally spent: the worst was probably a day and a half total, spread out over a few days, writing some of the bigger Star Marine reports that I was working on. But I would say that the average time for me has worked out at less than four hours a week. That’s a chunk of time but if I worked a forty hour week, which would be awesome, it’s less than ten percent of my time. So I would say the average time is not that great. JR: And it’s important for a producer to know what his or her team is working on at any given point. So we have weekly scrums here, we have leads meetings, we have just the day-to-day interaction with developers. At any point I know what my team is working on at any given time. So making these reports, I know you do the Star Marine reports, and I’ll chip in on the monthly report you guys see; but with any of these reports it’s easy for the producer to take that information from his head and put it on paper because he already just knows what his team is working on. In some instances it’s not the case and you actually have to reach out and get detailed updates for things you don’t have the information on. But for the most part you should be able to pull a lot of that information from your head already. So the important thing is, like you said, to protect your team from that time and a good way to do that is just to pull it from your own head. JH: I should point out we’re not doing the Star Marine reports any more. Our focus has shifted so that it’s specifically a report on the progress of 2.0 and ultimately that will be a report on our Star Citizen releases in general. I’ve been partnering with Tom Johnson in Manchester on those reports now, so we’re splitting the burden which makes it both easier and harder for both of us. 
JH: The other thing I wanted to point out is that developers like Zane Bien working on the User Interface or Mark Abent who does the Bugsmasher, those guys obviously spend more time but I think those are important updates. The communication with our backers, with you guys, and getting that feedback from the community really keeps the team going and allows us to make course corrections where we need to. It’s great and it’s almost a unique situation in the industry. (14:54) Doc asks: How do you even keep track of all the small things up to the big i.e the sheer size and difference of things like (1) creating modular assets of new designs that can be used for new systems/stations/landing points, (2) bugfixing and adding important content to ArcCorp (e.g. Subsumption update, netcode…buggies) and (3) all the as-of-yet secret economy systems (simulators, deep designs) for the far away multi-system PU?! Do you prioritize all those things on a daily or weekly basis? JR: Yeah, there are a lot of requests that come in from Chris and Tony for all kinds of things and speaking for Tony…Tony’s general philosophy he’s told me several times is that he really only has a 12 month horizon meaning that he only really cares at any given moment about what’s on the horizon for the next 12 months, what’s on the schedule for that time. It makes things a little easier to manage on my side since the amount of content in the game planned right now takes us well beyond 12 months from now. Things that don’t fall in the next 12 months usually fall into a backlog for later review, things aren’t falling through the cracks or anything, we document everything and make sure it goes into a backlog. That backlog grows, grows and grows but 12 months everytime something gets finished or completed we pull something else out. That’s kinda how a backlog works. JH: There’s never a shortage of good ideas. JR: No, never and a lot of those good ideas come from the community so thanks. 
As far as keeping track of the features goes…usually it’s a matter of just pulling features from the backlog, producers gathering estimates and dependencies for those features from the lead. A lot of times it’s a relationship between the producer and the lead talking about a feature figuring out what’s required, how we break this down into individual tasks and then taking those tasks and documenting them in our task tracking software. We use JIRA here. That’s kind of how that whole process works for me at least. JH: Also, as I mentioned earlier, our new focus will be looking at the backlog to see what features are needed for the Persistent Universe and Squadron 42 so we can focus on Star Citizen as a holistic product. If something is Persistent Universe only, it’ll probably go back to the backlog for the year for the next time we review the backlog. We’ll build those features for the Persistent Universe as we get closer to rolling those features out and rolling out different patches for the Persistent Universe. JH: So basically, if Squadron 42 needs something that is also beneficial for the PU, those things will get prioritized higher than something that’s only beneficial for the PU. (18:00) Lock Ostrie asks: How do you plan on building out the Persistent Universe? Are you planning on prioritizing the Stanton system and then work out from there or are you planning to finish building out systems that already have content for them for things like SQ42 or is there no real plan set for building out the PU at this time? JR: Our midterm plan is to flesh out an entire system so almost like a vertical slice of what a system in the PU entails. I think we’ll be focusing mostly on the Stanton system for the foreseeable future getting it to a gold standard quality we’re focusing on the four landing zones in Stanton; ArcCorp’s Area 18, Hurston, Microtech and Crusader. 
So getting all those landing zones fleshed out getting it so you can actually visit those landing zones, so we’ll be working with other teams across the project who are fleshing out the space portion, the spaces in-between the landing zones starting with Crusader in 2.0. Basically getting a gold standard system is going to be our near to mid term priority. JH: When I first joined the team over a year ago now the idea was to be designing the first five star systems. What we did not know at that point was how long did it take us to make a single star system or even a single landing zone. Over the course of the year we really fleshed out a single landing zone and we’ve done most of the work needed for our second landing zone and just recently added points of interest around the Stanton star system itself. This is teaching us how long it’s going to take us to do it. Even though I would say we are not one hundred percent complete with the landing zone yet once we have that, it will give us an idea how much more we need to do. So we shifted our focus from instead of the first five complete star systems to the first five landing zones. So we currently have four landing zones in Stanton. Those are landing zones, not like i’m going out to a space station to make a quick repair and getting back on. That’s not a landing zone that’s more like a mission. We’re adding those as well. This tell us what we need to do. Then from here we’ll move on to the next one which is Nyx, the Levski landing zone. A much less populated system. That’s going to need a lot of activity in it as well, cause you don’t want a single star system with one landing zone and that’s it, how are those going to work out? This will tell us what a medium sized system takes to build, what a very small size system takes to build. Then we can look at our processes and see how we need to tweak those processes and see how we can go forward making a lot of content. 
JR: It will give us actual metrics which is something we don’t have for some of the areas of our project. So once we get those metrics we can actually calculate ok this takes this long. If we want twenty systems by this time we know it will take this long to make. That will give us information like do we need to ramp up more resources or outsource a little bit more JH: And how we can make changes to the system to make it better or faster or more procedural or what have you. (21:26) Zeshio asks: Do you have different team members who are really excited to work on certain parts of Star Citizen that aren’t in the pipeline yet? For example, maybe a team member is dying to work on mineable asteroid fields but the work isn’t there yet. Do you have any examples of what team members are excited to work on as we progress into 2.0 and beyond? JR: This is kind of already in the pipelines and pre-production phase but I know Mark Skelton is super excited to get his hands on the additional landing zones for Stanton…. So Hurston, Microtech and Crusader. We’ve got one landing zone out there and we’ve got another one almost finished and Nyx and so moving onto these next three there’s lots of opportunities to create new things and new aesthetics that we haven’t seen yet in Star Citizen. Microtech is kind of a cold snowy planet and crusader is kind of a vespen, kind of cloud city. JH: Cloud City JR: Cloud city. Hurston’s a refinery type atmosphere. I know as an art director that’s a lot of things to look forward to. We’re already in pre-production phases there and as we enter white box phase we’ll get some of those white boxes back which is basically just the designer taking these 3D objects and building out a space, rudimentary kind of space. We’ll get that back and then Mark Skelton will put our concept artists to work and do paintovers so it’s actually what is this space actually going to look like and that’s really exciting for him I know. 
I myself am excited to see the first iteration of the character customization because I’m one of those guys who spends hours just customizing my character and making them just perfect before I even start the game so I’m excited about that. JH: I don’t even want to talk about how much time I spent trying to make my Fallout 4 character look like Pam Poovy last night… It’s a little embarrassing. What’s also embarrassing is I failed, I didn’t really get it… JR: Awww man that’s a bummer JH: I just want to mention that this may seem out of character for me because I’m one of the leads of kind of the first person gameplay but let me just put this out there. I’m most excited for the opportunity to work on game systems that don’t involve combat. The way you interact with mining or the way you can be as Todd Pappy said:” A space trucker”. Where you don’t need to shoot anybody at all and to kind of make it around the universe and earn a living like, that’s interesting to me. When I see people play some of the other games, especially the MMO’s and try to do it as a complete pacifist and the lengths that they have to go to kind of earn that as an achievement for themselves… we’re making a sandbox and that just really geeks me out the fact that we’re making a sandbox for people to play in. You don’t have to kill anybody if you don’t want to. I’m really look forward to seeing how that shakes out JR: Not a lot of games reward the pacifist lifestyle so it’s kind of nice. Cool I got another question from.. (24:23) Lock Ostrie asks: In the last monthly report, it was said that Casaba Outlet was wrapping up in order to get ready for the shopping release. Does this mean the persistence update or at least parts of it has already been folded into 1.3 and 2.0 or is persistence and shopping now part of the same release? JH: Persistence in the traditional sense of the word is not folded into 1.3 which is live or 2.0 in any kind of meaningful player face anyway. 
Though we are working as we speak on the back end required to support player persistence. That being said our partners at Behaviour and Turbulent are working on some really clever things that allow us to be able to customise our character and have that state stick when you’re going from one map type to another map type. We’re a ways off from having traditional persistence and by that I mean an inventory system that allows you to pick up an item, drop an item, sell an item, trade an item with a friend have that thing and have it persist between play sessions like if you logout of the session and log back in. We’re going to fake it, right now and then we’re going to have real persistence coming in. I’m going with soon™ JR: Last thing i’m going to add to that. The persistence as with everything will be put out. We have iterations, we’ll have the first iteration go out to you guys and then as we release 2.1, 2.2, we’ll put in more functionality surrounding persistence. We’ll have hangar storage somewhere in there, being able to buy something and send it back to your hangar. Potentially even as you have clothing in Casaba outlet picking clothing and being able to persist that across places as you will. It’s an iterative process as it is with everything. Be patient and you’ll get it. Outro: JH: Thank you all very much for joining us for this very Texas edition of 10 for the Producers. JR: Woo JH: I forgot to wear my cowboy hat. Yeah, sorry. So Jake, thank the backers and subscribers for making this possible. JR: Thank you so much. JH: Good job. Thanks guys.
/** * <code>AttributeBean</code> holds data for an attribute definition for a CI.<br> * It's always contained inside a <code>CiBean</code> and does not have<br> * a reference to the owner <code>CiBean</code>.<br> * <br> * The Attribute bean can produce XML snippet for itself. * */ public class AttributeBean { // The alias for this attribute. private String alias; // The displayName for this attribute. private String displayName; // Is the type a complex type, pointer to another CI template private boolean complexType; // The type alias private String type; // The reference type alias, can be null if no reference type is requeired. private String refType; // The max occurrence of this attribute. private String maxOccurs; // The min occurrence of this attribute. private String minOccurs; // The description for the attribute. private String description; // The back end id, Read-Only. private Long id; // If this attribute is defined on this template, Read-Only. private boolean derived; // Create Date. private Date createDate; // Last Modified Time. private Date lastModified; /** * Basic constructor * */ public AttributeBean() { } /** * Help constructor to minimize code lines. 
* @param alias * @param type * @param refType * @param complex */ public AttributeBean(String alias, String type, String refType, boolean complex) { setAlias(alias); setType(type); setRefType(refType); setComplexType(complex); } public AttributeBean(String displayName, String alias, String type, String refType, boolean complex) { setDisplayName(displayName); setAlias(alias); setType(type); setRefType(refType); setComplexType(complex); } public void setDescription(String d) { this.description = d; } public String getDescription() { return (this.description); } public String getAlias() { return alias; } public void setAlias(String alias) { this.alias = alias; } public String getMaxOccurs() { return maxOccurs; } public void setMaxOccurs(String maxOccurs) { this.maxOccurs = maxOccurs; } public String getMinOccurs() { return minOccurs; } public void setMinOccurs(String minOccurs) { this.minOccurs = minOccurs; } public String getDisplayName() { return displayName; } public void setDisplayName(String name) { this.displayName = name; } public String getRefType() { return refType; } public void setRefType(String refType) { this.refType = refType; } public String getType() { return type; } public void setType(String type) { this.type = type; } public Date getCreateDate() { return createDate; } public void setCreateDate(Date createDate) { this.createDate = createDate; } public Date getLastModified() { return lastModified; } public void setLastModified(Date lastModified) { this.lastModified = lastModified; } public String toString() { return (alias + "<" + type + ">[" + minOccurs + ".." 
+ maxOccurs + " <<" + refType + ">>]"); } public int fetchMaxOccursAsInt() { if (maxOccurs == null) { return (1); } if (maxOccurs.equals("unbound")) { return (-1); } int value = Integer.parseInt(maxOccurs); return (value); } public int fetchMinOccursAsInt() { if (minOccurs == null) { return (1); } int value = Integer.parseInt(minOccurs); return (value); } public boolean isComplexType() { // TODO Auto-generated method stub return (this.complexType); } public void setComplexType(boolean value) { this.complexType = value; } public String toXML(int level) { return(toXML(level, false)); } public String toXML(int level, boolean compact) { StringBuffer buf = new StringBuffer(); // Compact mode don't show derived attributes. if (compact && this.isDerived()) { return(buf.toString()); } buf.append("\n"); // Attributes buf.append(XmlParser.getTab(level) + "<" + XmlParser.ATTRIBUTE_ELEMENT.getName()); buf.append(" " + XmlParser.ATT_ALIAS_ATTR.getName() + "=\"" + this.getAlias() + "\""); if (this.getDisplayName() != null) { buf.append(" " + XmlParser.NAME_ATTR.getName() + "=\"" + this.getDisplayName() + "\""); } buf.append(" " + XmlParser.ATTR_DERIVED.getName() + "=\"" + this.isDerived() + "\""); if (this.getId() != null) { buf.append(" " + XmlParser.ID_ATTR.getName() + "=\"" + this.getId() + "\""); } if (this.getCreateDate() != null) { buf.append(" " + XmlParser.CREATE_DATE_ATTR.getName() + "=\"" + CiBean.toXmlDateTime(this.getCreateDate()) + "\""); } if (this.getLastModified() != null) { buf.append(" " + XmlParser.LAST_MODIFIED_ATTR.getName() + "=\"" + CiBean.toXmlDateTime(this.getLastModified()) + "\""); } buf.append(">"); buf.append("\n"); if (this.description != null) { buf.append(XmlParser.getTab(level + 1) + "<" + XmlParser.DESCRIPTION_ELEMENT.getName() + ">"); buf.append(CiBean.toXmlString(this.description)); buf.append("</" + XmlParser.DESCRIPTION_ELEMENT.getName() + ">"); buf.append("\n"); } // Type if (this.isComplexType()) { buf.append(XmlParser.getTab(level + 1) + 
"<" + XmlParser.COMPLEX_TYPE_ELEMENT.getName() + ">"); buf.append("\n"); buf.append(XmlParser.getTab(level + 2) + "<" + XmlParser.REF_ELEMENT.getName() + " "); buf.append(XmlParser.ALIAS_ATTR.getName() + "=\"" + this.getType() + "\"/>"); buf.append("\n"); if (this.getRefType() != null) { buf.append(XmlParser.getTab(level + 2) + "<" + XmlParser.REF_TYPE_ELEMENT.getName() + ">"); buf.append("<" + XmlParser.REF_ELEMENT.getName() + " "); buf.append(XmlParser.ALIAS_ATTR.getName() + "=\"" + this.getRefType() + "\"/>"); buf.append("</" + XmlParser.REF_TYPE_ELEMENT.getName() + ">"); buf.append("\n"); } buf.append(XmlParser.getTab(level + 1) + "</" + XmlParser.COMPLEX_TYPE_ELEMENT.getName() + ">"); buf.append("\n"); } else { buf.append(XmlParser.getTab(level + 1) + "<" + XmlParser.SIMPLE_TYPE_ELEMENT.getName() + ">" + this.getType() + "</" + XmlParser.SIMPLE_TYPE_ELEMENT.getName() + ">"); buf.append("\n"); } // Policy if (this.getMaxOccurs() != null || this.getMinOccurs() != null) { buf.append(XmlParser.getTab(level + 1) + "<" + XmlParser.POLICY_ELEMENT.getName() + ">"); buf.append("\n"); if (this.getMaxOccurs() != null) { buf.append(XmlParser.getTab(level + 2) + "<" + XmlParser.MAX_OCCURS_ELEMENT.getName() + ">" + this.getMaxOccurs() + "</" + XmlParser.MAX_OCCURS_ELEMENT.getName() + ">"); buf.append("\n"); } if (this.getMinOccurs() != null) { buf.append(XmlParser.getTab(level + 2) + "<" + XmlParser.MIN_OCCURS_ELEMENT.getName() + ">" + this.getMinOccurs() + "</" + XmlParser.MIN_OCCURS_ELEMENT.getName() + ">"); buf.append("\n"); } buf.append(XmlParser.getTab(level + 1) + "</" + XmlParser.POLICY_ELEMENT.getName() + ">"); buf.append("\n"); } // End Attributes buf.append(XmlParser.getTab(level)); buf.append("</" + XmlParser.ATTRIBUTE_ELEMENT.getName() + ">"); return (buf.toString()); } public void setId(Long id) { this.id = id; } public Long getId() { return(this.id); } public void setIdAsString(String id) { if (id == null) { return; } this.id = Long.parseLong(id); } public 
String getIdAsString() { if (this.id == null) { return(null); } return(this.id.toString()); } public void setDerived(boolean b) { this.derived = b; } public boolean isDerived() { return(this.derived); } public AttributeBean copy() { AttributeBean copy = new AttributeBean(); copy.setAlias(this.getAlias()); copy.setId(getId()); copy.setDerived(this.isDerived()); copy.setType(this.getType()); copy.setRefType(this.getRefType()); copy.setMaxOccurs(this.getMaxOccurs()); copy.setMinOccurs(this.getMinOccurs()); copy.setComplexType(this.isComplexType()); copy.setDescription(this.getDescription()); copy.setDisplayName(this.getDisplayName()); return(copy); } }
// Clone returns a new AllocationSet with a deep copy of the given // AllocationSet's allocations. func (as *AllocationSet) Clone() *AllocationSet { if as == nil { return nil } as.RLock() defer as.RUnlock() allocs := map[string]*Allocation{} for k, v := range as.allocations { allocs[k] = v.Clone() } externalKeys := map[string]bool{} for k, v := range as.externalKeys { externalKeys[k] = v } idleKeys := map[string]bool{} for k, v := range as.idleKeys { idleKeys[k] = v } return &AllocationSet{ allocations: allocs, externalKeys: externalKeys, idleKeys: idleKeys, Window: as.Window.Clone(), } }
Desperado, you’ve been out ridin’ fences for far too long so check out the 5 easiest fences to install yourself. I have a friend who sings that song so beautifully; he’s a little 28-year-old cherub/full grown man. It sounds something like this, in case you are not familiar with the song. When I was repeatedly listening to it to sooth my mind–I began to wonder, what the hell is “riding fences?” Are they his fences? What’s desperado’s game here? Then through some thorough internet cowboy research I learned what I will now impart: To ‘ride fences’ is to ride one’s horse along a far-reaching stretch of fencing that encloses a cattle ranch–usually in the American West, or cowboy country with the purpose of fence upkeep, namely, evaluating its integrity and maintaining it. Well, they’re ya go. However, if you choose, like I do, to imagine desperado atop a vast length of fence, his horse off in the distance, waving his hat in the air as the sunsets behind him–who can really blame you. It’s like the old saying: riding fences is in the eye of the beholder. Good neighbors make good fence riders, or something. Still, neighbors aside, fences are important. They are the moat around your castle. Anyone who’s seen Game of Thrones or the animated version of Disney’s The Sword in the Stone can tell you, moats are kind of a big deal. If you want to secure your home with a fence, or land moat, here you can see 5 easiest fences to install yourself. Installing fence in your home will give your house a modern look and it will help you keep your life more private and secure. The white picket American dream sure to keep your neighbors’ four-wheelers off your annuals and give you something to lean on… as you scream at them to stop riding four wheelers around the cul-de-sac. Fences will keep your home safer from burglars. Thieves will think of it as an obstacle and in many cases, they avoid houses with fences. 
Even though this may be the main reason to install a fence in your yard, you should also think about your privacy. Also, if you have children, the most important thing is their safety, with installing a fence you will keep them inside the yard where they will be more secure. This way you can prevent a potential accident. Of course, there are a lot of people who feel secure without their fences, but they want to upgrade the house aesthetic with a beautiful fence. Not all of these 5 easiest fences to install yourself will provide you the security and privacy you need, but they will help you choose which one you can DIY. Also, if you are interested in this kind of DIY projects you can see these 17 Easiest and Quickest DIY Projects for Your Home, as well. There are a lot of different types of fences, like wood, iron, bamboo, chain link and also electric fences. A lot of countries have some kind of ‘traditional’ style when it comes to yard fences. For example, we all know that in America wood fences are the most popular. Seeing a lot of similar houses with the same fence and front yard makes the neighborhood look beautiful and neat. With this you can see that the people living in this area are taking care of their private lives and they tend to make living in this neighborhood more secure. Calling a professional carpenter can be expensive. That is why we wanted to give you a list of 5 easiest fences to install yourself. In order to build some of the fences, you will need some tools. The necessary tools will vary from a fence to fence, but for most of them, you will need a shovel, digger, hammer, pliers, measuring tape, screws, nails, and brush and a circular saw. If you want maximum security and are building an iron fence, you will need some more tools like a drill, welding machine, and grinder. In order to finish the job successfully, you need to have a previous experience with these tools. 
Even though these fences are the easiest to build, this doesn’t mean you don’t need previous experience using the tools! In order to create the list, we collected and sorted information about the fences that require the least time and minimal tools to install, while still offering at least some decent security. We used HomeAdvisor as our starting point and primary source. Now, without further ado, we present to you the 5 easiest fences to install yourself:
/**
 * Activity teardown: stops the ringtone/media playback and unregisters
 * receivers before delegating to the superclass.
 */
@Override
protected void onDestroy() {
    // Stop the currently playing ring so it does not outlive the activity.
    ring.stop();
    // NOTE(review): this is a no-argument call, so it appears to be a local
    // helper that unregisters this activity's receivers (not
    // Context.unregisterReceiver(BroadcastReceiver)) -- confirm.
    unregisterReceiver();
    super.onDestroy();
}
An In Vitro Organ Culturing System for Intervertebral Disc Explants With Vertebral Endplates: A Feasibility Study With Ovine Caudal Discs Study Design. Whole ovine caudal intervertebral discs with vertebral endplates were cultured under uniaxial diurnal loading for 7 days. Objectives. To establish and characterize an organ culture system for intervertebral discs, in which disc cells may be “maintained” in their native three-dimensional environment under load. Summary of Background Data. In vitro culturing of entire discs with preserved biologic and structural integrity would be a useful model to study the effects of nutrition and mechanical loading. Methods. To maintain endplate permeability, sheep were systemically anticoagulated before death and their caudal vasculature was evacuated with saline postmortem. The first 4 caudal discs were explanted with their adjacent endplates and cultured in bioreactors under uniaxial diurnal loading (0.2 MPa for 8 hours and 0.8 MPa for 16 hours) for 4 or 7 days. Solute transport into the center of the disc was measured after 4 days of culture using a low molecular weight fluorescent marker. Cell viability, glycosaminoglycan synthesis rate, and gene expression profile were measured after 7 days of culture and compared with fresh tissue. Results. Fluorescent images showed that solutes could diffuse into the disc under both static and diurnal loading, but penetration through the endplate increased with diurnal loading. Cell viability and glycosaminoglycan synthesis rates remained unchanged after 7 days of culture. Expression of catabolic genes was significantly up-regulated, whereas anabolic genes tended to be down-regulated after 7 days. Conclusions. With this novel preparation and culturing technique, endplate permeability could be maintained, which allowed culturing of intact disc explants with endplates for up to 7 days.
/*
 * Return the number of bytes that can be
 * read from the current position.
 *
 * Walks the region (chunk) array starting at the chunk containing
 * b->read_offset and accumulates the readable bytes of consecutive
 * regions.  The walk stops at the first missing region, when the read
 * offset lies past the region's filled length, or after a region that
 * is not completely full (len < CHUNK_SIZE), since a partial region
 * must be the last one with data.
 */
size_t rbuf_length(struct rbuf *b)
{
	unsigned int n;
	struct region *reg;
	size_t offset;
	size_t len;
	size_t reg_offset;

	offset = b->read_offset;
	len = 0;

	do {
		/* Index of the chunk that holds `offset`. */
		n = offset / CHUNK_SIZE;
		if(n >= b->n_regions) {
			/* Past the end of the region array; note the breaks below
			 * exit before the while-condition reads reg->len. */
			break;
		} else if((reg = b->regions[n]) == NULL) {
			/* Region not allocated: nothing more to read. */
			break;
		}

		/* Read position within this region. */
		reg_offset = offset % CHUNK_SIZE;
		if(reg_offset > reg_offset || reg_offset > reg->len) {
			break;
		}

		/* Count the remainder of this region and advance past it. */
		len += reg->len - reg_offset;
		offset += reg->len - reg_offset;
	} while(reg->len == CHUNK_SIZE); /* only full regions can be followed by more data */

	return len;
}
// Callback from the C++ side that calls the styles to actually build objects void buildForStyle(long styleID,VectorObject[] vecObjs,VectorTileData tileData) { if (viewC.get() == null) return; VectorStyle style = styleDelegate.styleForUUID(styleID,viewC.get()); style.buildObjects(vecObjs,tileData,viewC.get()); }
/**
 * Replaces the given member {@code target} in the list with {@code editedMember}.
 * {@code ClubBook}'s tag list will be updated with the tags of {@code editedMember}.
 *
 * @throws DuplicateMatricNumberException if updating the member's details causes the member's matriculation number
 *         to be equivalent to that of another existing member in the list.
 * @throws MemberNotFoundException if {@code target} could not be found in the list.
 * @see #syncWithMasterTagList(Member)
 */
public void updateMember(Member target, Member editedMember)
        throws DuplicateMatricNumberException, MemberNotFoundException {
    requireNonNull(editedMember);

    // Remove tags used only by the outgoing member before the swap so the
    // master tag list stays minimal.
    deleteTagsUniqueToMember(target);
    // Re-point the edited member's tags at the master tag list instances.
    Member syncedEditedMember = syncWithMasterTagList(editedMember);
    try {
        members.setMember(target, syncedEditedMember);
    } catch (DuplicateMatricNumberException dme) {
        // Roll back the tag removal above so the model stays consistent
        // when the replacement is rejected.
        addMemberTags(target);
        throw dme;
    }
}
// import { PropertyType } from "."; // export declare class FeatureType { // mode?: string; // notification?: string; // icon?: string; // properties?: { // [key: string]: string | PropertyType; // }; // }
def init_logging(log_base=None, file_level=logging.DEBUG, console_level=logging.NOTSET):
    """Configure root logging.

    When ``log_base`` is given, one rotating file handler is created per
    logging level at or above ``file_level`` (file name ``<log_base>.<LEVEL>``).
    When ``console_level`` is non-zero, a console handler is added as well.
    All handlers share the RFC 3339 formatter below.
    """
    fmt = _Rfc3339Formatter(
        "%(asctime)s [%(levelname)s] (%(threadName)s) %(message)s")

    all_handlers = []
    if log_base:
        # NOTE(review): logging._levelToName is a private mapping of the
        # stdlib logging module -- verify it still exists on upgrades.
        for level in sorted(logging._levelToName):
            if level >= file_level:
                target = "%s.%s" % (log_base, logging._levelToName[level])
                file_handler = _RotatingErrorHandler(
                    target,
                    maxBytes=LOG_MAX_SIZE,
                    backupCount=LOG_BACKUP_COUNT,
                    encoding="utf-8")
                file_handler.setLevel(level)
                all_handlers.append(file_handler)

    if console_level:
        console_handler = _ConsoleErrorHandler()
        console_handler.setLevel(console_level)
        all_handlers.append(console_handler)

    for handler in all_handlers:
        handler.setFormatter(fmt)

    # Root logger accepts everything; the handlers do the level filtering.
    logging.basicConfig(level=logging.DEBUG, handlers=all_handlers)
package com.benbarron.rx.lang;

/**
 * Represents a single unit value (a type with exactly one instance,
 * used where a value is required but carries no information).
 */
public enum Unit {
    /** The sole unit instance. */
    Default;
}
// Re-export the platform's FileReader under its own public name.
var nativeFileReader = FileReader;
export {nativeFileReader as FileReader};

/** Static helper for creating Uint8Array views over raw buffers. */
export class Uint8ArrayWrapper {
  /** Returns a new Uint8Array spanning the whole of `buffer`. */
  static create(buffer: ArrayBuffer) {
    return new Uint8Array(buffer);
  }
}
import {Component, Injectable, trigger, state, transition, animate, style, ElementRef} from '@angular/core';
import {AirlockService} from "../../services/airlock.service";
import {Season} from "../../model/season";
import {ViewChild} from "@angular/core";
import {TransparentSpinner} from "../../theme/airlock.components/transparentSpinner/transparentSpinner.service";
import {GlobalState} from "../../global.state";
import {VerifyActionModal} from "../../theme/airlock.components/verifyActionModal/verifyActionModal.component";
import {FeatureUtilsService} from "../../services/featureUtils.service";
import {StringsService} from "../../services/strings.service";
import {Product} from "../../model/product";
import {User} from "../../model/user";
import {Role} from "../../model/role";
import {AddUserModal} from "../../theme/airlock.components/addUserModal";
import {Modal} from "angular2-modal/plugins/bootstrap/modal";
import {Webhook} from "../../model/webhook";
import {AddNotificationModal} from "../../theme/airlock.components/addNotificationModal/addNotificationModal.component";
import {AddWebhookModal} from "../../theme/airlock.components/addWebhookModal";

/**
 * Page component that lists the configured webhooks, supports searching
 * through them, and lets authorized (non-viewer) users add new ones.
 */
@Component({
    selector: 'webhooks',
    providers: [TransparentSpinner,FeatureUtilsService],
    styles: [require('./webhooks.scss')],
    template: require('./webhooks.html'),
    animations: [
        trigger('slideInOut', [
            state('in', style({
                transform: 'translate3d(0, 0, 0)'
            })),
            state('out', style({
                transform: 'translate3d(100%, 0, 0)'
            })),
            transition('in => out', animate('400ms ease-in-out')),
            transition('out => in', animate('400ms ease-in-out'))
        ]),
    ]
})
export class WebhooksPage {
    // Modal used to confirm destructive actions.
    @ViewChild('verifyActionModal') verifyActionModal: VerifyActionModal;
    // Modal used to create a new webhook.
    @ViewChild('addWebhookModal') addWebhookModal: AddWebhookModal;
    valid: boolean = true;
    // All webhooks loaded from the server.
    webhooks: Webhook[];
    // uniqueIds of the webhooks matching the current search query.
    filteredItems: Array<string> = new Array<string>();
    // Currently highlighted search hit (id and index into filteredItems).
    selectedId = null;
    selectedIndex = -1;
    public sortBy;
    data;
    public sortOrder = "asc";
    // Set once the selected search hit has been scrolled into view.
    scrolledToSelected = false;
    filterlistDict: {string: Array<string>} = {string:[]};
    editDialogOpen: boolean = false;
    // True while a server round-trip is in progress.
    loading: boolean = false;
    showDialog = false;
    filteredUsers: Array<User> = new Array<User>();
    // Current search term; null when no search is active.
    searchQueryString: string = null;
    // Placeholder product representing the default for new products.
    allProductsProd: Product;
    public status: {isopen: boolean} = {isopen: false};

    // NOTE(review): "_stringsSrevice" is a typo for "_stringsService";
    // renaming would touch every usage, so it is only flagged here.
    constructor(private _airLockService:AirlockService, private _appState: GlobalState, private _stringsSrevice: StringsService, public modal: Modal) {
        this.allProductsProd = new Product();
        this.allProductsProd.name = "Default for new products";
        this.allProductsProd.uniqueId = null;
    }

    // Tracks whether an edit dialog is currently open.
    setEditDialog(isOpen: boolean) {
        this.editDialogOpen = isOpen;
    }

    // Opens the "add webhook" modal with the product list from app state.
    addWebhook() {
        let prods = this._appState.getData('products');
        this.addWebhookModal.open(prods);
    }

    // Action options are hidden from users with the viewer role.
    isShowOptions(){
        return (!this._airLockService.isViewer());
    }

    isViewer():boolean {
        return this._airLockService.isViewer();
    }

    toggleDataCollectionDetails(){
        this.showDialog = !this.showDialog;
    }

    // Loads the webhook list from the server and reports errors via notification.
    initData() {
        this.loading = true;
        this._airLockService.getWebhooks().then((webhooks) => {
            this.webhooks = webhooks;
            console.log(webhooks)
            this.loading = false;
        }).catch(error => {
            this.loading = false;
            this._airLockService.notifyDataChanged("error-notification",`Failed to load webhooks: ${error}`);
        });
    }

    ngOnInit() {
        this.initData();
        // this.initProductList();
    }

    ngOnDestroy() {
    }

    // Returns the product with the given ID, or null if not found.
    getProductFromID(prodId: string, products: Product[]): Product {
        for (let prod of products || []) {
            if (prod.uniqueId === prodId) {
                return prod;
            }
        }
        return null;
    }

    // Currently unused: cells are never reported as open.
    isCellOpen(expID:string): boolean {
        return false;
        // var index = this.openExperiments.indexOf(expID, 0);
        // return index > -1;
    }

    // Currently a no-op; previous filtering logic kept for reference.
    setShowConfig(show:boolean) {
        // this.showConfig = show;
        // if (show) {
        //     this.filterlistDict["type"] = [];
        // } else {
        //     this.filterlistDict["type"] = ["CONFIG_MUTUAL_EXCLUSION_GROUP", "CONFIGURATION_RULE"];
        // }
    }

    webhookIsInFilter(event) {}

    webhookChangedStatus(event) {}

    // Re-fetches the webhook list.
    public refreshTable() {
        // this.selectProduct(this.selectedProduct);
        this.initData();
    }

    public beforeUpdate() {
        this.loading = true;
    }

    public afterUpdate() {
        this.loading = false;
    }

    // Recomputes the filtered list shortly after the search term changes.
    public onSearchQueryChanged(term:string) {
        this.loading = true;
        setTimeout(() => {
            this.filteredItems = [];
            this.searchQueryString = term;
            this.createFilteredList();
            this.loading = false;
        }, 100);
    }

    getString(name: string) {
        return this._stringsSrevice.getString(name);
    }

    // Rebuilds filteredItems from the current search query and resets the selection.
    createFilteredList() {
        this.filteredItems = [];
        this.selectedId = null;
        this.scrolledToSelected = false;
        this.selectedIndex = -1;
        let term = this.searchQueryString;
        let items = [];
        if (term && term.length > 0 && this.webhooks) {
            for (var webhook of this.webhooks) {
                if (this.isPartOfSearch(term, webhook)) {
                    items.push(webhook.uniqueId);
                }
            }
            this.filteredItems = items;
        }
    }

    // Number of search hits; 0 when no search is active.
    getNumItems() {
        if (this.filteredItems && this.searchQueryString && this.searchQueryString.length > 0) {
            return this.filteredItems.length;
        }
        return 0;
    }

    // Case-insensitive substring test; null str never matches, null term always does.
    _stringIncludes(str:string, term:string) {
        if (!str) {
            return false;
        }
        if (!term) {
            return true;
        }
        return (str.toLowerCase().indexOf(term.toLowerCase()) > -1);
    }

    // True when the webhook's name contains the search term (case-insensitive).
    // NOTE(review): displayName and fullName are both derived from webhook.name,
    // so the two checks are redundant -- possibly a leftover from a model that
    // had separate display/full names.
    isPartOfSearch(term:string, webhook:Webhook):boolean {
        if (!term || term=="") {
            return true;
        }
        let lowerTerm = term.toLowerCase();
        let displayName = webhook.name ? webhook.name : "";
        displayName = displayName ? displayName.toLowerCase() : "";
        let fullName = webhook.name;
        fullName = fullName ? fullName.toLowerCase() : "";
        return displayName.includes(lowerTerm) || fullName.includes((lowerTerm));
    }

    // Moves the selection forward/backward through the search hits, wrapping around.
    showNextSearchResult(forward:boolean) {
        if (this.filteredItems.length > 0) {
            if (forward) {
                if (this.selectedIndex >= (this.filteredItems.length-1)) {
                    this.selectedIndex = 0;
                } else {
                    this.selectedIndex++;
                }
            } else {
                if (this.selectedIndex == 0) {
                    this.selectedIndex = this.filteredItems.length-1;
                } else {
                    this.selectedIndex--;
                }
            }
            this.selectedId = this.filteredItems[this.selectedIndex];
            this.scrolledToSelected = false;
        }
    }

    // Scrolls the selected item into view the first time it reports itself.
    itemIsSelected(itemObj:any) {
        if (itemObj.id && itemObj.id == this.selectedId && !this.scrolledToSelected) {
            let y = itemObj.offset;
            this.checkIfInView(y);
            this.scrolledToSelected = true;
        }
    }

    // Scrolls the page when the given vertical offset is outside the viewport.
    // Returns false when a scroll was performed, true otherwise.
    checkIfInView(top: number){
        let windowScroll = jQuery(window).scrollTop();
        if (top > 0) {
            var offset = top - windowScroll;
            if(offset > window.innerHeight || offset < 0){
                // Not in view so scroll to it
                // jQuery('html,body').animate({scrollTop: offset-300}, 500);
                var scrollNode = document.scrollingElement ? document.scrollingElement : document.body;
                scrollNode.scrollTop = top-200;
                return false;
            }
        }
        return true;
    }
}
"Against the backdrop of a difficult economic situation, people are becoming more prudent," Dmitry Abzalov of the Center for Strategic Communications said at the news conference. "It is important for most people to preserve their way of life, their lifestyle, their traditions. So they tend toward conservatism. This is normal." This, Abzalov added, represented "a global trend." The Kremlin apparently believes it has found the ultimate wedge issue to unite its supporters and divide its opponents, both in Russia and the West, and garner support in the developing world. They seem to believe they have found the ideology that will return Russia to its rightful place as a great power with a messianic mission and the ability to win hearts and minds globally. As the West becomes increasingly multicultural, less patriarchal and traditional, and more open to gay rights, Russia will be a lodestone for the multitudes who oppose this trajectory. Just as the Communist International, or Comintern, and what Soviet ideologists called the "correlation of forces" sought to unite progressive elements around the globe behind Moscow, the world's traditionalists will now line up behind Putin. And there is some evidence that this message may be resonating. "While his stance as a defender of traditional values has drawn the mockery of Western media and cultural elites, Putin is not wrong in saying that he can speak for much of mankind," conservative American commentator Patrick Buchanan wrote. "Putin may be seeing the future with more clarity than Americans still caught in a Cold War paradigm." The 21st century, Buchanan adds, may be marked by a struggle pitting "conservatives and traditionalists in every country arrayed against the militant secularism of a multicultural and transnational elite." Others on the American right, like Rod Dreher, a senior editor of the "American Conservative," also wrote favorably—albeit in a more nuanced manner—of Putin's speech. 
"Putin may be a cold-eyed cynic, but he’s also onto something," he wrote. And the Kremlin, according to political analyst Aleksandr Morozov, has been spending considerable resources laying the groundwork for Putin's transformation into a global conservative icon. They have used forums like the Dialogue of Civilizations and the Valdai Discussion Group to influence elite opinion, Morozov writes. They have co-opted Western pundits on the RT (formerly Russia Today) English-language television station. And they have subsidized the research of Western academics at Russian universities. "It is a mistake to believe that Putin wants to lower a new Iron Curtain, build a new Berlin Wall and pursue a policy of isolationism," Morozov wrote in Colta.ru. "On the contrary, Putin is creating a new Comintern. This is not isolationism, but rather the maximum Putinization of the world. The Comintern was a complex system that worked with ideologically sympathetic intellectuals and politicians. What we are seeing now is not an attempt to restore the past, but the creation of an entirely new hegemony."