text
stringlengths 2
100k
| meta
dict |
---|---|
//
// Licensed under the terms in License.txt
//
// Copyright 2010 Allen Ding. All rights reserved.
//
#import "Kiwi.h"
#import "KiwiTestConfiguration.h"
#import "TestClasses.h"
#if KW_TESTS_ENABLED
@interface KWTestCaseTest : SenTestCase
@end

@implementation KWTestCaseTest

// A stub installed on an object while an example runs should only survive
// until the example environment is torn down, at which point the original
// method behavior must be restored.
- (void)testItShouldClearStubsAfterExamplesRun {
    KWTestCase *testCase = [[[KWTestCase alloc] init] autorelease];
    id subject = [Cruiser cruiser];
    NSUInteger originalCrewComplement = [subject crewComplement];

    [subject stub:@selector(crewComplement) andReturn:[KWValue valueWithUnsignedInt:42]];
    STAssertEquals([subject crewComplement], 42u, @"expected method to be stubbed");

    [testCase tearDownExampleEnvironment];
    STAssertEquals([subject crewComplement], originalCrewComplement, @"expected method stub to be cleared after examples run");
}

// Verifiers registered on a test case should be told when an example ends
// as part of invoking the test.
- (void)testItShouldNotifyVerifiersOfEndOfExample {
    KWTestCase *testCase = [[[KWTestCase alloc] init] autorelease];
    id exampleVerifier = [[[TestVerifier alloc] init] autorelease];

    [testCase addVerifier:exampleVerifier];
    [testCase invokeTest];
    STAssertTrue([exampleVerifier notifiedOfEndOfExample], @"expected spec to notify end of example verifiers");
}

@end
#endif // #if KW_TESTS_ENABLED
| {
"pile_set_name": "Github"
} |
package terraform
import (
"fmt"
"log"
"strings"
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/hashicorp/terraform-plugin-sdk/internal/configs"
"github.com/hashicorp/terraform-plugin-sdk/internal/plans"
"github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
"github.com/hashicorp/terraform-plugin-sdk/internal/providers"
"github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalCheckPlannedChange is an EvalNode implementation that produces errors
// if the _actual_ expected value is not compatible with what was recorded
// in the plan.
//
// Errors here are most often indicative of a bug in the provider, so our
// error messages will report with that in mind. It's also possible that
// there's a bug in Terraform's Core's own "proposed new value" code in
// EvalDiff.
type EvalCheckPlannedChange struct {
	// Addr is the resource instance whose planned and actual changes are
	// being compared.
	Addr addrs.ResourceInstance

	// ProviderAddr identifies the provider configuration responsible for
	// the change; it is used in the error messages produced here.
	ProviderAddr addrs.AbsProviderConfig

	// ProviderSchema is the schema source used to look up the schema for
	// this resource type.
	ProviderSchema **ProviderSchema

	// We take ResourceInstanceChange objects here just because that's what's
	// convenient to pass in from the evaltree implementation, but we really
	// only look at the "After" value of each change.
	Planned, Actual **plans.ResourceInstanceChange
}
// Eval verifies that the change the provider produced during apply is
// compatible with the change recorded in the plan, returning error
// diagnostics that blame the provider when they disagree.
func (n *EvalCheckPlannedChange) Eval(ctx EvalContext) (interface{}, error) {
	schemas := *n.ProviderSchema
	planned := *n.Planned
	actual := *n.Actual

	schema, _ := schemas.SchemaForResourceAddr(n.Addr.ContainingResource())
	if schema == nil {
		// Should be caught during validation, so we don't bother with a pretty error here
		return nil, fmt.Errorf("provider does not support %q", n.Addr.Resource.Type)
	}

	absAddr := n.Addr.Absolute(ctx.Path())
	var diags tfdiags.Diagnostics

	log.Printf("[TRACE] EvalCheckPlannedChange: Verifying that actual change (action %s) matches planned change (action %s)", actual.Action, planned.Action)

	if planned.Action != actual.Action {
		if planned.Action == plans.Update && actual.Action == plans.NoOp {
			// It's okay for an update to become a NoOp once we've filled in
			// all of the unknown values, since the final values might actually
			// match what was there before after all.
			log.Printf("[DEBUG] After incorporating new values learned so far during apply, %s change has become NoOp", absAddr)
		} else {
			diags = diags.Append(tfdiags.Sourceless(
				tfdiags.Error,
				"Provider produced inconsistent final plan",
				fmt.Sprintf(
					"When expanding the plan for %s to include new values learned so far during apply, provider %q changed the planned action from %s to %s.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
					absAddr, n.ProviderAddr.ProviderConfig.Type,
					planned.Action, actual.Action,
				),
			))
		}
	}

	// Every incompatibility between the two "After" values is reported as
	// a separate diagnostic.
	for _, err := range objchange.AssertObjectCompatible(schema, planned.After, actual.After) {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Provider produced inconsistent final plan",
			fmt.Sprintf(
				"When expanding the plan for %s to include new values learned so far during apply, provider %q produced an invalid new value for %s.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
				absAddr, n.ProviderAddr.ProviderConfig.Type, tfdiags.FormatError(err),
			),
		))
	}

	return nil, diags.Err()
}
// EvalDiff is an EvalNode implementation that detects changes for a given
// resource instance.
type EvalDiff struct {
	// Addr is the resource instance to plan a change for.
	Addr addrs.ResourceInstance

	// Config is the resource's configuration, evaluated against the schema
	// to produce the proposed new value.
	Config *configs.Resource

	// Provider is the provider client asked to plan the change.
	Provider *providers.Interface

	// ProviderAddr identifies the provider configuration; used in error
	// messages and recorded in the resulting change. Must be set.
	ProviderAddr addrs.AbsProviderConfig

	// ProviderSchema is the schema source used to look up the schema for
	// this resource type.
	ProviderSchema **ProviderSchema

	// State is the prior state object, or nil when the object doesn't
	// exist yet.
	State **states.ResourceInstanceObject

	// PreviousDiff, when non-nil, is a change produced by an earlier walk
	// (presumably the plan phase) used to keep the action chosen here
	// consistent with what was originally planned.
	PreviousDiff **plans.ResourceInstanceChange

	// CreateBeforeDestroy is set if either the resource's own config sets
	// create_before_destroy explicitly or if dependencies have forced the
	// resource to be handled as create_before_destroy in order to avoid
	// a dependency cycle.
	CreateBeforeDestroy bool

	// OutputChange, if non-nil, receives the planned change.
	OutputChange **plans.ResourceInstanceChange

	// OutputValue, if non-nil, receives the evaluated configuration value.
	OutputValue *cty.Value

	// OutputState, if non-nil, receives the new state object, recorded
	// with the special "planned" status.
	OutputState **states.ResourceInstanceObject

	// Stub suppresses the pre-diff and post-diff hooks when set.
	Stub bool
}
// TODO: test
//
// Eval computes the planned change ("diff") for the resource instance in
// n.Addr: it evaluates the configuration, asks the provider to plan the
// change, cross-checks the provider's response for validity, applies
// ignore_changes, and classifies the result as Create/Update/Replace/NoOp.
// The outputs are written through n.OutputChange / n.OutputValue /
// n.OutputState when those pointers are set.
func (n *EvalDiff) Eval(ctx EvalContext) (interface{}, error) {
	state := *n.State
	config := *n.Config
	provider := *n.Provider
	providerSchema := *n.ProviderSchema

	if providerSchema == nil {
		return nil, fmt.Errorf("provider schema is unavailable for %s", n.Addr)
	}
	if n.ProviderAddr.ProviderConfig.Type == "" {
		// A missing provider address indicates a bug in the caller, not a
		// user error, so we panic rather than returning diagnostics.
		panic(fmt.Sprintf("EvalDiff for %s does not have ProviderAddr set", n.Addr.Absolute(ctx.Path())))
	}

	var diags tfdiags.Diagnostics

	// Evaluate the configuration
	schema, _ := providerSchema.SchemaForResourceAddr(n.Addr.ContainingResource())
	if schema == nil {
		// Should be caught during validation, so we don't bother with a pretty error here
		return nil, fmt.Errorf("provider does not support resource type %q", n.Addr.Resource.Type)
	}
	forEach, _ := evaluateResourceForEachExpression(n.Config.ForEach, ctx)
	keyData := EvalDataForInstanceKey(n.Addr.Key, forEach)
	configVal, _, configDiags := ctx.EvaluateBlock(config.Config, schema, nil, keyData)
	diags = diags.Append(configDiags)
	if configDiags.HasErrors() {
		return nil, diags.Err()
	}

	absAddr := n.Addr.Absolute(ctx.Path())
	var priorVal cty.Value
	var priorValTainted cty.Value
	var priorPrivate []byte
	if state != nil {
		if state.Status != states.ObjectTainted {
			priorVal = state.Value
			priorPrivate = state.Private
		} else {
			// If the prior state is tainted then we'll proceed below like
			// we're creating an entirely new object, but then turn it into
			// a synthetic "Replace" change at the end, creating the same
			// result as if the provider had marked at least one argument
			// change as "requires replacement".
			priorValTainted = state.Value
			priorVal = cty.NullVal(schema.ImpliedType())
		}
	} else {
		// No prior object at all: this will be a Create.
		priorVal = cty.NullVal(schema.ImpliedType())
	}

	proposedNewVal := objchange.ProposedNewObject(schema, priorVal, configVal)

	// Call pre-diff hook
	if !n.Stub {
		err := ctx.Hook(func(h Hook) (HookAction, error) {
			return h.PreDiff(absAddr, states.CurrentGen, priorVal, proposedNewVal)
		})
		if err != nil {
			return nil, err
		}
	}

	log.Printf("[TRACE] Re-validating config for %q", n.Addr.Absolute(ctx.Path()))
	// Allow the provider to validate the final set of values.
	// The config was statically validated early on, but there may have been
	// unknown values which the provider could not validate at the time.
	validateResp := provider.ValidateResourceTypeConfig(
		providers.ValidateResourceTypeConfigRequest{
			TypeName: n.Addr.Resource.Type,
			Config:   configVal,
		},
	)
	if validateResp.Diagnostics.HasErrors() {
		return nil, validateResp.Diagnostics.InConfigBody(config.Config).Err()
	}

	// The provider gets an opportunity to customize the proposed new value,
	// which in turn produces the _planned_ new value. But before
	// we send back this information, we need to process ignore_changes
	// so that CustomizeDiff will not act on them
	var ignoreChangeDiags tfdiags.Diagnostics
	proposedNewVal, ignoreChangeDiags = n.processIgnoreChanges(priorVal, proposedNewVal)
	diags = diags.Append(ignoreChangeDiags)
	if ignoreChangeDiags.HasErrors() {
		return nil, diags.Err()
	}

	resp := provider.PlanResourceChange(providers.PlanResourceChangeRequest{
		TypeName:         n.Addr.Resource.Type,
		Config:           configVal,
		PriorState:       priorVal,
		ProposedNewState: proposedNewVal,
		PriorPrivate:     priorPrivate,
	})
	diags = diags.Append(resp.Diagnostics.InConfigBody(config.Config))
	if diags.HasErrors() {
		return nil, diags.Err()
	}

	plannedNewVal := resp.PlannedState
	plannedPrivate := resp.PlannedPrivate
	if plannedNewVal == cty.NilVal {
		// Should never happen. Since real-world providers return via RPC a nil
		// is always a bug in the client-side stub. This is more likely caused
		// by an incompletely-configured mock provider in tests, though.
		panic(fmt.Sprintf("PlanResourceChange of %s produced nil value", absAddr.String()))
	}

	// We allow the planned new value to disagree with configuration _values_
	// here, since that allows the provider to do special logic like a
	// DiffSuppressFunc, but we still require that the provider produces
	// a value whose type conforms to the schema.
	for _, err := range plannedNewVal.Type().TestConformance(schema.ImpliedType()) {
		diags = diags.Append(tfdiags.Sourceless(
			tfdiags.Error,
			"Provider produced invalid plan",
			fmt.Sprintf(
				"Provider %q planned an invalid value for %s.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
				n.ProviderAddr.ProviderConfig.Type, tfdiags.FormatErrorPrefixed(err, absAddr.String()),
			),
		))
	}
	if diags.HasErrors() {
		return nil, diags.Err()
	}

	if errs := objchange.AssertPlanValid(schema, priorVal, configVal, plannedNewVal); len(errs) > 0 {
		if resp.LegacyTypeSystem {
			// The shimming of the old type system in the legacy SDK is not precise
			// enough to pass this consistency check, so we'll give it a pass here,
			// but we will generate a warning about it so that we are more likely
			// to notice in the logs if an inconsistency beyond the type system
			// leads to a downstream provider failure.
			var buf strings.Builder
			fmt.Fprintf(&buf, "[WARN] Provider %q produced an invalid plan for %s, but we are tolerating it because it is using the legacy plugin SDK.\n The following problems may be the cause of any confusing errors from downstream operations:", n.ProviderAddr.ProviderConfig.Type, absAddr)
			for _, err := range errs {
				fmt.Fprintf(&buf, "\n - %s", tfdiags.FormatError(err))
			}
			log.Print(buf.String())
		} else {
			for _, err := range errs {
				diags = diags.Append(tfdiags.Sourceless(
					tfdiags.Error,
					"Provider produced invalid plan",
					fmt.Sprintf(
						"Provider %q planned an invalid value for %s.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
						n.ProviderAddr.ProviderConfig.Type, tfdiags.FormatErrorPrefixed(err, absAddr.String()),
					),
				))
			}
			return nil, diags.Err()
		}
	}

	// TODO: We should be able to remove this repeat of processing ignored changes
	// after the plan, which helps providers relying on old behavior "just work"
	// in the next major version, such that we can be stricter about ignore_changes
	// values
	plannedNewVal, ignoreChangeDiags = n.processIgnoreChanges(priorVal, plannedNewVal)
	diags = diags.Append(ignoreChangeDiags)
	if ignoreChangeDiags.HasErrors() {
		return nil, diags.Err()
	}

	// The provider produces a list of paths to attributes whose changes mean
	// that we must replace rather than update an existing remote object.
	// However, we only need to do that if the identified attributes _have_
	// actually changed -- particularly after we may have undone some of the
	// changes in processIgnoreChanges -- so now we'll filter that list to
	// include only where changes are detected.
	reqRep := cty.NewPathSet()
	if len(resp.RequiresReplace) > 0 {
		for _, path := range resp.RequiresReplace {
			if priorVal.IsNull() {
				// If prior is null then we don't expect any RequiresReplace at all,
				// because this is a Create action.
				continue
			}

			priorChangedVal, priorPathDiags := hcl.ApplyPath(priorVal, path, nil)
			plannedChangedVal, plannedPathDiags := hcl.ApplyPath(plannedNewVal, path, nil)
			if plannedPathDiags.HasErrors() && priorPathDiags.HasErrors() {
				// This means the path was invalid in both the prior and new
				// values, which is an error with the provider itself.
				diags = diags.Append(tfdiags.Sourceless(
					tfdiags.Error,
					"Provider produced invalid plan",
					fmt.Sprintf(
						"Provider %q has indicated \"requires replacement\" on %s for a non-existent attribute path %#v.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
						n.ProviderAddr.ProviderConfig.Type, absAddr, path,
					),
				))
				continue
			}

			// Make sure we have valid Values for both values.
			// Note: if the opposing value was of the type
			// cty.DynamicPseudoType, the type assigned here may not exactly
			// match the schema. This is fine here, since we're only going to
			// check for equality, but if the NullVal is to be used, we need to
			// check the schema for th true type.
			switch {
			case priorChangedVal == cty.NilVal && plannedChangedVal == cty.NilVal:
				// this should never happen without ApplyPath errors above
				panic("requires replace path returned 2 nil values")
			case priorChangedVal == cty.NilVal:
				priorChangedVal = cty.NullVal(plannedChangedVal.Type())
			case plannedChangedVal == cty.NilVal:
				plannedChangedVal = cty.NullVal(priorChangedVal.Type())
			}

			// Only record the path when the value actually differs (or we
			// can't tell because it's unknown).
			eqV := plannedChangedVal.Equals(priorChangedVal)
			if !eqV.IsKnown() || eqV.False() {
				reqRep.Add(path)
			}
		}
		if diags.HasErrors() {
			return nil, diags.Err()
		}
	}

	eqV := plannedNewVal.Equals(priorVal)
	eq := eqV.IsKnown() && eqV.True()

	var action plans.Action
	switch {
	case priorVal.IsNull():
		action = plans.Create
	case eq:
		action = plans.NoOp
	case !reqRep.Empty():
		// If there are any "requires replace" paths left _after our filtering
		// above_ then this is a replace action.
		if n.CreateBeforeDestroy {
			action = plans.CreateThenDelete
		} else {
			action = plans.DeleteThenCreate
		}
	default:
		action = plans.Update
		// "Delete" is never chosen here, because deletion plans are always
		// created more directly elsewhere, such as in "orphan" handling.
	}

	if action.IsReplace() {
		// In this strange situation we want to produce a change object that
		// shows our real prior object but has a _new_ object that is built
		// from a null prior object, since we're going to delete the one
		// that has all the computed values on it.
		//
		// Therefore we'll ask the provider to plan again here, giving it
		// a null object for the prior, and then we'll meld that with the
		// _actual_ prior state to produce a correctly-shaped replace change.
		// The resulting change should show any computed attributes changing
		// from known prior values to unknown values, unless the provider is
		// able to predict new values for any of these computed attributes.
		nullPriorVal := cty.NullVal(schema.ImpliedType())

		// create a new proposed value from the null state and the config
		proposedNewVal = objchange.ProposedNewObject(schema, nullPriorVal, configVal)

		resp = provider.PlanResourceChange(providers.PlanResourceChangeRequest{
			TypeName:         n.Addr.Resource.Type,
			Config:           configVal,
			PriorState:       nullPriorVal,
			ProposedNewState: proposedNewVal,
			PriorPrivate:     plannedPrivate,
		})
		// We need to tread carefully here, since if there are any warnings
		// in here they probably also came out of our previous call to
		// PlanResourceChange above, and so we don't want to repeat them.
		// Consequently, we break from the usual pattern here and only
		// append these new diagnostics if there's at least one error inside.
		if resp.Diagnostics.HasErrors() {
			diags = diags.Append(resp.Diagnostics.InConfigBody(config.Config))
			return nil, diags.Err()
		}
		plannedNewVal = resp.PlannedState
		plannedPrivate = resp.PlannedPrivate

		// Re-check type conformance of the second plan result too.
		for _, err := range plannedNewVal.Type().TestConformance(schema.ImpliedType()) {
			diags = diags.Append(tfdiags.Sourceless(
				tfdiags.Error,
				"Provider produced invalid plan",
				fmt.Sprintf(
					"Provider %q planned an invalid value for %s%s.\n\nThis is a bug in the provider, which should be reported in the provider's own issue tracker.",
					n.ProviderAddr.ProviderConfig.Type, absAddr, tfdiags.FormatError(err),
				),
			))
		}
		if diags.HasErrors() {
			return nil, diags.Err()
		}
	}

	// If our prior value was tainted then we actually want this to appear
	// as a replace change, even though so far we've been treating it as a
	// create.
	if action == plans.Create && priorValTainted != cty.NilVal {
		if n.CreateBeforeDestroy {
			action = plans.CreateThenDelete
		} else {
			action = plans.DeleteThenCreate
		}
		priorVal = priorValTainted
	}

	// As a special case, if we have a previous diff (presumably from the plan
	// phases, whereas we're now in the apply phase) and it was for a replace,
	// we've already deleted the original object from state by the time we
	// get here and so we would've ended up with a _create_ action this time,
	// which we now need to paper over to get a result consistent with what
	// we originally intended.
	if n.PreviousDiff != nil {
		prevChange := *n.PreviousDiff
		if prevChange.Action.IsReplace() && action == plans.Create {
			log.Printf("[TRACE] EvalDiff: %s treating Create change as %s change to match with earlier plan", absAddr, prevChange.Action)
			action = prevChange.Action
			priorVal = prevChange.Before
		}
	}

	// Call post-refresh hook
	if !n.Stub {
		err := ctx.Hook(func(h Hook) (HookAction, error) {
			return h.PostDiff(absAddr, states.CurrentGen, action, priorVal, plannedNewVal)
		})
		if err != nil {
			return nil, err
		}
	}

	// Update our output if we care
	if n.OutputChange != nil {
		*n.OutputChange = &plans.ResourceInstanceChange{
			Addr:         absAddr,
			Private:      plannedPrivate,
			ProviderAddr: n.ProviderAddr,
			Change: plans.Change{
				Action: action,
				Before: priorVal,
				After:  plannedNewVal,
			},
			RequiredReplace: reqRep,
		}
	}
	if n.OutputValue != nil {
		*n.OutputValue = configVal
	}

	// Update the state if we care
	if n.OutputState != nil {
		*n.OutputState = &states.ResourceInstanceObject{
			// We use the special "planned" status here to note that this
			// object's value is not yet complete. Objects with this status
			// cannot be used during expression evaluation, so the caller
			// must _also_ record the returned change in the active plan,
			// which the expression evaluator will use in preference to this
			// incomplete value recorded in the state.
			Status:  states.ObjectPlanned,
			Value:   plannedNewVal,
			Private: plannedPrivate,
		}
	}

	return nil, nil
}
// processIgnoreChanges applies the resource's ignore_changes settings to a
// proposed new value, reverting each ignored attribute to its value in
// prior. With ignore_changes = all the prior value is returned wholesale.
// When there is no prior object (we're creating), ignore_changes does not
// apply and proposed is returned unchanged.
func (n *EvalDiff) processIgnoreChanges(prior, proposed cty.Value) (cty.Value, tfdiags.Diagnostics) {
	// ignore_changes only applies when an object already exists, since we
	// can't ignore changes to a thing we've not created yet.
	if prior.IsNull() {
		return proposed, nil
	}

	ignoreChanges := n.Config.Managed.IgnoreChanges
	ignoreAll := n.Config.Managed.IgnoreAllChanges

	if len(ignoreChanges) == 0 && !ignoreAll {
		// Nothing is being ignored, so there's nothing to do.
		return proposed, nil
	}
	if ignoreAll {
		return prior, nil
	}
	// prior is known non-null here thanks to the guard at the top of this
	// function, so only proposed needs checking. Proposed should never be
	// null at this point, but if it is then we'll just let it be.
	if proposed.IsNull() {
		return proposed, nil
	}

	return processIgnoreChangesIndividual(prior, proposed, ignoreChanges)
}
// processIgnoreChangesIndividual undoes planned changes for each attribute
// path listed in ignoreChanges by copying the corresponding value from
// prior back into the proposed value, returning the adjusted value.
func processIgnoreChangesIndividual(prior, proposed cty.Value, ignoreChanges []hcl.Traversal) (cty.Value, tfdiags.Diagnostics) {
	// The walk below compares cty.Path values, so first convert each HCL
	// traversal into its equivalent path representation.
	ignorePaths := make([]cty.Path, len(ignoreChanges))
	for i, traversal := range ignoreChanges {
		steps := make(cty.Path, len(traversal))
		for j, step := range traversal {
			switch st := step.(type) {
			case hcl.TraverseRoot:
				steps[j] = cty.GetAttrStep{Name: st.Name}
			case hcl.TraverseAttr:
				steps[j] = cty.GetAttrStep{Name: st.Name}
			case hcl.TraverseIndex:
				steps[j] = cty.IndexStep{Key: st.Key}
			default:
				panic(fmt.Sprintf("unsupported traversal step %#v", step))
			}
		}
		ignorePaths[i] = steps
	}

	var diags tfdiags.Diagnostics
	ret, _ := cty.Transform(proposed, func(path cty.Path, v cty.Value) (cty.Value, error) {
		// Only an exact path match triggers the "ignore" transform. The walk
		// visits leaf values first and their containers afterwards, so when
		// a container path matches we replace it wholesale, discarding any
		// deeper substitutions already made within it.
		matched := false
		for _, candidate := range ignorePaths {
			if path.Equals(candidate) {
				matched = true
				break
			}
		}
		if !matched {
			return v, nil
		}

		// If the same path resolves within the prior value, substitute the
		// prior value there, effectively undoing the planned change.
		priorV, applyDiags := hcl.ApplyPath(prior, path, nil)
		if applyDiags.HasErrors() {
			// Errors here are deliberately ignored: usually they just mean
			// the prior value had a slightly different shape. A traversal
			// that doesn't match the schema would already have been caught
			// during the validate walk.
			return v, nil
		}
		return priorV, nil
	})
	return ret, diags
}
// EvalDiffDestroy is an EvalNode implementation that returns a plain
// destroy diff.
type EvalDiffDestroy struct {
	// Addr is the resource instance to plan destruction for.
	Addr addrs.ResourceInstance

	// DeposedKey identifies a deposed object of the instance; empty means
	// the current object.
	DeposedKey states.DeposedKey

	// State is the object to destroy. A nil object, or one whose value is
	// null, means there is nothing to do.
	State **states.ResourceInstanceObject

	// ProviderAddr identifies the provider configuration recorded in the
	// resulting change. Must be set by the caller.
	ProviderAddr addrs.AbsProviderConfig

	// Output receives the planned Delete change.
	Output **plans.ResourceInstanceChange

	// OutputState, if non-nil, is set to nil to reflect that the proposed
	// new state is "gone".
	OutputState **states.ResourceInstanceObject
}
// TODO: test
//
// Eval produces a Delete change for the object in n.State without
// consulting the provider, surrounding it with the usual pre/post diff
// hooks, and records the change through n.Output / n.OutputState.
func (n *EvalDiffDestroy) Eval(ctx EvalContext) (interface{}, error) {
	absAddr := n.Addr.Absolute(ctx.Path())
	state := *n.State

	if n.ProviderAddr.ProviderConfig.Type == "" {
		// A missing provider address indicates a bug in the caller.
		if n.DeposedKey == "" {
			panic(fmt.Sprintf("EvalDiffDestroy for %s does not have ProviderAddr set", absAddr))
		} else {
			panic(fmt.Sprintf("EvalDiffDestroy for %s (deposed %s) does not have ProviderAddr set", absAddr, n.DeposedKey))
		}
	}

	// If there is no state or our attributes object is null then we're already
	// destroyed.
	if state == nil || state.Value.IsNull() {
		return nil, nil
	}

	// Call pre-diff hook
	err := ctx.Hook(func(h Hook) (HookAction, error) {
		return h.PreDiff(
			absAddr, n.DeposedKey.Generation(),
			state.Value,
			cty.NullVal(cty.DynamicPseudoType),
		)
	})
	if err != nil {
		return nil, err
	}

	// Change is always the same for a destroy. We don't need the provider's
	// help for this one.
	// TODO: Should we give the provider an opportunity to veto this?
	change := &plans.ResourceInstanceChange{
		Addr:       absAddr,
		DeposedKey: n.DeposedKey,
		Change: plans.Change{
			Action: plans.Delete,
			Before: state.Value,
			After:  cty.NullVal(cty.DynamicPseudoType),
		},
		Private:      state.Private,
		ProviderAddr: n.ProviderAddr,
	}

	// Call post-diff hook
	err = ctx.Hook(func(h Hook) (HookAction, error) {
		return h.PostDiff(
			absAddr,
			n.DeposedKey.Generation(),
			change.Action,
			change.Before,
			change.After,
		)
	})
	if err != nil {
		return nil, err
	}

	// Update our output
	*n.Output = change

	if n.OutputState != nil {
		// Record our proposed new state, which is nil because we're destroying.
		*n.OutputState = nil
	}

	return nil, nil
}
// EvalReduceDiff is an EvalNode implementation that takes a planned resource
// instance change as might be produced by EvalDiff or EvalDiffDestroy and
// "simplifies" it to a single atomic action to be performed by a specific
// graph node.
//
// Callers must specify whether they are a destroy node or a regular apply
// node. If the result is NoOp then the given change requires no action for
// the specific graph node calling this and so evaluation of the that graph
// node should exit early and take no action.
//
// The object written to OutChange may either be identical to InChange or
// a new change object derived from InChange. Because of the former case, the
// caller must not mutate the object returned in OutChange.
type EvalReduceDiff struct {
	// Addr is the resource instance the change belongs to; used here only
	// for trace logging.
	Addr addrs.ResourceInstance

	// InChange is the full change to be simplified.
	InChange **plans.ResourceInstanceChange

	// Destroy is true when the calling graph node is a destroy node.
	Destroy bool

	// OutChange, if non-nil, receives the simplified change, which may be
	// the same object as *InChange and so must not be mutated by callers.
	OutChange **plans.ResourceInstanceChange
}
// TODO: test
//
// Eval simplifies the incoming change for this graph node's role (destroy
// or apply) and stores the result, logging whenever the simplification
// altered the action.
func (n *EvalReduceDiff) Eval(ctx EvalContext) (interface{}, error) {
	original := *n.InChange
	reduced := original.Simplify(n.Destroy)

	if n.OutChange != nil {
		*n.OutChange = reduced
	}

	if reduced.Action != original.Action {
		if n.Destroy {
			log.Printf("[TRACE] EvalReduceDiff: %s change simplified from %s to %s for destroy node", n.Addr, original.Action, reduced.Action)
		} else {
			log.Printf("[TRACE] EvalReduceDiff: %s change simplified from %s to %s for apply node", n.Addr, original.Action, reduced.Action)
		}
	}

	return nil, nil
}
// EvalReadDiff is an EvalNode implementation that retrieves the planned
// change for a particular resource instance object.
type EvalReadDiff struct {
	// Addr is the resource instance whose planned change should be read.
	Addr addrs.ResourceInstance

	// DeposedKey selects a deposed object's change instead of the current
	// object's when it is not states.NotDeposed.
	DeposedKey states.DeposedKey

	// ProviderSchema is used to look up the schema needed to decode the
	// stored change.
	ProviderSchema **ProviderSchema

	// Change, if non-nil, receives the decoded change; it is left
	// untouched when no change is recorded in the plan.
	Change **plans.ResourceInstanceChange
}
// Eval looks up the planned change for this instance object in the active
// plan, decodes it against the resource type's schema, and stores it in
// n.Change when that pointer is set.
func (n *EvalReadDiff) Eval(ctx EvalContext) (interface{}, error) {
	addr := n.Addr.Absolute(ctx.Path())
	schemas := *n.ProviderSchema

	schema, _ := schemas.SchemaForResourceAddr(n.Addr.ContainingResource())
	if schema == nil {
		// Should be caught during validation, so we don't bother with a pretty error here
		return nil, fmt.Errorf("provider does not support resource type %q", n.Addr.Resource.Type)
	}

	generation := states.CurrentGen
	if n.DeposedKey != states.NotDeposed {
		generation = n.DeposedKey
	}

	changeSrc := ctx.Changes().GetResourceInstanceChange(addr, generation)
	if changeSrc == nil {
		log.Printf("[TRACE] EvalReadDiff: No planned change recorded for %s", addr)
		return nil, nil
	}

	decoded, err := changeSrc.Decode(schema.ImpliedType())
	if err != nil {
		return nil, fmt.Errorf("failed to decode planned changes for %s: %s", addr, err)
	}
	if n.Change != nil {
		*n.Change = decoded
	}

	log.Printf("[TRACE] EvalReadDiff: Read %s change from plan for %s", decoded.Action, addr)

	return nil, nil
}
// EvalWriteDiff is an EvalNode implementation that saves a planned change
// for an instance object into the set of global planned changes.
type EvalWriteDiff struct {
	// Addr is the resource instance whose change should be recorded; it
	// must match the address inside the change itself.
	Addr addrs.ResourceInstance

	// DeposedKey identifies a deposed object of the instance, or
	// states.NotDeposed for the current object.
	DeposedKey states.DeposedKey

	// ProviderSchema is used to look up the schema needed to encode the
	// change for storage in the plan.
	ProviderSchema **ProviderSchema

	// Change is the change to record. A nil Change (or nil pointee)
	// instead removes any previously-recorded change for this object.
	Change **plans.ResourceInstanceChange
}
// TODO: test
//
// Eval records the change in n.Change into the set of global planned
// changes, or removes any previously-recorded change when n.Change is nil.
func (n *EvalWriteDiff) Eval(ctx EvalContext) (interface{}, error) {
	changes := ctx.Changes()
	addr := n.Addr.Absolute(ctx.Path())

	if n.Change == nil || *n.Change == nil {
		// A nil change is the caller's way of asking us to drop any change
		// already recorded for this object.
		generation := states.CurrentGen
		if n.DeposedKey != states.NotDeposed {
			generation = n.DeposedKey
		}
		changes.RemoveResourceInstanceChange(addr, generation)
		return nil, nil
	}

	schemas := *n.ProviderSchema
	change := *n.Change
	if change.Addr.String() != addr.String() || change.DeposedKey != n.DeposedKey {
		// Should never happen, and indicates a bug in the caller.
		panic("inconsistent address and/or deposed key in EvalWriteDiff")
	}

	schema, _ := schemas.SchemaForResourceAddr(n.Addr.ContainingResource())
	if schema == nil {
		// Should be caught during validation, so we don't bother with a pretty error here
		return nil, fmt.Errorf("provider does not support resource type %q", n.Addr.Resource.Type)
	}

	encoded, err := change.Encode(schema.ImpliedType())
	if err != nil {
		return nil, fmt.Errorf("failed to encode planned changes for %s: %s", addr, err)
	}
	changes.AppendResourceInstanceChange(encoded)

	if n.DeposedKey == states.NotDeposed {
		log.Printf("[TRACE] EvalWriteDiff: recorded %s change for %s", change.Action, addr)
	} else {
		log.Printf("[TRACE] EvalWriteDiff: recorded %s change for %s deposed object %s", change.Action, addr, n.DeposedKey)
	}

	return nil, nil
}
| {
"pile_set_name": "Github"
} |
Title: CHMOD 计算器
Date: 2006-10-31 09:18
Author: toy
Category: Apps
Slug: chmod_calculator
有些初学 *Nix 的朋友,对于 chmod
命令后面所跟的那串数字感到很是费解。到底在那串数字背后隐藏着什么秘密呢?在更改相应权限的时候,又如何得到那些数字呢?[CHMOD
Calculator](http://www.mistupid.com/internet/chmod.htm) 这个简单的在线
chmod 计算器,将助你一臂之力。

(via [digg](http://digg.com/linux_unix/Best_CHMOD_Calculator), thanks!)
| {
"pile_set_name": "Github"
} |
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
8E335399177892A000E92480 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8E335398177892A000E92480 /* Images.xcassets */; };
8E40C5B11773866D002489E6 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5B01773866D002489E6 /* Foundation.framework */; };
8E40C5B31773866D002489E6 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5B21773866D002489E6 /* CoreGraphics.framework */; };
8E40C5B51773866D002489E6 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5B41773866D002489E6 /* UIKit.framework */; };
8E40C5BB1773866D002489E6 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 8E40C5B91773866D002489E6 /* InfoPlist.strings */; };
8E40C5BD1773866D002489E6 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E40C5BC1773866D002489E6 /* main.m */; };
8E40C5EB177394F8002489E6 /* CoreText.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5EA177394F8002489E6 /* CoreText.framework */; };
8E40C5ED17739502002489E6 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5EC17739502002489E6 /* QuartzCore.framework */; };
8E40C5EF1773951D002489E6 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8E40C5EE1773951D002489E6 /* Accelerate.framework */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
8E3353951778911600E92480 /* Utility.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Utility.h; path = "Hello World/Utility.h"; sourceTree = "<group>"; };
8E335398177892A000E92480 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = "<group>"; };
8E40C5AD1773866D002489E6 /* Hello World.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Hello World.app"; sourceTree = BUILT_PRODUCTS_DIR; };
8E40C5B01773866D002489E6 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
8E40C5B21773866D002489E6 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
8E40C5B41773866D002489E6 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
8E40C5B81773866D002489E6 /* Hello World-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "Hello World-Info.plist"; sourceTree = "<group>"; };
8E40C5BA1773866D002489E6 /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/InfoPlist.strings; sourceTree = "<group>"; };
8E40C5BC1773866D002489E6 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = main.m; path = "Hello World/main.m"; sourceTree = "<group>"; };
8E40C5BE1773866D002489E6 /* Hello World-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Hello World-Prefix.pch"; sourceTree = "<group>"; };
8E40C5C91773866D002489E6 /* XCTest.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = XCTest.framework; path = Library/Frameworks/XCTest.framework; sourceTree = DEVELOPER_DIR; };
8E40C5EA177394F8002489E6 /* CoreText.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreText.framework; path = System/Library/Frameworks/CoreText.framework; sourceTree = SDKROOT; };
8E40C5EC17739502002489E6 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
8E40C5EE1773951D002489E6 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
8E40C5AA1773866D002489E6 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
8E40C5EF1773951D002489E6 /* Accelerate.framework in Frameworks */,
8E40C5ED17739502002489E6 /* QuartzCore.framework in Frameworks */,
8E40C5EB177394F8002489E6 /* CoreText.framework in Frameworks */,
8E40C5B31773866D002489E6 /* CoreGraphics.framework in Frameworks */,
8E40C5B51773866D002489E6 /* UIKit.framework in Frameworks */,
8E40C5B11773866D002489E6 /* Foundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
8E40C5A41773866D002489E6 = {
isa = PBXGroup;
children = (
8E40C5BC1773866D002489E6 /* main.m */,
8E3353951778911600E92480 /* Utility.h */,
8E40C5B71773866D002489E6 /* Supporting Files */,
8E40C5AF1773866D002489E6 /* Frameworks */,
8E40C5AE1773866D002489E6 /* Products */,
);
sourceTree = "<group>";
};
8E40C5AE1773866D002489E6 /* Products */ = {
isa = PBXGroup;
children = (
8E40C5AD1773866D002489E6 /* Hello World.app */,
);
name = Products;
sourceTree = "<group>";
};
8E40C5AF1773866D002489E6 /* Frameworks */ = {
isa = PBXGroup;
children = (
8E40C5EE1773951D002489E6 /* Accelerate.framework */,
8E40C5EC17739502002489E6 /* QuartzCore.framework */,
8E40C5EA177394F8002489E6 /* CoreText.framework */,
8E40C5B01773866D002489E6 /* Foundation.framework */,
8E40C5B21773866D002489E6 /* CoreGraphics.framework */,
8E40C5B41773866D002489E6 /* UIKit.framework */,
8E40C5C91773866D002489E6 /* XCTest.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
8E40C5B71773866D002489E6 /* Supporting Files */ = {
isa = PBXGroup;
children = (
8E335398177892A000E92480 /* Images.xcassets */,
8E40C5B81773866D002489E6 /* Hello World-Info.plist */,
8E40C5B91773866D002489E6 /* InfoPlist.strings */,
8E40C5BE1773866D002489E6 /* Hello World-Prefix.pch */,
);
name = "Supporting Files";
path = "Hello World";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
8E40C5AC1773866D002489E6 /* Hello World */ = {
isa = PBXNativeTarget;
buildConfigurationList = 8E40C5D91773866D002489E6 /* Build configuration list for PBXNativeTarget "Hello World" */;
buildPhases = (
8E40C5A91773866D002489E6 /* Sources */,
8E40C5AA1773866D002489E6 /* Frameworks */,
8E40C5AB1773866D002489E6 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = "Hello World";
productName = "Hello World";
productReference = 8E40C5AD1773866D002489E6 /* Hello World.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
8E40C5A51773866D002489E6 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0500;
ORGANIZATIONNAME = "Erica Sadun";
};
buildConfigurationList = 8E40C5A81773866D002489E6 /* Build configuration list for PBXProject "Hello World" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = 8E40C5A41773866D002489E6;
productRefGroup = 8E40C5AE1773866D002489E6 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
8E40C5AC1773866D002489E6 /* Hello World */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
8E40C5AB1773866D002489E6 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
8E40C5BB1773866D002489E6 /* InfoPlist.strings in Resources */,
8E335399177892A000E92480 /* Images.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
8E40C5A91773866D002489E6 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
8E40C5BD1773866D002489E6 /* main.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
8E40C5B91773866D002489E6 /* InfoPlist.strings */ = {
isa = PBXVariantGroup;
children = (
8E40C5BA1773866D002489E6 /* en */,
);
name = InfoPlist.strings;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
8E40C5D71773866D002489E6 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_STATIC_ANALYZER_MODE = deep;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 7.0;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = (
"-Wall",
"-Wextra",
"-Wno-unused-parameter",
);
RUN_CLANG_STATIC_ANALYZER = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
8E40C5D81773866D002489E6 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_STATIC_ANALYZER_MODE = deep;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = YES;
ENABLE_NS_ASSERTIONS = NO;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 7.0;
OTHER_CFLAGS = (
"-Wall",
"-Wextra",
"-Wno-unused-parameter",
);
RUN_CLANG_STATIC_ANALYZER = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
8E40C5DA1773866D002489E6 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "Hello World/Hello World-Prefix.pch";
INFOPLIST_FILE = "Hello World/Hello World-Info.plist";
PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app;
};
name = Debug;
};
8E40C5DB1773866D002489E6 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME = LaunchImage;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "Hello World/Hello World-Prefix.pch";
INFOPLIST_FILE = "Hello World/Hello World-Info.plist";
PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
8E40C5A81773866D002489E6 /* Build configuration list for PBXProject "Hello World" */ = {
isa = XCConfigurationList;
buildConfigurations = (
8E40C5D71773866D002489E6 /* Debug */,
8E40C5D81773866D002489E6 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
8E40C5D91773866D002489E6 /* Build configuration list for PBXNativeTarget "Hello World" */ = {
isa = XCConfigurationList;
buildConfigurations = (
8E40C5DA1773866D002489E6 /* Debug */,
8E40C5DB1773866D002489E6 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 8E40C5A51773866D002489E6 /* Project object */;
}
| {
"pile_set_name": "Github"
} |
"""
Tests for zipline.pipeline.loaders.frame.DataFrameLoader.
"""
from unittest import TestCase
from mock import patch
from numpy import arange, ones
from numpy.testing import assert_array_equal
from pandas import (
DataFrame,
DatetimeIndex,
Int64Index,
)
from trading_calendars import get_calendar
from zipline.lib.adjustment import (
ADD,
Float64Add,
Float64Multiply,
Float64Overwrite,
MULTIPLY,
OVERWRITE,
)
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.domain import US_EQUITIES
from zipline.pipeline.loaders.frame import DataFrameLoader
class DataFrameLoaderTestCase(TestCase):
    """Tests for DataFrameLoader: input validation, baseline loading, and
    translation of an adjustments DataFrame into per-date Adjustment objects.
    """
    def setUp(self):
        # Build a 20-trading-day x 5-sid grid starting 2014-01-02 on the
        # NYSE calendar, plus an all-True mask of the same shape.
        self.trading_day = get_calendar("NYSE").day
        self.nsids = 5
        self.ndates = 20
        self.sids = Int64Index(range(self.nsids))
        self.dates = DatetimeIndex(
            start='2014-01-02',
            freq=self.trading_day,
            periods=self.ndates,
        )
        self.mask = ones((len(self.dates), len(self.sids)), dtype=bool)
    def tearDown(self):
        # No shared resources to release.
        pass
    def test_bad_input(self):
        """load_adjusted_array should reject column sets other than exactly
        the single column the loader was constructed with."""
        data = arange(100).reshape(self.ndates, self.nsids)
        baseline = DataFrame(data, index=self.dates, columns=self.sids)
        loader = DataFrameLoader(
            USEquityPricing.close,
            baseline,
        )
        with self.assertRaises(ValueError):
            # Wrong column.
            loader.load_adjusted_array(
                US_EQUITIES,
                [USEquityPricing.open],
                self.dates,
                self.sids,
                self.mask,
            )
        with self.assertRaises(ValueError):
            # Too many columns.
            loader.load_adjusted_array(
                US_EQUITIES,
                [USEquityPricing.open, USEquityPricing.close],
                self.dates,
                self.sids,
                self.mask,
            )
    def test_baseline(self):
        """With no adjustments, traversing the loaded array should yield
        plain rolling windows over the requested slice of the baseline."""
        data = arange(100).reshape(self.ndates, self.nsids)
        baseline = DataFrame(data, index=self.dates, columns=self.sids)
        loader = DataFrameLoader(USEquityPricing.close, baseline)
        # Request a sub-block: first 10 dates, sids 1-2.
        dates_slice = slice(None, 10, None)
        sids_slice = slice(1, 3, None)
        [adj_array] = loader.load_adjusted_array(
            US_EQUITIES,
            [USEquityPricing.close],
            self.dates[dates_slice],
            self.sids[sids_slice],
            self.mask[dates_slice, sids_slice],
        ).values()
        # Each length-3 window must equal the corresponding raw baseline rows.
        for idx, window in enumerate(adj_array.traverse(window_length=3)):
            expected = baseline.values[dates_slice, sids_slice][idx:idx + 3]
            assert_array_equal(window, expected)
    def test_adjustments(self):
        """format_adjustments should keep only adjustments that intersect the
        requested dates/sids and re-index them relative to that sub-block."""
        data = arange(100).reshape(self.ndates, self.nsids)
        baseline = DataFrame(data, index=self.dates, columns=self.sids)
        # Use the dates from index 10 on and sids 1-3.
        dates_slice = slice(10, None, None)
        sids_slice = slice(1, 4, None)
        # Adjustments that should actually affect the output.
        relevant_adjustments = [
            {
                'sid': 1,
                'start_date': None,
                'end_date': self.dates[15],
                'apply_date': self.dates[16],
                'value': 0.5,
                'kind': MULTIPLY,
            },
            {
                'sid': 2,
                'start_date': self.dates[5],
                'end_date': self.dates[15],
                'apply_date': self.dates[16],
                'value': 1.0,
                'kind': ADD,
            },
            {
                'sid': 2,
                'start_date': self.dates[15],
                'end_date': self.dates[16],
                'apply_date': self.dates[17],
                'value': 1.0,
                'kind': ADD,
            },
            {
                'sid': 3,
                'start_date': self.dates[16],
                'end_date': self.dates[17],
                'apply_date': self.dates[18],
                'value': 99.0,
                'kind': OVERWRITE,
            },
        ]
        # These adjustments shouldn't affect the output.
        irrelevant_adjustments = [
            {  # Sid Not Requested
                'sid': 0,
                'start_date': self.dates[16],
                'end_date': self.dates[17],
                'apply_date': self.dates[18],
                'value': -9999.0,
                'kind': OVERWRITE,
            },
            {  # Sid Unknown
                'sid': 9999,
                'start_date': self.dates[16],
                'end_date': self.dates[17],
                'apply_date': self.dates[18],
                'value': -9999.0,
                'kind': OVERWRITE,
            },
            {  # Date Not Requested
                'sid': 2,
                'start_date': self.dates[1],
                'end_date': self.dates[2],
                'apply_date': self.dates[3],
                'value': -9999.0,
                'kind': OVERWRITE,
            },
            {  # Date Before Known Data
                'sid': 2,
                'start_date': self.dates[0] - (2 * self.trading_day),
                'end_date': self.dates[0] - self.trading_day,
                'apply_date': self.dates[0] - self.trading_day,
                'value': -9999.0,
                'kind': OVERWRITE,
            },
            {  # Date After Known Data
                'sid': 2,
                'start_date': self.dates[-1] + self.trading_day,
                'end_date': self.dates[-1] + (2 * self.trading_day),
                'apply_date': self.dates[-1] + (3 * self.trading_day),
                'value': -9999.0,
                'kind': OVERWRITE,
            },
        ]
        adjustments = DataFrame(relevant_adjustments + irrelevant_adjustments)
        loader = DataFrameLoader(
            USEquityPricing.close,
            baseline,
            adjustments=adjustments,
        )
        expected_baseline = baseline.iloc[dates_slice, sids_slice]
        formatted_adjustments = loader.format_adjustments(
            self.dates[dates_slice],
            self.sids[sids_slice],
        )
        # Keys are apply-date row offsets within dates[10:]: global date index
        # 16 -> row 6, 17 -> row 7, 18 -> row 8. Row/col bounds below are
        # likewise relative to the requested sub-block (sids 1-3 -> cols 0-2).
        expected_formatted_adjustments = {
            6: [
                Float64Multiply(
                    first_row=0,
                    last_row=5,
                    first_col=0,
                    last_col=0,
                    value=0.5,
                ),
                Float64Add(
                    first_row=0,
                    last_row=5,
                    first_col=1,
                    last_col=1,
                    value=1.0,
                ),
            ],
            7: [
                Float64Add(
                    first_row=5,
                    last_row=6,
                    first_col=1,
                    last_col=1,
                    value=1.0,
                ),
            ],
            8: [
                Float64Overwrite(
                    first_row=6,
                    last_row=7,
                    first_col=2,
                    last_col=2,
                    value=99.0,
                )
            ],
        }
        self.assertEqual(formatted_adjustments, expected_formatted_adjustments)
        # Verify the loader forwards the sliced baseline and the formatted
        # adjustments to AdjustedArray exactly once.
        mask = self.mask[dates_slice, sids_slice]
        with patch('zipline.pipeline.loaders.frame.AdjustedArray') as m:
            loader.load_adjusted_array(
                US_EQUITIES,
                columns=[USEquityPricing.close],
                dates=self.dates[dates_slice],
                sids=self.sids[sids_slice],
                mask=mask,
            )
        self.assertEqual(m.call_count, 1)
        args, kwargs = m.call_args
        assert_array_equal(kwargs['data'], expected_baseline.values)
        self.assertEqual(kwargs['adjustments'], expected_formatted_adjustments)
| {
"pile_set_name": "Github"
} |
[ This is the ChangeLog from the former keyserver/ directory which
kept the old gpgkeys_* keyserver access helpers. We keep it here
to document the history of certain keyserver relates features. ]
2011-12-01 Werner Koch <[email protected]>
NB: ChangeLog files are no longer manually maintained. Starting
on December 1st, 2011 we put change information only in the GIT
commit log, and generate a top-level ChangeLog file from logs at
"make dist". See doc/HACKING for details.
2011-01-20 Werner Koch <[email protected]>
* gpgkeys_hkp.c (get_name): Remove test for KS_GETNAME. It is
always true.
(search_key): Remove test for KS_GETNAME. It is always false.
2009-08-26 Werner Koch <[email protected]>
* gpgkeys_hkp.c: Include util.h.
(send_key): Use strconcat to build KEY.
(appendable_path): New.
(get_name): Use strconcat to build REQUEST.
(search_key): Ditto.
* ksutil.c: Include util.h.
(parse_ks_options): Use make_filename_try for the ca-cert-file arg.
2009-07-06 David Shaw <[email protected]>
* gpgkeys_hkp.c (main, srv_replace): Minor tweaks to use the
DNS-SD names ("pgpkey-http" and "pgpkey-https") in SRV lookups
instead of "hkp" and "hkps".
2009-06-24 Werner Koch <[email protected]>
* gpgkeys_ldap.c (send_key): Do not deep free a NULL modlist.
Reported by Fabian Keil.
2009-05-28 David Shaw <[email protected]>
From 1.4:
* curl-shim.c (curl_slist_append, curl_slist_free_all): New.
Simple wrappers around strlist_t to emulate the curl way of doing
string lists.
(curl_easy_setopt): Handle the curl HTTPHEADER option.
* gpgkeys_curl.c, gpgkeys_hkp.c (main): Avoid caches to get the
most recent copy of the key. This is bug #1061.
2009-05-27 David Shaw <[email protected]>
From 1.4:
* gpgkeys_hkp.c (srv_replace): New function to transform a SRV
hostname to a real hostname.
(main): Call it from here for the HAVE_LIBCURL case (without
libcurl is handled via the curl-shim).
* curl-shim.h, curl-shim.c (curl_easy_setopt, curl_easy_perform):
Add a CURLOPT_SRVTAG_GPG_HACK (passed through to the http
engine).
2009-05-10 David Shaw <[email protected]>
From 1.4:
* gpgkeys_hkp.c (send_key, get_key, get_name, search_key, main):
Add support for SSLized HKP.
* curl-shim.h (curl_version): No need to provide a version for
curl-shim as it always matches the GnuPG version.
* gpgkeys_curl.c, gpgkeys_hkp.c (main): Show which version of curl
we're using as part of --version.
* gpgkeys_curl.c, gpgkeys_finger.c, gpgkeys_hkp.c,
gpgkeys_ldap.c (show_help): Document --version.
2009-05-04 David Shaw <[email protected]>
* gpgkeys_mailto.in: Set 'mail-from' as a keyserver-option, rather
than the ugly ?from= syntax.
2009-01-22 Werner Koch <[email protected]>
* Makefile.am (gpg2keys_curl_LDADD, gpg2keys_hkp_LDADD): Add all
standard libs.
2008-10-20 Werner Koch <[email protected]>
* curl-shim.c (curl_global_init): Mark usused arg.
(curl_version_info): Ditto.
2008-08-29 Werner Koch <[email protected]>
* gpgkeys_kdns.c: Changed copyright notice to the FSF.
2008-04-21 Werner Koch <[email protected]>
* ksutil.c (w32_init_sockets) [HAVE_W32_SYSTEM]: New.
* curl-shim.c (curl_easy_init) [HAVE_W32_SYSTEM]: Call it.
* gpgkeys_finger.c: s/_WIN32/HAVE_W32_SYSTEM/.
(init_sockets): Remove.
(connect_server) [HAVE_W32_SYSTEM]: Call new function.
2008-04-14 David Shaw <[email protected]>
* gpgkeys_curl.c (main), gpgkeys_hkp.c (main): Make sure all
libcurl number options are passed as long.
* curl-shim.c (curl_easy_setopt): Minor tweak to match the real
curl better - libcurl uses 'long', not 'unsigned int'.
2008-04-07 Werner Koch <[email protected]>
* gpgkeys_kdns.c: New.
* Makefile.am: Support kdns.
* no-libgcrypt.c (gcry_strdup): Fix. It was not used.
2008-03-25 Werner Koch <[email protected]>
* gpgkeys_ldap.c (build_attrs): Take care of char defaulting to
unsigned when using hextobyte.
2007-10-25 David Shaw <[email protected]> (wk)
From 1.4 (July):
* gpgkeys_ldap.c (main): Fix bug in setting up whether to verify
peer SSL cert. This used to work with older OpenLDAP, but is now
more strictly handled.
* gpgkeys_ldap.c (search_key, main): Fix bug where searching for
foo bar (no quotes) on the command line resulted in searching for
"foo\2Abar" due to LDAP quoting. The proper search is "foo*bar".
2007-06-11 Werner Koch <[email protected]>
* gpgkeys_hkp.c (send_key): Rename eof to r_eof as some Windows
header defines such a symbol.
(main): Likewise.
2007-06-06 Werner Koch <[email protected]>
* gpgkeys_ldap.c (send_key, send_key_keyserver): Rename eof to
r_eof as some Windows file has such a symbol.
(main): Likewise.
2007-05-07 Werner Koch <[email protected]>
* Makefile.am (gpg2keys_ldap_LDADD): Add GPG_ERROR_LIBS.
2007-05-04 Werner Koch <[email protected]>
* gpgkeys_test.in: Rename to ..
* gpg2keys_test.in: .. this.
* gpgkeys_mailto.in: Rename to ..
* gpg2keys_mailto.in: .. this
* Makefile.am: Likewise
2007-03-13 David Shaw <[email protected]>
From STABLE-BRANCH-1-4
* gpgkeys_curl.c (main): Use curl_version_info to verify that the
protocol we're about to use is actually available.
* curl-shim.h, curl-shim.c (curl_free): Make into a macro.
(curl_version_info): New. Only advertises "http" for our shim, of
course.
2007-03-09 David Shaw <[email protected]>
From STABLE-BRANCH-1-4
* gpgkeys_ldap.c (send_key): Missing a free().
* curl-shim.c (curl_easy_perform): Some debugging items that may
be handy.
2006-12-03 David Shaw <[email protected]>
* gpgkeys_hkp.c (search_key): HKP keyservers like the 0x to be
present when searching by keyID.
2006-11-22 Werner Koch <[email protected]>
* Makefile.am (gpg2keys_ldap_LDADD): Add jnlib. This is needed
for some replacement functions.
2006-11-21 Werner Koch <[email protected]>
* curl-shim.c (curl_easy_perform): Made BUFLEN and MAXLNE a size_t.
2006-11-05 David Shaw <[email protected]>
* gpgkeys_hkp.c (curl_mrindex_writer): Revert previous change.
Key-not-found still has a HTML response.
2006-10-24 Marcus Brinkmann <[email protected]>
* Makefile.am (gpg2keys_ldap_CPPFLAGS): Rename second instance to ...
(gpg2keys_finger_CPPFLAGS): ... this.
2006-10-20 Werner Koch <[email protected]>
* Makefile.am: Reorder macros for better readability.
(gpg2keys_finger_LDADD): Add GPG_ERROR_LIBS.
2006-10-19 David Shaw <[email protected]>
* gpgkeys_hkp.c (curl_mrindex_writer): Print a warning if we see
HTML coming back from a MR hkp query.
2006-10-17 Werner Koch <[email protected]>
* Makefile.am: Removed W32LIBS as they are included in NETLIBS.
Removed PTH_LIBS.
2006-09-26 Werner Koch <[email protected]>
* curl-shim.c: Adjusted for changes in http.c.
(curl_easy_perform): Changed LINE from unsigned char* to char*.
* Makefile.am (gpg2keys_curl_LDADD, gpg2keys_hkp_LDADD)
[FAKE_CURL]: Need to link against common_libs and pth.
* curl-shim.h, curl-shim.c: Removed license exception as not
needed here.
2006-09-22 Werner Koch <[email protected]>
* gpgkeys_curl.c, gpgkeys_hkp.c, gpgkeys_ldap.c, curl-shim.c:
* curl-shim.h, ksutil.c, ksutil.h: Add special license exception
for OpenSSL. This helps to avoid license conflicts if OpenLDAP or
cURL is linked against OpenSSL and we would thus indirectly link
to OpenSSL. This is considered a bug fix and forgives all
possible violations, pertaining to this issue, possibly occured in
the past.
* no-libgcrypt.c: Changed license to a simple all permissive one.
* Makefile.am (gpg2keys_ldap_LDADD): For license reasons do not
link against common_libs.
(gpg2keys_curl_LDADD, gpg2keys_hkp_LDADD): Ditto.
* ksutil.c (ks_hextobyte, ks_toupper, ks_strcasecmp): New.
Identical to the ascii_foo versions from jnlib.
* gpgkeys_ldap.c: Include assert.h.
(main): Replace BUG by assert.
(build_attrs): Use ks_hextobyte and ks_strcasecmp.
* gpgkeys_finger.c (get_key): Resolved signed/unsigned char
mismatch.
2006-09-19 Werner Koch <[email protected]>
* no-libgcrypt.c: New. Taken from ../tools.
* Makefile.am: Add no-libgcrypt to all sources.
2006-09-06 Marcus Brinkmann <[email protected]>
* Makefile.am (AM_CFLAGS): Add $(GPG_ERROR_CFLAGS).
2006-08-16 Werner Koch <[email protected]>
* Makefile.am: Renamed all binaries to gpg2keys_*.
(gpg2keys_ldap_CPPFLAGS): Add AM_CPPFLAGS.
2006-08-15 Werner Koch <[email protected]>
* Makefile.am: Adjusted to the gnupg2 framework.
2006-08-14 Werner Koch <[email protected]>
* curl-shim.c, curl-shim.h: Changed to make use of the new http.c
API.
* curl-shim.c (curl_easy_perform): Add missing http_close to the
POST case.
2006-07-24 David Shaw <[email protected]> (wk)
* curl-shim.c (curl_easy_perform): Minor cleanup of proxy code.
* gpgkeys_hkp.c (send_key)
* gpgkeys_ldap.c (send_key, send_key_keyserver): Fix string
matching problem when the ascii armored form of the key happens to
match "KEY" at the beginning of the line.
2006-04-26 David Shaw <[email protected]>
* gpgkeys_http.c, gpgkeys_oldhkp.c: Removed.
* Makefile.am: Don't build gpgkeys_http or gpgkeys_(old)hkp any
longer as this is done via curl or fake-curl.
* ksutil.h, ksutil.c, gpgkeys_hkp.c, gpgkeys_curl.c: Minor
#include tweaks as FAKE_CURL is no longer meaningful.
2006-04-10 David Shaw <[email protected]>
* gpgkeys_ldap.c (ldap_quote, get_name, search_key): LDAP-quote
directly into place rather than mallocing temporary buffers.
* gpgkeys_ldap.c (get_name): Build strings with strcat rather than
using sprintf which is harder to read and modify.
* ksutil.h, ksutil.c (classify_ks_search): Add
KS_SEARCH_KEYID_SHORT and KS_SEARCH_KEYID_LONG to search for a key
ID.
* gpgkeys_ldap.c (search_key): Use it here to flip from pgpUserID
searches to pgpKeyID or pgpCertID.
2006-03-27 David Shaw <[email protected]>
* gpgkeys_ldap.c: #define LDAP_DEPRECATED for newer OpenLDAPs so
they use the regular old API that is compatible with other LDAP
libraries.
2006-03-03 David Shaw <[email protected]>
* gpgkeys_ldap.c (main): Fix build problem with non-OpenLDAP LDAP
libraries that have TLS.
2006-02-23 David Shaw <[email protected]>
* ksutil.c (init_ks_options): Default include-revoked and
include-subkeys to on, as gpg isn't doing this any longer.
2006-02-22 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_name): A GETNAME query turns exact=on to cut
down on odd matches.
2006-02-21 David Shaw <[email protected]>
* gpgkeys_ldap.c (make_one_attr, build_attrs, send_key): Don't
allow duplicate attributes as OpenLDAP is now enforcing this.
* gpgkeys_ldap.c (main): Add binddn and bindpw so users can pass
credentials to a remote LDAP server.
* curl-shim.h, curl-shim.c (curl_easy_init, curl_easy_setopt,
curl_easy_perform): Mingw has 'stderr' as a macro?
* curl-shim.h, curl-shim.c (curl_easy_init, curl_easy_setopt,
curl_easy_perform): Add CURLOPT_VERBOSE and CURLOPT_STDERR for
easier debugging.
2006-01-16 David Shaw <[email protected]>
* gpgkeys_hkp.c (send_key): Do not escape the '=' in the HTTP POST
when uploading a key.
2005-12-23 David Shaw <[email protected]>
* ksutil.h, ksutil.c (parse_ks_options): New keyserver command
"getname".
* gpgkeys_hkp.c (main, get_name), gpgkeys_ldap.c (main, get_name):
Use it here to do direct name (rather than key ID) fetches.
2005-12-19 David Shaw <[email protected]>
* ksutil.h, ksutil.c (curl_armor_writer, curl_writer,
curl_writer_finalize): New functionality to handle binary format
keys by armoring them for input to GPG.
* gpgkeys_curl.c (get_key), gpgkeys_hkp.c (get_key): Call it here.
2005-12-07 David Shaw <[email protected]>
* gpgkeys_finger.c (get_key), gpgkeys_curl.c (get_key): Better
language for the key-not-found error.
* ksutil.c (curl_err_to_gpg_err): Add CURLE_OK and
CURLE_COULDNT_CONNECT.
* gpgkeys_curl.c (get_key): Give key-not-found error if no data is
found (or file itself is not found) during a fetch.
2005-12-06 David Shaw <[email protected]>
* curl-shim.c (curl_easy_perform): Fix build warning (code before
declaration).
2005-11-02 David Shaw <[email protected]>
* gpgkeys_hkp.c (search_key): Fix warning with typecast (though
curl should really have defined that char * as const).
2005-08-25 David Shaw <[email protected]>
* ksutil.h, ksutil.c (parse_ks_options): Remove exact-name and
exact-email.
(classify_ks_search): Mimic the gpg search modes instead with *,
=, <, and @.
* gpgkeys_ldap.c (search_key), gpgkeys_hkp.c (search_key): Call
them here. Suggested by Jason Harris.
2005-08-18 David Shaw <[email protected]>
* ksutil.h, ksutil.c (parse_ks_options): New keyserver-option
exact-name. The last of exact-name and exact-email overrides the
earlier.
* gpgkeys_ldap.c (search_key), gpgkeys_hkp.c (search_key): Use it
here to do a name-only search.
* gpgkeys_ldap.c (ldap_quote): \-quote a string for LDAP.
* gpgkeys_ldap.c (search_key): Use it here to escape reserved
characters in searches.
2005-08-17 David Shaw <[email protected]>
* ksutil.h, ksutil.c (parse_ks_options): New keyserver-option
exact-email.
* gpgkeys_ldap.c (search_key), gpgkeys_hkp.c (search_key): Use it
here to do an email-only search.
2005-08-08 David Shaw <[email protected]>
* Makefile.am: Include LDAP_CPPFLAGS when building LDAP.
2005-08-03 David Shaw <[email protected]>
* gpgkeys_hkp.c (main), gpgkeys_curl.c (main), curl-shim.h: Show
version of curl (or curl-shim) when debug is set.
2005-07-20 David Shaw <[email protected]>
* gpgkeys_curl.c (get_key, main): Don't try and be smart about
what protocols we handle. Directly pass them to curl or fake-curl
and see if an error comes back.
* curl-shim.h, curl-shim.c (handle_error), ksutil.c
(curl_err_to_gpg_err): Add support for CURLE_UNSUPPORTED_PROTOCOL
in fake curl.
* Makefile.am: Don't need -DFAKE_CURL any longer since it's in
config.h.
2005-06-23 David Shaw <[email protected]>
* gpgkeys_mailto.in, gpgkeys_test.in: Use @VERSION@ so version
string stays up to date.
* gpgkeys_http.c: Don't need to define HTTP_PROXY_ENV here since
it's in ksutil.h.
* gpgkeys_curl.c (get_key, main), gpgkeys_hkp.c (main): Pass AUTH
values to curl or curl-shim.
* curl-shim.c (curl_easy_perform), gpgkeys_curl.c (main),
gpgkeys_hkp.c (main): Use curl-style proxy semantics.
* curl-shim.h, curl-shim.c (curl_easy_setopt, curl_easy_perform):
Add CURLOPT_USERPWD option for HTTP auth.
* gpgkeys_http.c (get_key), gpgkeys_oldhkp (send_key, get_key,
search_key): No longer need to pass a proxyauth.
* gpgkeys_http.c (get_key): Pass auth outside of the URL.
2005-06-21 David Shaw <[email protected]>
* gpgkeys_http.c (get_key), gpgkeys_oldhkp.c (send_key, get_key,
search_key): Fix http_open/http_open_document calls to pass NULL
for auth and proxyauth since these programs pass them in the URL.
2005-06-20 David Shaw <[email protected]>
* gpgkeys_hkp.c (append_path, send_key, get_key, search_key,
main), gpgkeys_oldhkp.c (main): Properly handle double slashes in
paths.
2005-06-05 David Shaw <[email protected]>
* ksutil.c (init_ks_options, parse_ks_options): Provide a default
"/" path unless overridden by the config. Allow config to specify
items multiple times and take the last specified item.
2005-06-04 David Shaw <[email protected]>
* gpgkeys_hkp.c, gpgkeys_oldhkp.c: Add support for HKP servers
that aren't at the root path. Suggested by Jack Bates.
2005-06-01 David Shaw <[email protected]>
* ksutil.c [HAVE_DOSISH_SYSTEM]: Fix warnings on mingw32. Noted
by Joe Vender.
2005-05-04 David Shaw <[email protected]>
* ksutil.h, ksutil.c: #ifdef so we can build without libcurl or
fake-curl.
2005-05-03 David Shaw <[email protected]>
* gpgkeys_http.c: Need GET defined.
2005-05-01 David Shaw <[email protected]>
* gpgkeys_hkp.c, gpgkeys_oldhkp.c, ksutil.h: Some minor cleanup
and comments as to the size of MAX_LINE and MAX_URL.
2005-04-16 David Shaw <[email protected]>
* gpgkeys_hkp.c: New hkp handler that uses curl or curl-shim.
* Makefile.am: Build new gpgkeys_hkp.
* curl-shim.c (curl_easy_perform): Cleanup.
* ksutil.h, ksutil.c (curl_writer), gpgkeys_curl.c (get_key): Pass
a context to curl_writer so we can support multiple fetches in a
single session.
* curl-shim.h, curl-shim.c (handle_error, curl_easy_setopt,
curl_easy_perform): Add POST functionality to the curl shim.
* curl-shim.h, curl-shim.c (curl_escape, curl_free): Emulate
curl_escape and curl_free.
* gpgkeys_curl.c (main): If the http-proxy option is given without
any arguments, try to get the proxy from the environment.
* ksutil.h, ksutil.c (curl_err_to_gpg_err, curl_writer): Copy from
gpgkeys_curl.c.
* gpgkeys_oldhkp.c: Copy from gpgkeys_hkp.c.
2005-03-22 David Shaw <[email protected]>
* gpgkeys_ldap.c, ksutil.h, ksutil.c (print_nocr): Moved from
gpgkeys_ldap.c. Print a string, but strip out any CRs.
* gpgkeys_finger.c (get_key), gpgkeys_hkp.c (get_key),
gpgkeys_http.c (get_key): Use it here when outputting key material
to canonicalize line endings.
2005-03-19 David Shaw <[email protected]>
* gpgkeys_ldap.c (main): Fix three wrong calls to fail_all().
Noted by Stefan Bellon.
2005-03-17 David Shaw <[email protected]>
* ksutil.c (parse_ks_options): Handle verbose=nnn.
* Makefile.am: Calculate GNUPG_LIBEXECDIR directly. Do not
redefine $libexecdir.
* gpgkeys_curl.c, gpgkeys_finger.c, gpgkeys_ldap.c: Start using
parse_ks_options and remove a lot of common code.
* ksutil.h, ksutil.c (parse_ks_options): Parse OPAQUE, and default
debug with no arguments to 1.
2005-03-16 David Shaw <[email protected]>
* gpgkeys_ldap.c: Include lber.h if configure determines we need
it.
* ksutil.h, ksutil.c (ks_action_to_string): New.
(free_ks_options): Only free if options exist.
* ksutil.h, ksutil.c (init_ks_options, free_ks_options,
parse_ks_options): Pull a lot of duplicated code into a single
options parser for all keyserver helpers.
2005-02-11 David Shaw <[email protected]>
* curl-shim.c (curl_easy_perform): Fix compile warning.
* curl-shim.h, gpgkeys_curl.c (main), gpgkeys_ldap.c (main): Add
ca-cert-file option, to pass in the SSL cert.
* curl-shim.h, curl-shim.c: New. This is code to fake the curl
API in terms of the current HTTP iobuf API.
* gpgkeys_curl.c [FAKE_CURL], Makefile.am: If FAKE_CURL is set,
link with the iobuf code rather than libcurl.
2005-02-05 David Shaw <[email protected]>
* gpgkeys_finger.c (main), gpgkeys_hkp.c (main): Fix --version
output.
* gpgkeys_curl.c (main): Make sure the curl handle is cleaned up
on failure.
2005-02-01 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_key), gpgkeys_http.c (get_key): Fix missing
http_close() calls. Noted by Phil Pennock.
* ksutil.h: Up the default timeout to two minutes.
2005-01-24 David Shaw <[email protected]>
* gpgkeys_ldap.c (print_nocr): New.
(get_key): Call it here to canonicalize line endings.
* gpgkeys_curl.c (writer): Discard everything outside the BEGIN
and END lines when retrieving keys. Canonicalize line endings.
(main): Accept FTPS.
2005-01-21 David Shaw <[email protected]>
* gpgkeys_ldap.c (main): Add "check-cert" option to disable SSL
certificate checking (which is on by default).
* gpgkeys_curl.c (main): Add "debug" option to match the LDAP
helper. Add "check-cert" option to disable SSL certificate
checking (which is on by default).
2005-01-18 David Shaw <[email protected]>
* gpgkeys_curl.c: Fix typo.
2005-01-18 Werner Koch <[email protected]>
* gpgkeys_curl.c: s/MAX_PATH/URLMAX_PATH/g to avoid a clash with
the W32 defined macro. Removed unneeded initialization of static
variables.
* gpgkeys_http.c: Ditto.
* ksutil.h: s/MAX_PATH/URLMAX_PATH/.
2005-01-17 David Shaw <[email protected]>
* gpgkeys_curl.c (main): Only allow specified protocols to use the
curl handler.
* Makefile.am: Use LIBCURL_CPPFLAGS instead of LIBCURL_INCLUDES.
2005-01-13 David Shaw <[email protected]>
* ksutil.h, gpgkeys_curl.c, gpgkeys_hkp.c, gpgkeys_ldap.c,
gpgkeys_finger.c, gpgkeys_http.c: Part 2 of the cleanup. Move all
the various defines to ksutil.h.
* gpgkeys_finger.c, gpgkeys_hkp.c, gpgkeys_http.c, gpgkeys_ldap.c:
Part 1 of a minor cleanup to use #defines instead of hard-coded
sizes.
* gpgkeys_finger.c (connect_server): Use INADDR_NONE instead of
SOCKET_ERROR. Noted by Timo.
2005-01-09 David Shaw <[email protected]>
* gpgkeys_curl.c (get_key): Newer versions of libcurl don't define
TRUE.
2004-12-24 David Shaw <[email protected]>
* gpgkeys_curl.c (main): Use new defines for opting out of certain
transfer protocols. Allow setting HTTP proxy via "http-proxy=foo"
option (there is natural support in libcurl for the http_proxy
environment variable).
* Makefile.am: Remove the conditional since this is all handled in
autoconf now.
2004-12-22 David Shaw <[email protected]>
* gpgkeys_curl.c (main): New "follow-redirects" option. Takes an
optional numeric value for the maximum number of redirects to
allow. Defaults to 5.
* gpgkeys_curl.c (main), gpgkeys_finger.c (main), gpgkeys_hkp.c
(main), gpgkeys_http.c (main), gpgkeys_ldap.c (main): Make sure
that a "timeout" option passed with no arguments is properly
handled.
* gpgkeys_curl.c (get_key, writer): New function to wrap around
fwrite to avoid DLL access problem on win32.
* gpgkeys_http.c (main, get_key): Properly pass authentication
info through to the http library.
* Makefile.am: Build gpgkeys_http or gpgkeys_curl as needed.
* gpgkeys_curl.c (main, get_key): Minor tweaks to work with either
FTP or HTTP.
* gpgkeys_ftp.c: renamed to gpgkeys_curl.c.
* gpgkeys_ftp.c (main, get_key): Use auth data as passed by gpg.
Use CURLOPT_FILE instead of CURLOPT_WRITEDATA (same option, but
backwards compatible).
2004-12-21 David Shaw <[email protected]>
* gpgkeys_ftp.c: New.
* Makefile.am: Build it if requested.
2004-12-14 Werner Koch <[email protected]>
* Makefile.am (install-exec-hook, uninstall-hook): Removed. For
Windows reasons we can't use the symlink trick.
2004-12-03 David Shaw <[email protected]>
* Makefile.am: The harmless "ignored error" on gpgkeys_ldap
install on top of an existing install is bound to confuse people.
Use ln -s -f to force the overwrite.
2004-10-28 David Shaw <[email protected]>
* gpgkeys_finger.c [_WIN32] (connect_server): Fix typo.
2004-10-28 Werner Koch <[email protected]>
* Makefile.am (other_libs): New. Also include LIBICONV. Noted by
Tim Mooney.
2004-10-28 Werner Koch <[email protected]>
* Makefile.am (other_libs):
2004-10-18 David Shaw <[email protected]>
* gpgkeys_hkp.c (send_key, get_key, search_key): Use "hkp" instead
of "x-hkp" so it can be used as a SRV tag.
2004-10-16 David Shaw <[email protected]>
* gpgkeys_finger.c [_WIN32] (connect_server): Fix typo.
2004-10-15 Werner Koch <[email protected]>
* gpgkeys_ldap.c (main, show_help): Kludge to implement standard
GNU options. Factored help printing out.
* gpgkeys_finger.c (main, show_help): Ditto.
* gpgkeys_hkp.c (main, show_help): Ditto.
* gpgkeys_http.c (main, show_help): Ditto.
* gpgkeys_test.in, gpgkeys_mailto.in: Implement --version and --help.
* Makefile.am: Add ksutil.h.
2004-10-14 David Shaw <[email protected]>
* gpgkeys_finger.c (main): We do not support relay fingering
(i.e. "finger://relayhost/[email protected]"), but finger URLs are
occasionally miswritten that way. Give an error in this case.
2004-10-14 Werner Koch <[email protected]>
* gpgkeys_finger.c (get_key): s/unsigned char/byte/ due
to a strange typedef for RISC OS. Noted by Stefan.
2004-10-13 David Shaw <[email protected]>
* gpgkeys_ldap.c (main), gpgkeys_hkp.c (main), gpgkeys_http.c
(main), gpgkeys_finger.c (main): Call timeout functions before
performing an action that could block for a long time.
* ksutil.h, ksutil.c: New. Right now just contains timeout
functions.
2004-10-11 David Shaw <[email protected]>
* gpgkeys_finger.c, gpgkeys_hkp.c, gpgkeys_http.c, gpgkeys_ldap.c:
	Fix a few occurrences of "filename" to `filename'.
2004-10-11 Werner Koch <[email protected]>
* gpgkeys_finger.c: New.
2004-08-27 Stefan Bellon <[email protected]>
* gpgkeys_hkp.c (search_key): Fix the prior faulty fix by
introducing a cast but leaving skey unsigned.
* gpgkeys_hkp.c (search_key): Change type of variable skey from
unsigned char* to char* to fix type incompatibility.
2004-08-23 David Shaw <[email protected]>
* gpgkeys_ldap.c (get_key, search_key), gpgkeys_hkp.c (get_key,
search_key), gpgkeys_http.c (get_key): Do not give informational
logs since this is now done inside gpg.
* gpgkeys_hkp.c (dehtmlize): Understand the quote character
(i.e. """) in HTML responses.
(search_key): Search key must be unsigned for url encoder to work
properly for 8-bit values.
* gpgkeys_ldap.c (get_key): Factor out informational display into
new function build_info().
* gpgkeys_ldap.c (build_attrs): Properly terminate user ID strings
that got shrunk due to encoding.
2004-08-22 David Shaw <[email protected]>
* gpgkeys_ldap.c (find_basekeyspacedn): Use LDAP_SCOPE_BASE along
with a full DN rather than LDAP_SCOPE_ONELEVEL plus a filter to
find the pgpServerInfo object. Some LDAP setups don't like the
search.
(main): Stop binding to the server since it seems no server really
requires it, and some require it not be there.
2004-07-29 David Shaw <[email protected]>
* gpgkeys_ldap.c (main): Add "debug" option. This is only really
useful with OpenLDAP, but it's practically vital to debug SSL and
TLS setups. Add "basedn" option. This allows users to override
the autodetection for base DN. SSL overrides TLS, so TLS will not
be started on SSL connections (starting an already started car).
2004-07-28 David Shaw <[email protected]>
* gpgkeys_ldap.c (build_attrs): Add "pgpKeySize" and "pgpSubKeyID"
attributes so we can do subkey searches.
* gpgkeys_ldap.c (main): Under certain error conditions, we might
try and unbind twice. Don't.
* gpgkeys_ldap.c (join_two_modlists): New.
(send_key): Use new function so we can try a modify operation
first, and fail over to an add if that fails. Add cannot cope
with the NULLs at the head of the modify request, so we jump into
the list in the middle.
2004-07-27 David Shaw <[email protected]>
* gpgkeys_ldap.c (main): Don't try and error out before making a
ldaps connection to the NAI keyserver since we cannot tell if it
is a NAI keyserver until we connect. Fail if we cannot find a
base keyspace DN. Fix a false success message for TLS being
enabled.
2004-07-20 Werner Koch <[email protected]>
* gpgkeys_ldap.c [_WIN32]: Include Windows specific header files.
Suggested by Brian Gladman.
2004-05-26 David Shaw <[email protected]>
* gpgkeys_http.c: General polish and removal of leftover stuff
from gpgkeys_hkp.c.
2004-05-21 David Shaw <[email protected]>
* gpgkeys_http.c (get_key): Cosmetic fix - make sure that URLs
with no path use a path of "/".
* gpgkeys_ldap.c (ldap2epochtime): We can always rely on timegm()
being available now, since it's a replacement function.
2004-05-20 David Shaw <[email protected]>
* gpgkeys_http.c: New program to do a simple HTTP file fetch using
the keyserver interface.
* Makefile.am: Build it.
2004-02-28 David Shaw <[email protected]>
* Makefile.am: Don't split LDADD across two lines since some make
programs can't handle blank lines after a \ continuation. Noted
by Christoph Moench-Tegeder.
2004-02-25 David Shaw <[email protected]>
* gpgkeys_ldap.c (send_key): List pgpCertID as one of the deleted
attributes. This guarantees that if something goes wrong, we
won't be able to complete the transaction, thus leaving any key
already existing on the server intact.
2004-02-23 David Shaw <[email protected]>
* gpgkeys_ldap.c (delete_one_attr): Removed.
(make_one_attr): Delete functionality added. Optional deduping
functionality added (currently only used for pgpSignerID).
(build_attrs): Translate sig entries into pgpSignerID. Properly
build the timestamp for pgpKeyCreateTime and pgpKeyExpireTime.
2004-02-22 David Shaw <[email protected]>
* gpgkeys_ldap.c (delete_one_attr): New function to replace
	attributes with NULL (a "delete" that works even for nonexistent
attributes).
(send_key): Use it here to remove attributes so a modify operation
starts with a clean playing field. Bias sends to modify before
add, since (I suspect) people update their existing keys more
often than they make and send new keys to the server.
2004-02-21 David Shaw <[email protected]>
* gpgkeys_ldap.c (epoch2ldaptime): New. Converse of
ldap2epochtime.
(make_one_attr): New. Build a modification list in memory to send
to the LDAP server.
(build_attrs): New. Parse INFO lines sent over by gpg.
(free_mod_values): New. Unwinds a modification list.
(send_key_keyserver): Renamed from old send_key().
(send_key): New function to send a key to a LDAP server.
(main): Use send_key() for real LDAP servers, send_key_keyserver()
otherwise.
2004-02-20 David Shaw <[email protected]>
* gpgkeys_ldap.c: Replacement prototypes for setenv and unsetenv.
(search_key): Catch a SIZELIMIT_EXCEEDED error and show the user
whatever the server did give us.
(find_basekeyspacedn): There is no guarantee that namingContexts
will be readable.
* Makefile.am: Link gpgkeys_ldap with libutil.a to get the
replacement functions (and eventually translations, etc).
2004-02-19 David Shaw <[email protected]>
* gpgkeys_ldap.c (ldap2epochtime): LDAP timestamps are UTC, so do
not correct for timezones.
(main): Find the basekeyspacedn before we try to start TLS, so we
can give a better error message when a user tries to use TLS with
a LDAP keyserver.
* Makefile.am: Add automake conditionals to symlink gpgkeys_ldaps
to gpgkeys_ldap when needed.
* gpgkeys_ldap.c (main): Add support for LDAPS and TLS
connections. These are only useful and usable when talking to
real LDAP keyservers. Add new "tls" option to tune TLS use from
off, to try quietly, to try loudly, or to require TLS.
* gpgkeys_ldap.c (find_basekeyspacedn): New function to figure out
what kind of LDAP server we're talking to (either real LDAP or the
LDAP keyserver), and return the baseKeySpaceDN to find keys under.
(main): Call it from here, and remove the old code that only
handled the LDAP keyserver.
2004-02-18 David Shaw <[email protected]>
* gpgkeys_ldap.c (ldap_to_gpg_err): Make sure that
LDAP_OPT_ERROR_NUMBER is defined before we use it.
* gpgkeys_mailto.in: Fix VERSION number.
2004-01-13 Werner Koch <[email protected]>
* gpgkeys_hkp.c (send_key): Add a content type.
2004-01-11 David Shaw <[email protected]>
* gpgkeys_hkp.c (search_key): Catch a mangled input file (useful
if something other than GnuPG is calling the program).
(main): Avoid possible pre-string write. Noted by Christian
Biere.
* gpgkeys_ldap.c (main): Avoid possible pre-string write.
2003-12-28 David Shaw <[email protected]>
* gpgkeys_hkp.c (send_key, get_key, main): Work with new HTTP code
that passes the proxy in from the outside. If the command file
sends a proxy, use it. If it sends "http-proxy" with no
arguments, use $http_proxy from the environment. Suggested by
Christian Biere.
2003-12-28 Stefan Bellon <[email protected]>
* gpgkeys_hkp.c, gpgkeys_ldap.c [__riscos__]: Removal of
unnecessary #ifdef __riscos__ sections.
2003-11-27 Werner Koch <[email protected]>
* gpgkeys_hkp.c (get_key): Fixed invalid use of fprintf without
format string.
2003-10-25 Werner Koch <[email protected]>
* Makefile.am (gpgkeys_hkp_LDADD): Replaced INTLLIBS by LIBINTL.
2003-07-10 David Shaw <[email protected]>
* Makefile.am: Use W32LIBS where appropriate.
2003-05-30 David Shaw <[email protected]>
* gpgkeys_hkp.c, gpgkeys_ldap.c: #include <getopt.h> if it is
available. Also include extern references for optarg and optind
since there is no guarantee that any header file will include
them. Standards? We don't need no stinkin' standards.
* Makefile.am: Use @GETOPT@ to pull in libiberty on those
platforms that need it.
2003-04-08 David Shaw <[email protected]>
* gpgkeys_hkp.c (dehtmlize, parse_hkp_index): Fix memory
corruption bug on some platforms.
2003-03-11 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_key): Properly handle CRLF line endings in
the armored key.
(main): Accept "try-dns-srv" option.
* Makefile.am: Use @CAPLIBS@ to link in -lcap if we are using
capabilities. Use @SRVLIBS@ to link in the resolver if we are
using DNS SRV.
2003-02-11 David Shaw <[email protected]>
* Makefile.am: Use a local copy of libexecdir along with @PACKAGE@
so it can be easily overridden at make time.
2003-01-29 David Shaw <[email protected]>
* gpgkeys_mailto.in: Fix regexp to work properly if the "keyid" is
not a keyid, but rather a text string from the user ID.
2003-01-06 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_key): Use options=mr when getting a key so
keyserver doesn't attach the HTML header which we will just have
to discard.
2002-11-17 David Shaw <[email protected]>
* gpgkeys_ldap.c (main), gpgkeys_hkp.c (main): Use new keyserver
protocol version.
2002-11-14 David Shaw <[email protected]>
* gpgkeys_ldap.c (get_key): The deduping code requires
"pgpcertid", but that was not available when running without
verbose on. Noted by Stefan.
2002-11-10 David Shaw <[email protected]>
* gpgkeys_ldap.c (get_key): Fix typo in deduping code.
2002-11-05 David Shaw <[email protected]>
* gpgkeys_ldap.c (key_in_keylist, add_key_to_keylist,
free_keylist, get_key, search_key): The LDAP keyserver doesn't
remove duplicates, so remove them locally. Do not include the key
modification time in the search response.
2002-11-04 David Shaw <[email protected]>
* gpgkeys_hkp.c (send_key), gpgkeys_ldap.c (send_key): Properly
handle an input file that does not include any key data at all.
2002-10-24 David Shaw <[email protected]>
* gpgkeys_hkp.c (main), gpgkeys_ldap.c (main): Add -V flag to
output protocol and program version.
2002-10-21 David Shaw <[email protected]>
* Makefile.am: Anything linking with libutil.a needs INTLLIBS as
well on platforms where INTLLIBS is set.
2002-10-14 David Shaw <[email protected]>
* gpgkeys_hkp.c (write_quoted): Use %-encoding instead of
\-encoding.
(parse_hkp_index): Use new keyserver key listing format, and add
support for disabled keys via include-disabled.
* gpgkeys_ldap.c (get_key): Don't print keysize unless it's >0.
(printquoted): Use %-encoding instead of \-encoding.
(search_key): Use new keyserver key listing format.
2002-10-08 David Shaw <[email protected]>
* gpgkeys_ldap.c (search_key, main): Make sure LDAP values are
freed in case of error.
* gpgkeys_ldap.c (fail_all): New function to unwind a keylist and
error each item.
(main): Call fail_all from here, as needed. Also add a NO_MEMORY
error in an appropriate place and fix error return code.
(ldap_err_to_gpg_err): Add KEYSERVER_UNREACHABLE.
* gpgkeys_hkp.c (fail_all): New function to unwind a keylist and
error each item.
(main): Call fail_all from here. Also add a NO_MEMORY error in an
appropriate place.
(get_key): Use new UNREACHABLE error for network errors.
2002-09-26 Werner Koch <[email protected]>
* gpgkeys_ldap.c (send_key): Removed non-constant initializers.
2002-09-24 David Shaw <[email protected]>
* gpgkeys_ldap.c (ldap_err_to_gpg_err, ldap_to_gpg_err, send_key,
get_key, search_key, main): Some minor error reporting
enhancements for use with GPA (show reasons for KEY FAILED).
* gpgkeys_hkp.c (send_key, get_key, search_key, main): Some minor
error reporting enhancements for use with GPA (show reasons for
KEY FAILED).
2002-09-20 Werner Koch <[email protected]>
* gpgkeys_hkp.c (handle_old_hkp_index): s/input/inp/ to avoid
shadowing warning.
2002-09-19 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_key, handle_old_hkp_index, search_key):
Properly handle line truncation.
2002-09-16 David Shaw <[email protected]>
* gpgkeys_mailto.in: Add quasi-RFC-2368 mailto:email@addr?from=
syntax so people can set their own email address to respond to.
* gpgkeys_hkp.c (get_key): Properly respond with KEY FAILED (to
gpg) and "key not found" (to user) on failure.
2002-09-13 David Shaw <[email protected]>
* gpgkeys_hkp.c: (search_key, handle_old_hkp_index): Try and
request a machine-readable key index. If the server supports
this, pass it through. If the server does not support it, parse
the "index" page.
2002-09-12 Stefan Bellon <[email protected]>
* gpgkeys_hkp.c: Tidied up RISC OS initializations.
2002-09-12 David Shaw <[email protected]>
* gpgkeys_hkp.c (main): Remove warning - this is no longer
experimental code.
2002-09-09 Werner Koch <[email protected]>
* gpgkeys_hkp.c (send_key, get_key, search_key): Check return
value of malloc.
(dehtmlize): Use ascii_tolower to protect against weird locales.
Cast the argument for isspace for the sake of broken HP/UXes.
(search_key): Check return value of realloc.
2002-09-09 David Shaw <[email protected]>
* gpgkeys_ldap.c (get_key): Some compilers (RISC OS, HPUX c89)
don't like using variables as array initializers.
* gpgkeys_hkp.c (send_key): Use CRLF in headers.
2002-08-28 David Shaw <[email protected]>
* gpgkeys_hkp.c (parse_hkp_index): Use same types on all
platforms. This was probably leftover from earlier code where the
typing mattered.
* gpgkeys_hkp.c: Overall cleanup from iobuf conversion. Be
consistent in m_alloc and malloc usage. Remove include-disabled
(meaningless on HKP). RISC OS tweak.
2002-08-27 David Shaw <[email protected]>
* gpgkeys_hkp.c, Makefile.am: Convert over to using iobufs.
* gpgkeys_hkp.c (http_get, http_post): Use CRLF for line endings.
* gpgkeys_hkp.c: Include util.h on RISC OS as per Stefan. Include
a replacement for hstrerror() for those platforms (such as RISC
OS) that don't have it.
2002-08-26 David Shaw <[email protected]>
* Makefile.am: May as well include gpgkeys_hkp.c in the
distribution now. It works well enough without proxies, and isn't
built by default. It would be good to get some test experience
with it.
* gpgkeys_hkp.c (main): Don't warn about include-subkeys - it
isn't unsupported, it's actually non-meaningful in the context of
HKP (yet).
* gpgkeys_hkp.c (parse_hkp_index, dehtmlize): Move HTML
functionality into new "dehtmlize" function. Remove HTML before
trying to parse each line from the keyserver. If the keyserver
provides key type information in the listing, use it. (Copy over
from g10/hkp.c).
2002-08-19 David Shaw <[email protected]>
* gpgkeys_hkp.c (get_key, parse_hkp_index): Bring over latest code
from g10/hkp.c.
* gpgkeys_ldap.c (get_key): Fix cosmetic URL display problem
(extra ":" at the end).
2002-08-03 Stefan Bellon <[email protected]>
* gpgkeys_ldap.c: Tidied up RISC OS initializations.
2002-07-25 David Shaw <[email protected]>
* gpgkeys_hkp.c: "Warning" -> "WARNING"
2002-07-24 David Shaw <[email protected]>
* Makefile.am: Install keyserver helpers in @GNUPG_LIBEXECDIR@
2002-07-15 David Shaw <[email protected]>
* gpgkeys_ldap.c (send_key, get_key, main): Consult the server
version string to determine whether to use pgpKey or pgpKeyV2.
2002-07-09 David Shaw <[email protected]>
* gpgkeys_mailto.in: Use new OPAQUE tag for non net-path URIs.
Fail more elegantly if there is no email address to send to. Show
the GnuPG version in the message body.
2002-07-04 David Shaw <[email protected]>
* gpgkeys_ldap.c (get_key), gpgkeys_hkp.c (get_key): Display
keyserver URI as a URI, but only if verbose.
2002-07-01 David Shaw <[email protected]>
* gpgkeys_hkp.c (parse_hkp_index): Error if the keyserver returns
an unparseable HKP response.
* gpgkeys_hkp.c (main): Warn on honor-http-proxy,
broken-http-proxy, and include-subkeys (not supported yet).
* gpgkeys_ldap.c (main), gpgkeys_hkp.c (http_connect, main): Fix
some shadowing warnings.
2002-06-11 David Shaw <[email protected]>
* Makefile.am: Don't hard-code the LDAP libraries - get them from
LDAPLIBS via configure. Also, gpgkeys_hkp is a program, not a
script.
2002-06-10 David Shaw <[email protected]>
* gpgkeys_ldap.c (include_subkeys): Default "include-subkeys" to
off, since GnuPG now defaults it to on.
2002-06-06 David Shaw <[email protected]>
* gpgkeys_hkp.c (parse_hkp_index): Type tweaks.
* gpgkeys_hkp.c (main): Add experimental code warning.
2002-06-05 David Shaw <[email protected]>
* Makefile.am, gpgkeys_hkp.c (new): Experimental HKP keyserver
interface.
2002-05-08 David Shaw <[email protected]>
* gpgkeys_ldap.c: Include <lber.h> if we absolutely must. This
helps when compiling against a very old OpenLDAP.
2002-04-29 David Shaw <[email protected]>
* gpgkeys_mailto.in: Properly handle key requests in full
fingerprint form.
2002-03-29 David Shaw <[email protected]>
* gpgkeys_ldap.c (printquoted): Quote backslashes within keyserver
search responses.
2002-02-25 David Shaw <[email protected]>
* gpgkeys_ldap (get_key): LDAP keyservers do not support v3
fingerprints, so error out if someone tries. Actually, they don't
support any fingerprints, but at least we can calculate a keyid
from a v4 fingerprint.
2002-02-23 David Shaw <[email protected]>
* gpgkeys_ldap: Clarify the notion of a partial failure. This is
possible if more than one key is being handled in a batch, and one
fails while the other succeeds. Note that a search that comes up
with no results is not a failure - that is a valid response of "no
answer".
* gpgkeys_ldap.c (get_key): Allow GnuPG to send us full v4
fingerprints, long key ids, or short key ids while fetching.
Since the LDAP server doesn't actually handle fingerprints, chop
them down to long key ids for actual use.
* gpgkeys_ldap.c (main, get_key): When searching for a keyid,
search for subkeys as well as primary keys. This is mostly
significant when automatically fetching the key based on the id in
a header (i.e. "signature made by...."). "no-include-subkeys"
disables.
2002-02-14 David Shaw <[email protected]>
* gpgkeys_ldap.c: Fix compiler warning.
* gpgkeys_ldap.c: Be much more robust with mangled input files.
2001-12-28 David Shaw <[email protected]>
* gpgkeys_mailto.in: Use the new OUTOFBAND indicator so gpg knows
not to try and import anything. Also turn on perl -w for
warnings.
* gpgkeys_ldap.c (main): If we're using temp files (rather than
stdin/stdout), make sure the file is closed when we're done.
2001-12-20 David Shaw <[email protected]>
* Properly free the LDAP response when we're done with it.
* Now that we handle multiple keys, we must remove duplicates as
the LDAP keyserver returns keys with multiple user IDs multiple
times.
* Properly handle multiple keys with the same key ID (it's really
rare, so fetch "0xDEADBEEF" to test this).
2001-12-17 David Shaw <[email protected]>
* gpgkeys_ldap.c, gpgkeys_mailto.in: Fix GNU capitalization
issues. Prefix log messages with "gpgkeys" to clarify which
program is generating them.
2001-12-14 David Shaw <[email protected]>
* gpgkeys_ldap.c (search_key): Use unsigned int rather than uint
for portability.
2001-12-04 David Shaw <[email protected]>
* Initial version of gpgkeys_ldap (LDAP keyserver helper) and
gpgkeys_mailto (email keyserver helper)
Copyright 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
2007 Free Software Foundation, Inc.
This file is free software; as a special exception the author gives
unlimited permission to copy and/or distribute it, with or without
modifications, as long as this notice is preserved.
This file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Local Variables:
buffer-read-only: t
End:
| {
"pile_set_name": "Github"
} |
<!-- Options menu: a single action that opens the project's GitHub repository. -->
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto">
<!-- GitHub action: pinned to the action bar (showAsAction="always"),
     icon ic_github, label from @string/action_git_repo. -->
<item
android:id="@+id/action_github"
android:orderInCategory="100"
app:showAsAction="always"
android:icon="@drawable/ic_github"
android:title="@string/action_git_repo"/>
</menu>
| {
"pile_set_name": "Github"
} |
# W2J-CLI
A JavaWeb command-line framework that helps you easily build a command-line JavaWeb system.
It can be easily combined with any Java framework and has no other dependencies. You don't even need to create an HTML page.
## Why need it
- Some functions are aimed at professional users, and you do not want to invest in developing a front end.
- Quick development of a simple management system
- Providing some tools through a web page
- Quick, simple, fun, geek, and so on
## Get it
##### Maven
```xml
<!-- https://mvnrepository.com/artifact/top.thinkin/w2j-cli-core -->
<dependency>
<groupId>top.thinkin</groupId>
<artifactId>w2j-cli-core</artifactId>
<version>0.1.3</version>
</dependency>
```
##### Gradle
```
// https://mvnrepository.com/artifact/top.thinkin/w2j-cli-core
compile group: 'top.thinkin', name: 'w2j-cli-core', version: '0.1.3'
```
## Some examples
#### Base
Base input and output,Text-table, Confirm,Auto-Complete

create a class and write annotations for root command,commands and parameters
```java
@HJRoot(name="task",help = "task related operation")
public class TaskTest {
@HJCommand(name = "list",help = "get the list of task")
public String list(
@HJValue(name="start",help = "start time. example:2017-12-1")
String start,
@HJValue(name="end",help = "end time. example:2018-12-1")
String end
) throws WjException {
TextTable table = TextTable.create();
table.config("id");
table.config("name");
table.config("status");
table.config("createTime");
table.add(Arrays.asList("1","task-1","open","2017-12-1"));
table.add(Arrays.asList("2","task-2","open","2017-12-2" ));
return View.text(table.buildTable());
}
@HJCommand(name = "stop",ask = true,help = "stop the task")
public String close(
@HJValue(name="id",help = "the task id",req = true)
String id,
@HJContext()
Context context
){
return View.text("task is closed");
}
}
```
#### Help
Based on your annotations, W2J-CLI can automatically generate the help documentation.

#### Login
W2J-CLI have provided a built-in login module.

This codes simulates a simple logon process
```java
public class YesLogin implements WJLogin<Context> {
public final static String AUTH = "LX2F8rdCA2wKel9yR42";
public String login(String root, String pass, Context context) {
if("root".equals(root)&&"pass".equals(pass)){
return View.OK(AUTH);
}else{
return View.error("error");
}
}
public boolean filter(String auth, Context context) {
if(AUTH.equals(auth)){
return true;
}else{
return false;
}
}
}
```
#### Use scripts to get more powerful functions
Users can create scripts to achieve stronger functionality.
Of course, there are some commonly used built-in scripts. For example, with the loop command script you can achieve some animation effects.
```Java
/**
* Circular sending command.
* The result will be displayed on the screen and overlay the last display.
* @param cli
* @param stopPrefix If the prefix is this, the loop will stop
* @param interval Time interval of a request
* @return
*/
String script = ScriptKits.LOOP_CLI("task get -id "+id,"ok",500);
```

## Getting Started
W2J-CLI can be combined with any Java framework, such as Spring, Spring MVC, Struts 2, and so on.
Below is an example of combining it with a plain servlet; you can find other approaches in the wiki.
**build web.xml**
```xml
<servlet>
<servlet-name>DispatcherServlet</servlet-name>
<servlet-class>top.test.web.TestAction</servlet-class>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet>
<servlet-name>HtmlServlet</servlet-name>
<servlet-class>top.test.web.HtmlAction</servlet-class>
<load-on-startup>1</load-on-startup>
</servlet>
<servlet-mapping>
<servlet-name>DispatcherServlet</servlet-name>
<url-pattern>/api/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>HtmlServlet</servlet-name>
<url-pattern>/html</url-pattern>
</servlet-mapping>
```
**build html Servlet**
```java
public class HtmlAction extends HttpServlet {
HTMLConfig config;
public void init() throws ServletException {
try {
// the postUrl is necessary ,if you use built-in login module needLogin must be true
// there has some other configuration items,you can get them in wiki
config = HTMLConfig.cteate().setPostUrl("http://127.0.0.1:8082/api").needLogin(true).build();
} catch (Exception e) {
e.printStackTrace();
}
}
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
req.setCharacterEncoding("UTF-8");
resp.setContentType("text/html;charset=UTF-8");
PrintWriter writer = resp.getWriter();
writer.write(config.html());
writer.close();
}
}
```
**build the handler Servlet**
```java
public class TestAction extends HttpServlet {
CommandManage commandManage;
public void init(){
try {
commandManage = CommandManage.config()
.setLogin(new YesLogin()).add(new HelloTest()).add(new TaskTest());
} catch (Exception e) {
e.printStackTrace();
}
}
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
req.setCharacterEncoding("UTF-8");
Context context = new Context();//the context of your design
String cli = req.getParameter("cli");//get the command line
String auth = req.getParameter("auth");//get the login authcode
String x = null;
try {
x = commandManage.handleCommand(cli,context,auth);
} catch (Exception e) {
e.printStackTrace();
}
resp.setContentType("text/html;charset=UTF-8");
PrintWriter writer = resp.getWriter();
writer.write(x);
writer.close();
}
}
```
### Prerequisites
JDK 1.6+
## Authors
* **Dong Bin** - *Initial work* - [BOLG](http://thinkin.top)
## License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details
## Acknowledgments
* Hat tip to anyone whose code was used
* Inspiration
* etc
| {
"pile_set_name": "Github"
} |
<%-- Admin sidebar page (GBK encoded, Struts 2 tags): lets the administrator
     filter the info list by payment/approval state and category, look up an
     item by ID to mark it as paid, and shows a small calendar include. --%>
<%@ page language="java" contentType="text/html; charset=GBK"%>
<%@ page import="java.util.Map,java.util.TreeMap" %>
<%@ taglib prefix="s2" uri="/struts-tags" %>
<%-- Build the value->label option maps for the radio groups below and
     expose them as request attributes consumed by the s2:radio tags. --%>
<%
Map checkState=new TreeMap();
checkState.put("1","已审核");
checkState.put("0","未审核");
checkState.put("all","全部");
Map payforState=new TreeMap();
payforState.put("1","已付费");
payforState.put("0","未付费");
payforState.put("all","全部");
request.setAttribute("checkState",checkState);
request.setAttribute("payforState",payforState);
%>
<html>
<head><title>后台-侧栏</title></head>
<body background="images/back.gif">
<center>
<table border="0" width="220" height="100%" cellspacing="0" cellpadding="0" style="margin-top:7">
<%-- Section 1: display filter (payment state / approval state / category),
     submitted to admin_ListShow.action. --%>
<!-- 显示方式 -->
<tr height="30" bgcolor="#F0F0F0"><td style="text-indent:5;border:1 solid"><font color="#004790"><b>■显示方式</b></font></td></tr>
<tr height="1"><td></td></tr>
<s2:form action="admin_ListShow.action?" theme="simple">
<tr>
<td align="center" valign="top" style="border:1 solid">
<table border="0" width="220" height="150" rules="all" cellspacing="0">
<tr>
<td align="center" colspan="2">
<fieldset style="height:60;width:210">
<legend>★付费状态</legend>
<br>
<s2:radio list="#request.payforState" name="showType.payforType" value="%{showType.payforType}"/>
</fieldset>
<fieldset style="height:60;width:210">
<legend>★审核状态</legend>
<br>
<s2:radio list="#request.checkState" name="showType.stateType" value="%{showType.stateType}"/>
</fieldset>
</td>
</tr>
<tr align="center" height="30" bgcolor="lightgrey">
<td>
信息类别:
<s2:select
emptyOption="true"
list="#session.typeMap"
name="showType.infoType"/>
<s2:submit value="显示"/>
</td>
</tr>
</table>
</td>
</tr>
</s2:form>
<tr height="5"><td></td></tr>
<%-- Section 2: look up an item by ID to set it to the paid state. --%>
<!-- 设置已付费信息 -->
<%-- FIX: this row previously had two separate style attributes on the <td>
     ("text-indent:5" and "border:1 solid"); HTML parsers drop the duplicate,
     so the border was lost. Merged into one declaration, consistent with
     the other section-header rows on this page. --%>
<tr height="30" bgcolor="#F0F0F0"><td style="text-indent:5;border:1 solid"><font color="#004790"><b>■付费设置</b></font></td></tr>
<tr height="1"><td></td></tr>
<form action="admin_SetMoneyShow.action">
<tr>
<td align="center" valign="top" style="border:1 solid">
<table border="0" width="220" height="80" cellspacing="0" rules="none">
<tr height="25"><td align="center" valign="bottom">请输入要设为已付费状态的信息ID:</td></tr>
<tr height="40">
<td align="center">
<%-- NOTE(review): the request parameter is echoed back unescaped here
     (reflected XSS risk); consider escaping it, e.g. with JSTL
     fn:escapeXml or <c:out>. Left unchanged to avoid adding a taglib. --%>
<input type="text" name="moneyID" value="${param['moneyID']}" size="24"/>
<input type="submit" value="查询"/>
</td>
</tr>
</table>
</td>
</tr>
</form>
<tr height="5"><td></td></tr>
<%-- Section 3: embedded calendar. --%>
<tr height="30" bgcolor="#F0F0F0"><td style="text-indent:5;border:1 solid"><font color="#004790"><b>■日历</b></font></td></tr>
<tr height="1"><td></td></tr>
<!-- 日历 -->
<tr height="130">
<td valign="top" style="border:1 solid"><jsp:include page="/pages/calendar.jsp"/></td>
</tr>
</table>
</center>
</body>
</html>
</html> | {
"pile_set_name": "Github"
} |
/*
This file is part of cpp-ethereum.
cpp-ethereum is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
cpp-ethereum is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
/** @file DB.h
* @author Gav Wood <[email protected]>
* @date 2014
*/
#pragma once
// Silence MSVC-style warnings triggered inside the third-party DB headers
// (4100: unreferenced formal parameter, 4267: size_t narrowing conversion).
#ifndef __APPLE__
#pragma warning(push)
#pragma warning(disable: 4100 4267)
#endif
// Select the key-value store backend at compile time and alias its
// namespace as `ldb`, so the rest of the code is backend-agnostic:
// RocksDB when ETH_ROCKSDB is set, LevelDB otherwise.
#if ETH_ROCKSDB
#include <rocksdb/db.h>
#include <rocksdb/write_batch.h>
namespace ldb = rocksdb;
#else
#include <leveldb/db.h>
#include <leveldb/write_batch.h>
namespace ldb = leveldb;
#endif
#ifndef __APPLE__
#pragma warning(pop)
#endif
// Flag indicating LevelDB-style database support is compiled in.
#define DEV_LDB 1
| {
"pile_set_name": "Github"
} |
{# Article detail template (Pelican/Jinja2): renders a single article inside
   base.html's "content" block, followed by an optional Disqus thread. #}
{% extends "base.html" %}
{% block content %}
<article>
<h2>{{ article.title }}</h2>
{{ article.content }}
{# Author link slug: lowercase the author name and replace spaces with dashes. #}
<h6>Written by <a href="{{ SITEURL }}/author/{{ article.author|lower|replace(' ', '-') }}.html">{{ article.author }}</a> on {{ article.locale_date }}.</h6>
</article>
{# The comment section is emitted only when a Disqus site name is configured. #}
{% if DISQUS_SITENAME %}
<hr/>
<div class="row">
<div class="small-12 columns">
<h3>Comments</h3>
<div id="disqus_thread"></div>
<script type="text/javascript">
var disqus_shortname = '{{ DISQUS_SITENAME }}';
// Standard Disqus embed loader: injects embed.js asynchronously.
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
<a href="http://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>
</div>
</div>
{% endif %}
{% endblock %}
| {
"pile_set_name": "Github"
} |
% seemovie() - see an EEG movie produced by eegmovie()
%
% Usage: >> seemovie(Movie,ntimes,Colormap)
%
% Inputs:
% Movie = Movie matrix returned by eegmovie()
% ntimes = Number of times to display {0 -> -10}
% If ntimes < 0, movie will play forward|backward
% Colormap = Color map returned by eegmovie() {0 -> default}
%
% Author: Scott Makeig & Colin Humphries, CNL / Salk Institute, 6/3/97
%
% See also: eegmovie()
% Copyright (C) 6/3/97 Scott Makeig & Colin Humphries, CNL / Salk Institute, La Jolla CA
%
% This program is free software; you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation; either version 2 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with this program; if not, write to the Free Software
% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
% 10-31-97 changed test for user-defined colormap -ch & sm
% 1-8-98 added '\n' at end, improved help msg -sm
% 01-25-02 reformated help, added link -ad
function seemovie(Movie,ntimes,Colormap)
% Play back an EEG movie produced by eegmovie(); see the help text above.
fps = 10; % projection speed (requested frames per second)
% No movie given: show the help text and bail out.
if nargin<1
help seemovie
return
end
% Defaults: 0 Colormap means "use the built-in map" (checked below);
% ntimes defaults to -10 (play forward and backward endlessly).
if nargin<3
Colormap = 0;
end
if nargin<2
ntimes = -10; % default to playing forward|backward endlessly
end
if ntimes == 0
ntimes = -10;
end
% Fill the whole figure with the movie axes.
clf
axes('Position',[0 0 1 1]);
if size(Colormap,2) == 3 % if colormap user-defined
colormap(Colormap)
else
colormap([jet(64);0 0 0]); % set up the default topoplot color map
end
% Tell the user what is about to happen (sign of ntimes selects the mode).
if ntimes > 0,
fprintf('Movie will play slowly once, then %d times faster.\n',ntimes);
else
fprintf('Movie will play slowly once, then %d times faster forwards and backwards.\n',-ntimes);
end
if abs(ntimes) > 7
fprintf(' Close figure to abort: ');
end
%%%%%%%%%%%%%%%%%%%%%%%% Show the movie %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
movie(Movie,ntimes,fps);
%%%%%%%%%%%%%%%%%%%%%%%% The End %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Finish the progress line started above.
if abs(ntimes) > 7
fprintf('\n');
end
| {
"pile_set_name": "Github"
} |
//! moment.js locale configuration
//! locale : Telugu [te]
//! author : Krishna Chaitanya Thota : https://github.com/kcthota
import moment from '../moment';
// Telugu [te] locale definition: month/weekday names, 12-hour formats with
// a day-period word (token A), relative times, ordinals and week settings.
export default moment.defineLocale('te', {
months : 'జనవరి_ఫిబ్రవరి_మార్చి_ఏప్రిల్_మే_జూన్_జూలై_ఆగస్టు_సెప్టెంబర్_అక్టోబర్_నవంబర్_డిసెంబర్'.split('_'),
monthsShort : 'జన._ఫిబ్ర._మార్చి_ఏప్రి._మే_జూన్_జూలై_ఆగ._సెప్._అక్టో._నవ._డిసె.'.split('_'),
monthsParseExact : true,
weekdays : 'ఆదివారం_సోమవారం_మంగళవారం_బుధవారం_గురువారం_శుక్రవారం_శనివారం'.split('_'),
weekdaysShort : 'ఆది_సోమ_మంగళ_బుధ_గురు_శుక్ర_శని'.split('_'),
weekdaysMin : 'ఆ_సో_మం_బు_గు_శు_శ'.split('_'),
// All time formats use the 12-hour clock with the day-period word first.
longDateFormat : {
LT : 'A h:mm',
LTS : 'A h:mm:ss',
L : 'DD/MM/YYYY',
LL : 'D MMMM YYYY',
LLL : 'D MMMM YYYY, A h:mm',
LLLL : 'dddd, D MMMM YYYY, A h:mm'
},
calendar : {
sameDay : '[నేడు] LT',
nextDay : '[రేపు] LT',
nextWeek : 'dddd, LT',
lastDay : '[నిన్న] LT',
lastWeek : '[గత] dddd, LT',
sameElse : 'L'
},
relativeTime : {
future : '%s లో',
past : '%s క్రితం',
s : 'కొన్ని క్షణాలు',
ss : '%d సెకన్లు',
m : 'ఒక నిమిషం',
mm : '%d నిమిషాలు',
h : 'ఒక గంట',
hh : '%d గంటలు',
d : 'ఒక రోజు',
dd : '%d రోజులు',
M : 'ఒక నెల',
MM : '%d నెలలు',
y : 'ఒక సంవత్సరం',
yy : '%d సంవత్సరాలు'
},
dayOfMonthOrdinalParse : /\d{1,2}వ/,
ordinal : '%dవ',
// The four Telugu day periods recognized while parsing.
meridiemParse: /రాత్రి|ఉదయం|మధ్యాహ్నం|సాయంత్రం/,
// Map a parsed 12-hour value plus day-period word to a 24-hour value.
meridiemHour : function (hour, meridiem) {
if (hour === 12) {
hour = 0;
}
if (meridiem === 'రాత్రి') {
// Night: 12/1/2/3 stay in the early morning, later hours shift to PM.
return hour < 4 ? hour : hour + 12;
} else if (meridiem === 'ఉదయం') {
return hour;
} else if (meridiem === 'మధ్యాహ్నం') {
// Afternoon: 10 and 11 keep their value, earlier hours shift to PM.
return hour >= 10 ? hour : hour + 12;
} else if (meridiem === 'సాయంత్రం') {
return hour + 12;
}
},
// Choose the day-period word for formatting, by 24-hour boundaries:
// [0,4) and [20,24) night, [4,10) morning, [10,17) afternoon, [17,20) evening.
meridiem : function (hour, minute, isLower) {
if (hour < 4) {
return 'రాత్రి';
} else if (hour < 10) {
return 'ఉదయం';
} else if (hour < 17) {
return 'మధ్యాహ్నం';
} else if (hour < 20) {
return 'సాయంత్రం';
} else {
return 'రాత్రి';
}
},
week : {
dow : 0, // Sunday is the first day of the week.
doy : 6 // The week that contains Jan 1st is the first week of the year.
}
});
| {
"pile_set_name": "Github"
} |
// Get duplication information for a project.
package sonargo
import "net/http"
// DuplicationsService wraps the SonarQube "duplications" web service.
type DuplicationsService struct {
client *Client
}
// DuplicationsShowObject is the JSON response payload of duplications/show:
// the duplication groups plus the files they reference.
type DuplicationsShowObject struct {
Duplications []*Duplication `json:"duplications,omitempty"`
Files *Files `json:"files,omitempty"`
}
// Block is one duplicated span: a file reference key plus the starting
// line and the number of lines.
type Block struct {
Ref string `json:"_ref,omitempty"`
From int64 `json:"from,omitempty"`
Size int64 `json:"size,omitempty"`
}
// Duplication is a group of blocks that duplicate each other.
type Duplication struct {
Blocks []*Block `json:"blocks,omitempty"`
}
// Files maps the numeric reference keys ("1", "2", "3") used by Block.Ref
// to file descriptions.
type Files struct {
One *File `json:"1,omitempty"`
Two *File `json:"2,omitempty"`
Three *File `json:"3,omitempty"`
}
// File describes one file involved in a duplication.
type File struct {
Key string `json:"key,omitempty"`
Name string `json:"name,omitempty"`
ProjectName string `json:"projectName,omitempty"`
}
// DuplicationsShowOption holds the query parameters of duplications/show;
// exactly one of Key or Uuid should be provided.
type DuplicationsShowOption struct {
Key string `url:"key,omitempty"` // Description:"File key",ExampleValue:"my_project:/src/foo/Bar.php"
Uuid string `url:"uuid,omitempty"` // Description:"File ID. If provided, 'key' must not be provided.",ExampleValue:"584a89f2-8037-4f7b-b82c-8b45d2d63fb2"
}
// Show retrieves duplication information for a file. Requires Browse
// permission on the file's project.
//
// It validates opt, issues "GET duplications/show" and decodes the JSON
// response into a DuplicationsShowObject.
func (s *DuplicationsService) Show(opt *DuplicationsShowOption) (v *DuplicationsShowObject, resp *http.Response, err error) {
	if err = s.ValidateShowOpt(opt); err != nil {
		return
	}

	var req *http.Request
	if req, err = s.client.NewRequest("GET", "duplications/show", opt); err != nil {
		return
	}

	result := new(DuplicationsShowObject)
	if resp, err = s.client.Do(req, result); err != nil {
		return nil, resp, err
	}
	v = result
	return
}
| {
"pile_set_name": "Github"
} |
/* fre:ac - free audio converter
* Copyright (C) 2001-2020 Robert Kausch <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of
* the License, or (at your option) any later version.
*
* THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. */
#ifndef H_FREAC_MAIN
#define H_FREAC_MAIN
#include <smooth.h>
#include <boca.h>
#include "freac.h"
#include "dialogs/config/config.h"
/* Export StartGUI with the platform's shared-library export/visibility attribute. */
#ifdef __WIN32__
#	define DLLEXPORT __declspec (dllexport)
#else
#	define DLLEXPORT __attribute__ ((visibility ("default")))
#endif
using namespace smooth;
using namespace smooth::GUI;
/* C-linkage entry point, called with the command line arguments. */
extern "C"
{
Int DLLEXPORT StartGUI(const Array<String> &);
}
namespace freac
{
/* Forward declarations to avoid pulling the full headers into this one. */
class JobList;
class LayerJoblist;
class LayerThreads;
class Notification;
/* Main window/controller class of the fre:ac GUI (singleton). */
class freacGUI : public freac
{
private:
/* Singleton class, therefore protected constructor/destructor
*/
freacGUI();
~freacGUI();
/* Main window and its chrome. */
Window *mainWnd;
Titlebar *mainWnd_titlebar;
Statusbar *mainWnd_statusbar;
Menubar *mainWnd_menubar;
Menubar *mainWnd_iconbar;
/* Popup menus shown from the menubar and iconbar. */
PopupMenu *menu_file;
PopupMenu *menu_addsubmenu;
PopupMenu *menu_files;
PopupMenu *menu_drives;
PopupMenu *menu_database;
PopupMenu *menu_database_query;
PopupMenu *menu_options;
PopupMenu *menu_configurations;
PopupMenu *menu_seldrive;
PopupMenu *menu_processing;
PopupMenu *menu_processors;
PopupMenu *menu_encode;
PopupMenu *menu_encoders;
PopupMenu *menu_encoder_options;
PopupMenu *menu_help;
Array<PopupMenu *, Void *> formatMenus;
MenuEntry *allowOverwriteMenuEntry;
Hyperlink *hyperlink;
/* Main tab widget and its layers (job list, running threads). */
TabWidget *tabs_main;
LayerJoblist *tab_layer_joblist;
LayerThreads *tab_layer_threads;
JobList *joblist;
Notification *notification;
/* Indices of the menu entries the user clicked last. */
Int clicked_configuration;
Int clicked_drive;
Int clicked_encoder;
Int clicked_processor;
/* Loaded extension components and their lifecycle helpers. */
Array<BoCA::AS::ExtensionComponent *, Void *> extensionComponents;
Void InitExtensionComponents();
Void FreeExtensionComponents();
/* Internal actions (dialogs, menus, conversion control). */
Void Close();
Void About();
Void ConfigureSettings(ConfigurePage);
Void ConfigureComponent(const String &);
Void ShowHelp();
Void ShowTipOfTheDay();
Void ReportIssue();
Void SuggestFeature();
Void CheckForUpdates();
Bool SetLanguage();
Void FillMenus();
Void Convert();
Void PauseResumeEncoding();
Void StopEncoding();
Void ParseArguments(const Array<String> &);
slots:
/* Window and application event handlers. */
Bool ExitProc();
Void MessageProc(Int, Int, Int);
Void OnChangePosition(const Point &);
Void OnChangeSize(const Size &);
Void OnSelectConfiguration();
Void OnChangeConfiguration();
/* Drive and disc event handlers. */
Void OnDriveChange();
Void OnDiscInsert(Int);
Void OnDiscRemove(Int);
Void ReadCD() { ReadCD(False); }
Void ReadCD(Bool);
/* CDDB related actions. */
Void QueryCDDB();
Void QueryCDDBLater();
Void SubmitCDDBData();
Void ManageCDDBData();
Void ManageCDDBBatchData();
Void ManageCDDBBatchQueries();
/* Job list and option toggles. */
Void AddFilesFromDirectory();
Void AddFilesByPattern();
Void ToggleSignalProcessing();
Void ToggleUseInputDirectory();
Void ToggleEncodeToSingleFile();
Void ConfigureSettings();
Void ConfigureProcessing();
Void ConfigureEncoder();
Void ConfigureProcessor();
Void ConfirmDeleteAfterEncoding();
public:
/* Returns a new or existing instance of freacGUI
*/
static freacGUI *Get();
/* Destroys an existing instance of freacGUI
*/
static Void Free();
};
};
#endif
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) Enalean, 2020 - Present. All Rights Reserved.
*
* This file is a part of Tuleap.
*
* Tuleap is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Tuleap is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Tuleap. If not, see <http://www.gnu.org/licenses/>.
*/
@import '../../../src/themes/tlp/src/scss/variables/variables-green';
@import 'testplan';
| {
"pile_set_name": "Github"
} |
/***********************************************************************
A JavaScript tokenizer / parser / beautifier / compressor.
https://github.com/mishoo/UglifyJS2
-------------------------------- (C) ---------------------------------
Author: Mihai Bazon
<[email protected]>
http://mihai.bazon.net/blog
Distributed under the BSD license:
Copyright 2012 (c) Mihai Bazon <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the following
disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
***********************************************************************/
"use strict";
function DEFNODE(type, props, methods, base) {
if (typeof base === "undefined") base = AST_Node;
props = props ? props.split(/\s+/) : [];
var self_props = props;
if (base && base.PROPS) props = props.concat(base.PROPS);
var code = [
"return function AST_", type, "(props){",
"if(props){",
];
props.forEach(function(prop) {
code.push("this.", prop, "=props.", prop, ";");
});
var proto = base && new base;
if (proto && proto.initialize || methods && methods.initialize) code.push("this.initialize();");
code.push("}}");
var ctor = new Function(code.join(""))();
if (proto) {
ctor.prototype = proto;
ctor.BASE = base;
}
if (base) base.SUBCLASSES.push(ctor);
ctor.prototype.CTOR = ctor;
ctor.PROPS = props || null;
ctor.SELF_PROPS = self_props;
ctor.SUBCLASSES = [];
if (type) {
ctor.prototype.TYPE = ctor.TYPE = type;
}
if (methods) for (var name in methods) if (HOP(methods, name)) {
if (/^\$/.test(name)) {
ctor[name.substr(1)] = methods[name];
} else {
ctor.prototype[name] = methods[name];
}
}
ctor.DEFMETHOD = function(name, method) {
this.prototype[name] = method;
};
if (typeof exports !== "undefined") {
exports["AST_" + type] = ctor;
}
return ctor;
}
// A lexer token: pure data carrier (position, value, surrounding comments).
var AST_Token = DEFNODE("Token", "type value line col pos endline endcol endpos nlb comments_before comments_after file raw", {
}, null);
var AST_Node = DEFNODE("Node", "start end", {
// Deep clone: clone this node, then clone every descendant via a
// transform pass; shallow clone just copies own properties.
_clone: function(deep) {
if (deep) {
var self = this.clone();
return self.transform(new TreeTransformer(function(node) {
if (node !== self) {
return node.clone(true);
}
}));
}
return new this.CTOR(this);
},
clone: function(deep) {
return this._clone(deep);
},
$documentation: "Base class of all AST nodes",
$propdoc: {
start: "[AST_Token] The first token of this node",
end: "[AST_Token] The last token of this node"
},
_walk: function(visitor) {
return visitor._visit(this);
},
walk: function(visitor) {
return this._walk(visitor); // not sure the indirection will be any help
}
}, null);
// Emit a warning through the pluggable warn_function hook, if installed.
AST_Node.warn = function(txt, props) {
if (AST_Node.warn_function) AST_Node.warn_function(string_template(txt, props));
};
/* -----[ statements ]----- */
var AST_Statement = DEFNODE("Statement", null, {
$documentation: "Base class of all statements",
});
var AST_Debugger = DEFNODE("Debugger", null, {
$documentation: "Represents a debugger statement",
}, AST_Statement);
var AST_Directive = DEFNODE("Directive", "value quote", {
$documentation: "Represents a directive, like \"use strict\";",
$propdoc: {
value: "[string] The value of this directive as a plain string (it's not an AST_String!)",
quote: "[string] the original quote character"
},
}, AST_Statement);
var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
$documentation: "A statement consisting of an expression, i.e. a = 1 + 2",
$propdoc: {
body: "[AST_Node] an expression node (should not be instanceof AST_Statement)"
},
// Walking a simple statement just descends into its expression.
_walk: function(visitor) {
return visitor._visit(this, function() {
this.body._walk(visitor);
});
}
}, AST_Statement);
// Walk `node.body`, which may be either a single AST_Statement or an
// array of nodes, dispatching _walk(visitor) on each element.
function walk_body(node, visitor) {
    var body = node.body;
    if (body instanceof AST_Statement) {
        body._walk(visitor);
    } else {
        for (var i = 0, len = body.length; i < len; i++) {
            body[i]._walk(visitor);
        }
    }
}
var AST_Block = DEFNODE("Block", "body", {
$documentation: "A body of statements (usually braced)",
$propdoc: {
body: "[AST_Statement*] an array of statements"
},
_walk: function(visitor) {
return visitor._visit(this, function() {
walk_body(this, visitor);
});
}
}, AST_Statement);
var AST_BlockStatement = DEFNODE("BlockStatement", null, {
$documentation: "A block statement",
}, AST_Block);
var AST_EmptyStatement = DEFNODE("EmptyStatement", null, {
$documentation: "The empty statement (empty block or simply a semicolon)"
}, AST_Statement);
var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {
$documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`",
$propdoc: {
body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement"
}
}, AST_Statement);
var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
$documentation: "Statement with a label",
$propdoc: {
label: "[AST_Label] a label definition"
},
_walk: function(visitor) {
return visitor._visit(this, function() {
this.label._walk(visitor);
this.body._walk(visitor);
});
},
// Deep clone must also re-point any break/continue referencing this
// statement's label at the cloned label definition.
clone: function(deep) {
var node = this._clone(deep);
if (deep) {
var label = node.label;
var def = this.label;
node.walk(new TreeWalker(function(node) {
if (node instanceof AST_LoopControl && node.label && node.label.thedef === def) {
node.label.thedef = label;
label.references.push(node);
}
}));
}
return node;
}
}, AST_StatementWithBody);
var AST_IterationStatement = DEFNODE("IterationStatement", null, {
$documentation: "Internal class. All loops inherit from it."
}, AST_StatementWithBody);
var AST_DWLoop = DEFNODE("DWLoop", "condition", {
$documentation: "Base class for do/while statements",
$propdoc: {
condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement"
}
}, AST_IterationStatement);
// Note: Do walks body before condition, While walks condition before
// body, matching their respective evaluation orders.
var AST_Do = DEFNODE("Do", null, {
$documentation: "A `do` statement",
_walk: function(visitor) {
return visitor._visit(this, function() {
this.body._walk(visitor);
this.condition._walk(visitor);
});
}
}, AST_DWLoop);
var AST_While = DEFNODE("While", null, {
$documentation: "A `while` statement",
_walk: function(visitor) {
return visitor._visit(this, function() {
this.condition._walk(visitor);
this.body._walk(visitor);
});
}
}, AST_DWLoop);
var AST_For = DEFNODE("For", "init condition step", {
$documentation: "A `for` statement",
$propdoc: {
init: "[AST_Node?] the `for` initialization code, or null if empty",
condition: "[AST_Node?] the `for` termination clause, or null if empty",
step: "[AST_Node?] the `for` update clause, or null if empty"
},
_walk: function(visitor) {
return visitor._visit(this, function() {
if (this.init) this.init._walk(visitor);
if (this.condition) this.condition._walk(visitor);
if (this.step) this.step._walk(visitor);
this.body._walk(visitor);
});
}
}, AST_IterationStatement);
var AST_ForIn = DEFNODE("ForIn", "init object", {
$documentation: "A `for ... in` statement",
$propdoc: {
init: "[AST_Node] the `for/in` initialization code",
object: "[AST_Node] the object that we're looping through"
},
_walk: function(visitor) {
return visitor._visit(this, function() {
this.init._walk(visitor);
this.object._walk(visitor);
this.body._walk(visitor);
});
}
}, AST_IterationStatement);
var AST_With = DEFNODE("With", "expression", {
$documentation: "A `with` statement",
$propdoc: {
expression: "[AST_Node] the `with` expression"
},
_walk: function(visitor) {
return visitor._visit(this, function() {
this.expression._walk(visitor);
this.body._walk(visitor);
});
}
}, AST_StatementWithBody);
/* -----[ scope and functions ]----- */
var AST_Scope = DEFNODE("Scope", "variables functions uses_with uses_eval parent_scope enclosed cname", {
    $documentation: "Base class for all statements introducing a lexical scope",
    $propdoc: {
        variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope",
        functions: "[Object/S] like `variables`, but only lists function declarations",
        uses_with: "[boolean/S] tells whether this scope uses the `with` statement",
        uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`",
        parent_scope: "[AST_Scope?/S] link to the parent scope",
        enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes",
        cname: "[integer/S] current index for mangling variables (used internally by the mangler)",
    },
    clone: function(deep) {
        // copy the scope bookkeeping so clones don't share mutable state
        var node = this._clone(deep);
        if (this.variables) node.variables = this.variables.clone();
        if (this.functions) node.functions = this.functions.clone();
        if (this.enclosed) node.enclosed = this.enclosed.slice();
        return node;
    },
    pinned: function() {
        // a "pinned" scope may be inspected dynamically (eval / with), so its
        // names cannot be safely renamed
        return this.uses_eval || this.uses_with;
    }
}, AST_Block);
var AST_Toplevel = DEFNODE("Toplevel", "globals", {
    $documentation: "The toplevel scope",
    $propdoc: {
        globals: "[Object/S] a map of name -> SymbolDef for all undeclared names",
    },
    wrap_commonjs: function(name) {
        // wraps the toplevel body in an IIFE taking `exports`, splicing the
        // original statements in place of the '$ORIG' directive
        var body = this.body;
        var wrapped_tl = "(function(exports){'$ORIG';})(typeof " + name + "=='undefined'?(" + name + "={}):" + name + ");";
        wrapped_tl = parse(wrapped_tl);
        wrapped_tl = wrapped_tl.transform(new TreeTransformer(function(node) {
            if (node instanceof AST_Directive && node.value == "$ORIG") {
                return MAP.splice(body);
            }
        }));
        return wrapped_tl;
    },
    wrap_enclose: function(args_values) {
        // args_values is "args:values" -- text before the first ":" becomes
        // the parameter list, text after it becomes the call arguments
        if (typeof args_values != "string") args_values = "";
        var index = args_values.indexOf(":");
        if (index < 0) index = args_values.length;
        var body = this.body;
        return parse([
            "(function(",
            args_values.slice(0, index),
            '){"$ORIG"})(',
            args_values.slice(index + 1),
            ")"
        ].join("")).transform(new TreeTransformer(function(node) {
            if (node instanceof AST_Directive && node.value == "$ORIG") {
                return MAP.splice(body);
            }
        }));
    }
}, AST_Scope);
var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
    $documentation: "Base class for functions",
    $propdoc: {
        name: "[AST_SymbolDeclaration?] the name of this function",
        argnames: "[AST_SymbolFunarg*] array of function arguments",
        uses_arguments: "[boolean/S] tells whether this function accesses the arguments array"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            if (this.name) this.name._walk(visitor);
            this.argnames.forEach(function(argname) {
                argname._walk(visitor);
            });
            walk_body(this, visitor);
        });
    }
}, AST_Scope);
var AST_Accessor = DEFNODE("Accessor", null, {
    $documentation: "A setter/getter function. The `name` property is always null."
}, AST_Lambda);
var AST_Function = DEFNODE("Function", "inlined", {
    $documentation: "A function expression"
}, AST_Lambda);
var AST_Defun = DEFNODE("Defun", "inlined", {
    $documentation: "A function definition"
}, AST_Lambda);
/* -----[ JUMPS ]----- */
var AST_Jump = DEFNODE("Jump", null, {
    $documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)"
}, AST_Statement);
var AST_Exit = DEFNODE("Exit", "value", {
    $documentation: "Base class for “exits” (`return` and `throw`)",
    $propdoc: {
        value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return"
    },
    _walk: function(visitor) {
        // when value is null, a falsy descend is passed so children are skipped
        return visitor._visit(this, this.value && function() {
            this.value._walk(visitor);
        });
    }
}, AST_Jump);
var AST_Return = DEFNODE("Return", null, {
    $documentation: "A `return` statement"
}, AST_Exit);
var AST_Throw = DEFNODE("Throw", null, {
    $documentation: "A `throw` statement"
}, AST_Exit);
var AST_LoopControl = DEFNODE("LoopControl", "label", {
    $documentation: "Base class for loop control statements (`break` and `continue`)",
    $propdoc: {
        label: "[AST_LabelRef?] the label, or null if none",
    },
    _walk: function(visitor) {
        // unlabeled break/continue has nothing to descend into
        return visitor._visit(this, this.label && function() {
            this.label._walk(visitor);
        });
    }
}, AST_Jump);
var AST_Break = DEFNODE("Break", null, {
    $documentation: "A `break` statement"
}, AST_LoopControl);
var AST_Continue = DEFNODE("Continue", null, {
    $documentation: "A `continue` statement"
}, AST_LoopControl);
/* -----[ IF ]----- */
var AST_If = DEFNODE("If", "condition alternative", {
    $documentation: "A `if` statement",
    $propdoc: {
        condition: "[AST_Node] the `if` condition",
        alternative: "[AST_Statement?] the `else` part, or null if not present"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.condition._walk(visitor);
            this.body._walk(visitor);
            if (this.alternative) this.alternative._walk(visitor);
        });
    }
}, AST_StatementWithBody);
/* -----[ SWITCH ]----- */
var AST_Switch = DEFNODE("Switch", "expression", {
    $documentation: "A `switch` statement",
    $propdoc: {
        expression: "[AST_Node] the `switch` “discriminant”"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expression._walk(visitor);
            // the branches live in the inherited `body` array
            walk_body(this, visitor);
        });
    }
}, AST_Block);
var AST_SwitchBranch = DEFNODE("SwitchBranch", null, {
    $documentation: "Base class for `switch` branches",
}, AST_Block);
var AST_Default = DEFNODE("Default", null, {
    $documentation: "A `default` switch branch",
}, AST_SwitchBranch);
var AST_Case = DEFNODE("Case", "expression", {
    $documentation: "A `case` switch branch",
    $propdoc: {
        expression: "[AST_Node] the `case` expression"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expression._walk(visitor);
            walk_body(this, visitor);
        });
    }
}, AST_SwitchBranch);
/* -----[ EXCEPTIONS ]----- */
var AST_Try = DEFNODE("Try", "bcatch bfinally", {
    $documentation: "A `try` statement",
    $propdoc: {
        bcatch: "[AST_Catch?] the catch block, or null if not present",
        bfinally: "[AST_Finally?] the finally block, or null if not present"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            // the try-block statements are the inherited `body`
            walk_body(this, visitor);
            if (this.bcatch) this.bcatch._walk(visitor);
            if (this.bfinally) this.bfinally._walk(visitor);
        });
    }
}, AST_Block);
var AST_Catch = DEFNODE("Catch", "argname", {
    $documentation: "A `catch` node; only makes sense as part of a `try` statement",
    $propdoc: {
        argname: "[AST_SymbolCatch] symbol for the exception"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.argname._walk(visitor);
            walk_body(this, visitor);
        });
    }
}, AST_Block);
var AST_Finally = DEFNODE("Finally", null, {
    $documentation: "A `finally` node; only makes sense as part of a `try` statement"
}, AST_Block);
/* -----[ VAR ]----- */
var AST_Definitions = DEFNODE("Definitions", "definitions", {
    $documentation: "Base class for `var` nodes (variable declarations/initializations)",
    $propdoc: {
        definitions: "[AST_VarDef*] array of variable definitions"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.definitions.forEach(function(defn) {
                defn._walk(visitor);
            });
        });
    }
}, AST_Statement);
var AST_Var = DEFNODE("Var", null, {
    $documentation: "A `var` statement"
}, AST_Definitions);
var AST_VarDef = DEFNODE("VarDef", "name value", {
    $documentation: "A variable declaration; only appears in a AST_Definitions node",
    $propdoc: {
        name: "[AST_SymbolVar] name of the variable",
        // typo fix: "null of" -> "null if"
        value: "[AST_Node?] initializer, or null if there's no initializer"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.name._walk(visitor);
            if (this.value) this.value._walk(visitor);
        });
    }
});
/* -----[ OTHER ]----- */
var AST_Call = DEFNODE("Call", "expression args", {
    $documentation: "A function call expression",
    $propdoc: {
        expression: "[AST_Node] expression to invoke as function",
        args: "[AST_Node*] array of arguments"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expression._walk(visitor);
            this.args.forEach(function(node) {
                node._walk(visitor);
            });
        });
    }
});
var AST_New = DEFNODE("New", null, {
    $documentation: "An object instantiation. Derives from a function call since it has exactly the same properties"
}, AST_Call);
var AST_Sequence = DEFNODE("Sequence", "expressions", {
    $documentation: "A sequence expression (comma-separated expressions)",
    $propdoc: {
        expressions: "[AST_Node*] array of expressions (at least two)"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expressions.forEach(function(node) {
                node._walk(visitor);
            });
        });
    }
});
var AST_PropAccess = DEFNODE("PropAccess", "expression property", {
    $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`",
    $propdoc: {
        expression: "[AST_Node] the “container” expression",
        property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node"
    }
});
var AST_Dot = DEFNODE("Dot", null, {
    $documentation: "A dotted property access expression",
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            // `property` is a plain string here, so only the container is walked
            this.expression._walk(visitor);
        });
    }
}, AST_PropAccess);
var AST_Sub = DEFNODE("Sub", null, {
    $documentation: "Index-style property access, i.e. `a[\"foo\"]`",
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expression._walk(visitor);
            this.property._walk(visitor);
        });
    }
}, AST_PropAccess);
var AST_Unary = DEFNODE("Unary", "operator expression", {
    $documentation: "Base class for unary expressions",
    $propdoc: {
        operator: "[string] the operator",
        expression: "[AST_Node] expression that this unary operator applies to"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.expression._walk(visitor);
        });
    }
});
var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, {
    $documentation: "Unary prefix expression, i.e. `typeof i` or `++i`"
}, AST_Unary);
var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {
    $documentation: "Unary postfix expression, i.e. `i++`"
}, AST_Unary);
var AST_Binary = DEFNODE("Binary", "operator left right", {
    $documentation: "Binary expression, i.e. `a + b`",
    $propdoc: {
        left: "[AST_Node] left-hand side expression",
        operator: "[string] the operator",
        right: "[AST_Node] right-hand side expression"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.left._walk(visitor);
            this.right._walk(visitor);
        });
    }
});
var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", {
    $documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`",
    $propdoc: {
        condition: "[AST_Node]",
        consequent: "[AST_Node]",
        alternative: "[AST_Node]"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.condition._walk(visitor);
            this.consequent._walk(visitor);
            this.alternative._walk(visitor);
        });
    }
});
var AST_Assign = DEFNODE("Assign", null, {
    $documentation: "An assignment expression — `a = b + 5`",
}, AST_Binary);
/* -----[ LITERALS ]----- */
var AST_Array = DEFNODE("Array", "elements", {
    $documentation: "An array literal",
    $propdoc: {
        elements: "[AST_Node*] array of elements"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.elements.forEach(function(element) {
                element._walk(visitor);
            });
        });
    }
});
var AST_Object = DEFNODE("Object", "properties", {
    $documentation: "An object literal",
    $propdoc: {
        properties: "[AST_ObjectProperty*] array of properties"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            this.properties.forEach(function(prop) {
                prop._walk(visitor);
            });
        });
    }
});
var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
    $documentation: "Base class for literal object properties",
    $propdoc: {
        key: "[string|AST_SymbolAccessor] property name. For ObjectKeyVal this is a string. For getters and setters this is an AST_SymbolAccessor.",
        value: "[AST_Node] property value. For getters and setters this is an AST_Accessor."
    },
    _walk: function(visitor) {
        return visitor._visit(this, function() {
            // only the value is walked; `key` may be a plain string
            this.value._walk(visitor);
        });
    }
});
var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", "quote", {
    $documentation: "A key: value object property",
    $propdoc: {
        quote: "[string] the original quote character"
    }
}, AST_ObjectProperty);
var AST_ObjectSetter = DEFNODE("ObjectSetter", null, {
    $documentation: "An object setter property",
}, AST_ObjectProperty);
var AST_ObjectGetter = DEFNODE("ObjectGetter", null, {
    $documentation: "An object getter property",
}, AST_ObjectProperty);
var AST_Symbol = DEFNODE("Symbol", "scope name thedef", {
    $propdoc: {
        name: "[string] name of this symbol",
        scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)",
        thedef: "[SymbolDef/S] the definition of this symbol"
    },
    $documentation: "Base class for all symbols",
});
var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, {
    $documentation: "The name of a property accessor (setter/getter function)"
}, AST_Symbol);
var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", {
    $documentation: "A declaration symbol (symbol in var, function name or argument, symbol in catch)",
}, AST_Symbol);
var AST_SymbolVar = DEFNODE("SymbolVar", null, {
    $documentation: "Symbol defining a variable",
}, AST_SymbolDeclaration);
var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, {
    $documentation: "Symbol naming a function argument",
}, AST_SymbolVar);
var AST_SymbolDefun = DEFNODE("SymbolDefun", null, {
    $documentation: "Symbol defining a function",
}, AST_SymbolDeclaration);
var AST_SymbolLambda = DEFNODE("SymbolLambda", null, {
    $documentation: "Symbol naming a function expression",
}, AST_SymbolDeclaration);
var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
    $documentation: "Symbol naming the exception in catch",
}, AST_SymbolDeclaration);
var AST_Label = DEFNODE("Label", "references", {
    $documentation: "Symbol naming a label (declaration)",
    $propdoc: {
        references: "[AST_LoopControl*] a list of nodes referring to this label"
    },
    initialize: function() {
        this.references = [];
        // a label acts as its own definition
        this.thedef = this;
    }
}, AST_Symbol);
var AST_SymbolRef = DEFNODE("SymbolRef", null, {
    $documentation: "Reference to some symbol (not definition/declaration)",
}, AST_Symbol);
var AST_LabelRef = DEFNODE("LabelRef", null, {
    $documentation: "Reference to a label symbol",
}, AST_Symbol);
var AST_This = DEFNODE("This", null, {
    $documentation: "The `this` symbol",
}, AST_Symbol);
var AST_Constant = DEFNODE("Constant", null, {
    $documentation: "Base class for all constants",
    getValue: function() {
        return this.value;
    }
});
var AST_String = DEFNODE("String", "value quote", {
    $documentation: "A string literal",
    $propdoc: {
        value: "[string] the contents of this string",
        quote: "[string] the original quote character"
    }
}, AST_Constant);
var AST_Number = DEFNODE("Number", "value literal", {
    $documentation: "A number literal",
    $propdoc: {
        value: "[number] the numeric value",
        literal: "[string] numeric value as string (optional)"
    }
}, AST_Constant);
var AST_RegExp = DEFNODE("RegExp", "value", {
    $documentation: "A regexp literal",
    $propdoc: {
        value: "[RegExp] the actual regexp"
    }
}, AST_Constant);
var AST_Atom = DEFNODE("Atom", null, {
    $documentation: "Base class for atoms",
}, AST_Constant);
var AST_Null = DEFNODE("Null", null, {
    $documentation: "The `null` atom",
    value: null
}, AST_Atom);
var AST_NaN = DEFNODE("NaN", null, {
    $documentation: "The impossible value",
    value: 0/0
}, AST_Atom);
var AST_Undefined = DEFNODE("Undefined", null, {
    $documentation: "The `undefined` value",
    value: function(){}()  // calling an empty function yields undefined
}, AST_Atom);
var AST_Hole = DEFNODE("Hole", null, {
    $documentation: "A hole in an array",
    value: function(){}()  // undefined, same trick as AST_Undefined
}, AST_Atom);
var AST_Infinity = DEFNODE("Infinity", null, {
    $documentation: "The `Infinity` value",
    value: 1/0
}, AST_Atom);
var AST_Boolean = DEFNODE("Boolean", null, {
    $documentation: "Base class for booleans",
}, AST_Atom);
var AST_False = DEFNODE("False", null, {
    $documentation: "The `false` atom",
    value: false
}, AST_Boolean);
var AST_True = DEFNODE("True", null, {
    $documentation: "The `true` atom",
    value: true
}, AST_Boolean);
/* -----[ TreeWalker ]----- */
function TreeWalker(callback) {
    // callback(node, descend) is invoked for every visited node; a truthy
    // return value suppresses the automatic descent into children
    this.visit = callback;
    this.stack = [];
    // prototype-chained map of directive name -> AST_Directive, one layer per lambda
    this.directives = Object.create(null);
}
TreeWalker.prototype = {
    _visit: function(node, descend) {
        this.push(node);
        var ret = this.visit(node, descend ? function() {
            descend.call(node);
        } : noop);
        // descend automatically unless the callback claimed the traversal
        if (!ret && descend) {
            descend.call(node);
        }
        this.pop();
        return ret;
    },
    // parent(0) is the immediate parent of the current node, parent(1) the grandparent, ...
    parent: function(n) {
        return this.stack[this.stack.length - 2 - (n || 0)];
    },
    push: function(node) {
        if (node instanceof AST_Lambda) {
            // entering a function opens a new directive scope that inherits
            // the enclosing directives via the prototype chain
            this.directives = Object.create(this.directives);
        } else if (node instanceof AST_Directive && !this.directives[node.value]) {
            this.directives[node.value] = node;
        }
        this.stack.push(node);
    },
    pop: function() {
        if (this.stack.pop() instanceof AST_Lambda) {
            // leaving a function restores the enclosing directive scope
            this.directives = Object.getPrototypeOf(this.directives);
        }
    },
    self: function() {
        return this.stack[this.stack.length - 1];
    },
    // nearest enclosing node that is an instance of `type`, or undefined
    find_parent: function(type) {
        var stack = this.stack;
        for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof type) return x;
        }
    },
    has_directive: function(type) {
        var dir = this.directives[type];
        if (dir) return dir;
        // also check the directive prologue of the scope currently on top of
        // the stack, whose directives have not been pushed yet
        var node = this.stack[this.stack.length - 1];
        if (node instanceof AST_Scope) {
            for (var i = 0; i < node.body.length; ++i) {
                var st = node.body[i];
                if (!(st instanceof AST_Directive)) break;
                if (st.value == type) return st;
            }
        }
    },
    // resolves what a break/continue jumps to: the labeled statement's body
    // for labeled jumps, otherwise the nearest loop (or switch, for break)
    loopcontrol_target: function(node) {
        var stack = this.stack;
        if (node.label) for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof AST_LabeledStatement && x.label.name == node.label.name)
                return x.body;
        } else for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof AST_IterationStatement
                || node instanceof AST_Break && x instanceof AST_Switch)
                return x;
        }
    },
    // true when the current node's value is only ever consumed as a boolean
    // (e.g. an if/loop condition, or the operand of `!`)
    in_boolean_context: function() {
        var self = this.self();
        for (var i = 0, p; p = this.parent(i); i++) {
            if (p instanceof AST_SimpleStatement
                || p instanceof AST_Conditional && p.condition === self
                || p instanceof AST_DWLoop && p.condition === self
                || p instanceof AST_For && p.condition === self
                || p instanceof AST_If && p.condition === self
                || p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self) {
                return true;
            }
            // keep climbing while the value flows through to the parent expression
            if (p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||")
                || p instanceof AST_Conditional
                || p.tail_node() === self) {
                self = p;
            } else {
                return false;
            }
        }
    }
};
| {
"pile_set_name": "Github"
} |
---
layout: page
title: "Q31297: A GOTO Inside SELECT CASE Incorrectly Executes ELSE Block"
permalink: /pubs/pc/reference/microsoft/kb/Q31297/
---
## Q31297: A GOTO Inside SELECT CASE Incorrectly Executes ELSE Block
Article: Q31297
Version(s): 4.00 4.00b
Operating System: MS-DOS
Flags: ENDUSER | buglist4.00 buglist4.00b fixlist4.50 B_BasicCom
Last Modified: 5-DEC-1989
In a compiled .EXE program, if a GOTO statement is executed inside a
SELECT CASE block, the CASE containing the GOTO executes as expected,
but the CASE ELSE block is then also (incorrectly) executed.
However, the program behaves correctly when run in the QB.EXE editor.
Microsoft has confirmed this to be a problem in QuickBASIC Versions
4.00 and 4.00b and in the Microsoft BASIC Compiler Versions 6.00 and
6.00b for MS-DOS and OS/2 (buglist6.00, buglist6.00b). This problem
was corrected in QuickBASIC Version 4.50 and in the Microsoft BASIC
Compiler Version 7.00 (fixlist7.00).
The following code example demonstrates this problem:
x=1
select case x
case 1
print "one"
goto 100
100: print "hundred"
case 2
print "two"
case else
print "else"
end select
The (incorrect) output from the above program as an .EXE is as
follows:
one
hundred
else
The (correct) output when run in the QuickBASIC environment is as
follows:
one
hundred
You can work around the problem of using GOTO and a label in a CASE by
ending that CASE with a GOTO to a label that is after the END SELECT
statement, as follows:
x=1
select case x
case 1
goto 100
print "never prints"
100: print "hundred"
goto 200 ' This GOTO is the workaround solution.
case 2
print "two"
case else
print "else"
end select
200: print "After END SELECT"
| {
"pile_set_name": "Github"
} |
//----------------------------------------------------------------------
// File: kd_pr_search.h
// Programmer: Sunil Arya and David Mount
// Description: Priority kd-tree search
// Last modified: 01/04/05 (Version 1.0)
//----------------------------------------------------------------------
// Copyright (c) 1997-2005 University of Maryland and Sunil Arya and
// David Mount. All Rights Reserved.
//
// This software and related documentation is part of the Approximate
// Nearest Neighbor Library (ANN). This software is provided under
// the provisions of the Lesser GNU Public License (LGPL). See the
// file ../ReadMe.txt for further information.
//
// The University of Maryland (U.M.) and the authors make no
// representations about the suitability or fitness of this software for
// any purpose. It is provided "as is" without express or implied
// warranty.
//----------------------------------------------------------------------
// History:
// Revision 0.1 03/04/98
// Initial release
//----------------------------------------------------------------------
#ifndef ANN_kd_pr_search_H
#define ANN_kd_pr_search_H
#include "kd_tree.h" // kd-tree declarations
#include "kd_util.h" // kd-tree utilities
#include "pr_queue.h" // priority queue declarations
#include "pr_queue_k.h" // k-element priority queue
#include <ANN/ANNperf.h> // performance evaluation
//----------------------------------------------------------------------
//		Global variables
//		Active for the life of each call to Appx_Near_Neigh() or
//		Appx_k_Near_Neigh().
//		NOTE(review): as file-scope globals shared by every search call,
//		concurrent searches from multiple threads would race on these --
//		confirm single-threaded use.
//----------------------------------------------------------------------
extern double			ANNprEps;		// the error bound
extern int				ANNprDim;		// dimension of space
extern ANNpoint			ANNprQ;			// query point
extern double			ANNprMaxErr;	// max tolerable squared error
extern ANNpointArray	ANNprPts;		// the points
extern ANNpr_queue		*ANNprBoxPQ;	// priority queue for boxes
extern ANNmin_k			*ANNprPointMK;	// set of k closest points
#endif
| {
"pile_set_name": "Github"
} |
# Source.actorRefWithBackpressure
Materialize an `ActorRef` of the classic actors API; sending messages to it will emit them on the stream. The source acknowledges reception after emitting a message, to provide back pressure from the source.
@ref[Actor interop operators](../index.md#actor-interop-operators)
## Signature
@apidoc[Source.actorRefWithBackpressure](Source$) { scala="#actorRefWithBackpressure[T](ackMessage:Any,completionMatcher:PartialFunction[Any,akka.stream.CompletionStrategy],failureMatcher:PartialFunction[Any,Throwable]):akka.stream.scaladsl.Source[T,akka.actor.ActorRef]" java="#actorRefWithBackpressure(java.lang.Object,akka.japi.function.Function,akka.japi.function.Function)" }
## Description
Materialize an `ActorRef`, sending messages to it will emit them on the stream. The actor responds with the provided ack message
once the element has been emitted, allowing for backpressure from the source. Sending another message before the previous one has been acknowledged will fail the stream.
See also:
* @ref[Source.actorRef](../Source/actorRef.md) This operator without backpressure control
* @ref[ActorSource.actorRef](actorRef.md) The operator for the new actors API without backpressure control
* @ref[ActorSource.actorRefWithBackpressure](actorRefWithBackpressure.md) The corresponding operator for the new actors API
* @ref[Source.queue](../Source/queue.md) Materialize a `SourceQueue` onto which elements can be pushed for emitting from the source
## Examples
Scala
: @@snip [actorRef.scala](/akka-docs/src/test/scala/docs/stream/operators/SourceOperators.scala) { #actorRefWithBackpressure }
Java
: @@snip [actorRef.java](/akka-docs/src/test/java/jdocs/stream/operators/SourceDocExamples.java) { #actor-ref-imports #actorRefWithBackpressure }
## Reactive Streams semantics
@@@div { .callout }
**emits** when there is demand and there are messages in the buffer or a message is sent to the `ActorRef`
**completes** when the passed completion matcher returns a `CompletionStrategy` or fails if the passed failure matcher returns an exception
@@@
| {
"pile_set_name": "Github"
} |
:: Change into the Android SDK tools directory, start the emulator AVD,
:: then change into the demo project workspace.
cd C:\adt32\sdk\tools
:: NOTE(review): the trailing "&" is Unix background syntax (a no-op/error in
:: cmd.exe), and "tools emulator" looks like it should be just "emulator"
:: once inside the tools directory -- confirm the intended shell.
tools emulator -avd avd_api_14 &
cd C:\android\workspace\AppCameraDemo2
| {
"pile_set_name": "Github"
} |
/*
* Copyright 1999-2020 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package naming_client
import (
"encoding/json"
"errors"
"reflect"
"time"
"github.com/nacos-group/nacos-sdk-go/clients/cache"
"github.com/nacos-group/nacos-sdk-go/common/logger"
"github.com/nacos-group/nacos-sdk-go/model"
"github.com/nacos-group/nacos-sdk-go/util"
)
// HostReactor maintains the client-side view of services registered in Nacos:
// an in-memory cache, a disk-backed copy, and subscriber change notifications.
type HostReactor struct {
	serviceInfoMap       cache.ConcurrentMap // cache key -> model.Service snapshot
	cacheDir             string              // directory for the on-disk service cache
	updateThreadNum      int                 // max concurrent refresh goroutines
	serviceProxy         NamingProxy         // server-side query interface
	pushReceiver         PushReceiver        // receives server push updates
	subCallback          SubscribeCallback   // subscriber notification dispatcher
	updateTimeMap        cache.ConcurrentMap // cache key -> last refresh time (ms)
	updateCacheWhenEmpty bool                // if false, updates with an empty host list do not overwrite an existing entry
}

// Default_Update_Thread_Num is used when the caller passes a non-positive updateThreadNum.
const Default_Update_Thread_Num = 20
// NewHostReactor builds a HostReactor backed by the given naming proxy.
// updateThreadNum caps concurrent refresh goroutines (non-positive values
// fall back to Default_Update_Thread_Num). Unless notLoadCacheAtStart is set,
// the disk cache is loaded up front; a background refresh loop is started.
func NewHostReactor(serviceProxy NamingProxy, cacheDir string, updateThreadNum int, notLoadCacheAtStart bool, subCallback SubscribeCallback, updateCacheWhenEmpty bool) HostReactor {
	if updateThreadNum <= 0 {
		updateThreadNum = Default_Update_Thread_Num
	}
	hr := HostReactor{
		serviceProxy:         serviceProxy,
		cacheDir:             cacheDir,
		updateThreadNum:      updateThreadNum,
		serviceInfoMap:       cache.NewConcurrentMap(),
		subCallback:          subCallback,
		updateTimeMap:        cache.NewConcurrentMap(),
		updateCacheWhenEmpty: updateCacheWhenEmpty,
	}
	// the push receiver keeps a pointer back to hr so pushed updates land in
	// the same service map
	pr := NewPushRecevier(&hr)
	hr.pushReceiver = *pr
	if !notLoadCacheAtStart {
		hr.loadCacheFromDisk()
	}
	// periodic refresh loop runs for the lifetime of the process
	go hr.asyncUpdateService()
	return hr
}
// loadCacheFromDisk seeds the in-memory service map from the on-disk cache,
// so lookups can be served before the first refresh from the server.
func (hr *HostReactor) loadCacheFromDisk() {
	serviceMap := cache.ReadServicesFromFile(hr.cacheDir)
	// len() on a nil map is 0, so one check covers both "no file" and "empty file".
	if len(serviceMap) == 0 {
		return
	}
	for k, v := range serviceMap {
		hr.serviceInfoMap.Set(k, v)
	}
}
// ProcessServiceJson parses a service query/push payload and merges it into
// the local cache. When the service is new or its host list changed, it also
// persists the service to disk and fires the subscriber callbacks.
func (hr *HostReactor) ProcessServiceJson(result string) {
	service := util.JsonToService(result)
	if service == nil {
		return
	}
	cacheKey := util.GetServiceCacheKey(service.Name, service.Clusters)
	oldDomain, ok := hr.serviceInfoMap.Get(cacheKey)
	if ok && !hr.updateCacheWhenEmpty {
		// if the instance list is empty, do not overwrite the cached entry
		// (len of a nil slice is 0, so no separate nil check is needed)
		if len(service.Hosts) == 0 {
			logger.Errorf("do not have useful host, ignore it, name:%s", service.Name)
			return
		}
	}
	hr.updateTimeMap.Set(cacheKey, uint64(util.CurrentMillis()))
	hr.serviceInfoMap.Set(cacheKey, *service)
	// `!ok || ok && !x` simplifies to `!ok || !x`; short-circuit keeps the
	// type assertion on oldDomain safe when the entry was missing.
	if !ok || !reflect.DeepEqual(service.Hosts, oldDomain.(model.Service).Hosts) {
		if !ok {
			logger.Info("service not found in cache " + cacheKey)
		} else {
			// Infof (not Info) so the %s verbs are actually expanded.
			logger.Infof("service key:%s was updated to:%s", cacheKey, util.ToJsonString(service))
		}
		cache.WriteServicesToFile(*service, hr.cacheDir)
		hr.subCallback.ServiceChanged(service)
	}
}
// GetServiceInfo returns the cached service for serviceName/clusters,
// triggering a synchronous refresh on a cache miss. It returns an error if
// the service is still unknown after the refresh.
func (hr *HostReactor) GetServiceInfo(serviceName string, clusters string) (model.Service, error) {
	cacheKey := util.GetServiceCacheKey(serviceName, clusters)
	if cached, found := hr.serviceInfoMap.Get(cacheKey); found {
		return cached.(model.Service), nil
	}
	// miss: query the server once, then re-check the cache
	hr.updateServiceNow(serviceName, clusters)
	cached, found := hr.serviceInfoMap.Get(cacheKey)
	if !found {
		return model.Service{}, errors.New("get service info failed")
	}
	return cached.(model.Service), nil
}
// GetAllServiceInfo fetches one page of the service list for the given
// namespace/group from the server. On any failure it logs the problem and
// returns an empty model.ServiceList.
func (hr *HostReactor) GetAllServiceInfo(nameSpace, groupName string, pageNo, pageSize uint32) model.ServiceList {
	data := model.ServiceList{}
	result, err := hr.serviceProxy.GetAllServiceInfoList(nameSpace, groupName, pageNo, pageSize)
	if err != nil {
		logger.Errorf("GetAllServiceInfoList return error!nameSpace:%s groupName:%s pageNo:%d, pageSize:%d err:%+v",
			nameSpace, groupName, pageNo, pageSize, err)
		return data
	}
	if result == "" {
		logger.Errorf("GetAllServiceInfoList result is empty!nameSpace:%s groupName:%s pageNo:%d, pageSize:%d",
			nameSpace, groupName, pageNo, pageSize)
		return data
	}
	// include the unmarshal error in the log; it was previously dropped
	if err = json.Unmarshal([]byte(result), &data); err != nil {
		logger.Errorf("GetAllServiceInfoList result json.Unmarshal error!nameSpace:%s groupName:%s pageNo:%d, pageSize:%d err:%+v",
			nameSpace, groupName, pageNo, pageSize, err)
		return data
	}
	return data
}
// updateServiceNow synchronously queries the server for the latest instance
// list of serviceName/clusters and merges the response into the local cache.
func (hr *HostReactor) updateServiceNow(serviceName, clusters string) {
	result, err := hr.serviceProxy.QueryList(serviceName, clusters, hr.pushReceiver.port, false)
	if err != nil {
		logger.Errorf("QueryList return error!serviceName:%s cluster:%s err:%+v", serviceName, clusters, err)
		return
	}
	if result == "" {
		logger.Errorf("QueryList result is empty!serviceName:%s cluster:%s", serviceName, clusters)
		return
	}
	hr.ProcessServiceJson(result)
}
// asyncUpdateService runs forever (started from NewHostReactor), polling once
// per second and re-querying every cached service whose CacheMillis interval
// has elapsed. The semaphore caps concurrent refreshes at updateThreadNum.
func (hr *HostReactor) asyncUpdateService() {
	sema := util.NewSemaphore(hr.updateThreadNum)
	for {
		for _, v := range hr.serviceInfoMap.Items() {
			service := v.(model.Service)
			lastRefTime, ok := hr.updateTimeMap.Get(util.GetServiceCacheKey(service.Name, service.Clusters))
			if !ok {
				// never refreshed: treat as stale so it refreshes immediately
				lastRefTime = uint64(0)
			}
			if uint64(util.CurrentMillis())-lastRefTime.(uint64) > service.CacheMillis {
				sema.Acquire()
				// `service` is re-declared each iteration, so the closure
				// captures the correct per-iteration value
				go func() {
					hr.updateServiceNow(service.Name, service.Clusters)
					sema.Release()
				}()
			}
		}
		time.Sleep(1 * time.Second)
	}
}
| {
"pile_set_name": "Github"
} |
GL_SGIX_texture_add_env
http://www.opengl.org/registry/specs/SGIX/texture_env_add.txt
GL_SGIX_texture_add_env
| {
"pile_set_name": "Github"
} |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
import io
import os
import sys
import warnings
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.tests import assert_str_equal
from matplotlib.testing.decorators import cleanup, knownfailureif
import matplotlib.colors as mcolors
from nose.tools import assert_true, assert_raises, assert_equal
from nose.plugins.skip import SkipTest
import nose
from itertools import chain
import numpy as np
from matplotlib.rcsetup import (validate_bool_maybe_none,
validate_stringlist,
validate_bool,
validate_nseq_int,
validate_nseq_float)
# Module-level test fixtures: force deterministic rc values before tests run.
mpl.rc('text', usetex=False)
mpl.rc('lines', linewidth=22)
# An rc file alongside this module; loading it sets lines.linewidth to 33.
fname = os.path.join(os.path.dirname(__file__), 'test_rcparams.rc')
def test_rcparams():
    """Check mpl.rc_context (dict, file, and dict+file forms) and mpl.rc_file:
    rc values must apply inside the context and be restored afterwards."""
    usetex = mpl.rcParams['text.usetex']
    linewidth = mpl.rcParams['lines.linewidth']
    # test context given dictionary
    with mpl.rc_context(rc={'text.usetex': not usetex}):
        assert mpl.rcParams['text.usetex'] == (not usetex)
    assert mpl.rcParams['text.usetex'] == usetex
    # test context given filename (the rc file sets lines.linewidth to 33)
    with mpl.rc_context(fname=fname):
        assert mpl.rcParams['lines.linewidth'] == 33
    assert mpl.rcParams['lines.linewidth'] == linewidth
    # test context given filename and dictionary (the dict wins over the file)
    with mpl.rc_context(fname=fname, rc={'lines.linewidth': 44}):
        assert mpl.rcParams['lines.linewidth'] == 44
    assert mpl.rcParams['lines.linewidth'] == linewidth
    # test rc_file: applies permanently, so restore manually in finally
    try:
        mpl.rc_file(fname)
        assert mpl.rcParams['lines.linewidth'] == 33
    finally:
        mpl.rcParams['lines.linewidth'] = linewidth
def test_RcParams_class():
    """Check RcParams' pretty repr/str output and its find_all() helper."""
    rc = mpl.RcParams({'font.cursive': ['Apple Chancery',
                                        'Textile',
                                        'Zapf Chancery',
                                        'cursive'],
                       'font.family': 'sans-serif',
                       'font.weight': 'normal',
                       'font.size': 12})
    # expected repr differs between Python 3 and Python 2 (u'' literals)
    if six.PY3:
        expected_repr = """
RcParams({'font.cursive': ['Apple Chancery',
                           'Textile',
                           'Zapf Chancery',
                           'cursive'],
          'font.family': ['sans-serif'],
          'font.size': 12.0,
          'font.weight': 'normal'})""".lstrip()
    else:
        expected_repr = """
RcParams({u'font.cursive': [u'Apple Chancery',
                            u'Textile',
                            u'Zapf Chancery',
                            u'cursive'],
          u'font.family': [u'sans-serif'],
          u'font.size': 12.0,
          u'font.weight': u'normal'})""".lstrip()
    assert_str_equal(expected_repr, repr(rc))
    if six.PY3:
        expected_str = """
font.cursive: ['Apple Chancery', 'Textile', 'Zapf Chancery', 'cursive']
font.family: ['sans-serif']
font.size: 12.0
font.weight: normal""".lstrip()
    else:
        expected_str = """
font.cursive: [u'Apple Chancery', u'Textile', u'Zapf Chancery', u'cursive']
font.family: [u'sans-serif']
font.size: 12.0
font.weight: normal""".lstrip()
    assert_str_equal(expected_str, str(rc))
    # test the find_all functionality
    assert ['font.cursive', 'font.size'] == sorted(rc.find_all('i[vz]').keys())
    assert ['font.family'] == list(six.iterkeys(rc.find_all('family')))
def test_rcparams_update():
    """rc.update() must validate values before storing them."""
    if sys.version_info[:2] < (2, 7):
        raise nose.SkipTest("assert_raises as context manager "
                            "not supported with Python < 2.7")
    params = mpl.RcParams({'figure.figsize': (3.5, 42)})
    invalid = {'figure.figsize': (3.5, 42, 1)}  # three elements: not a size
    # Updating with a malformed value must raise, not silently store it.
    with assert_raises(ValueError):
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    message='.*(validate)',
                                    category=UserWarning)
            params.update(invalid)
def test_rcparams_init():
    """The RcParams constructor must validate values, like update()."""
    if sys.version_info[:2] < (2, 7):
        raise nose.SkipTest("assert_raises as context manager "
                            "not supported with Python < 2.7")
    # A 3-tuple is not a valid figure size; construction must fail loudly.
    with assert_raises(ValueError):
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    message='.*(validate)',
                                    category=UserWarning)
            mpl.RcParams({'figure.figsize': (3.5, 42, 1)})
@cleanup
def test_Bug_2543():
    # Test that it is possible to add all values to itself / deepcopy.
    # This was not possible because validate_bool_maybe_none did not
    # accept None as an argument.
    # https://github.com/matplotlib/matplotlib/issues/2543
    # We filter warnings at this stage since a number of them are raised
    # for deprecated rcparams, as they should be. We don't want these
    # printed in the test suite output.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore',
                                message='.*(deprecated|obsolete)',
                                category=UserWarning)
        # Round-trip every rcparam through its own validator.
        with mpl.rc_context():
            _copy = mpl.rcParams.copy()
            for key in six.iterkeys(_copy):
                mpl.rcParams[key] = _copy[key]
            mpl.rcParams['text.dvipnghack'] = None
        # deepcopy of the whole RcParams must also succeed.
        with mpl.rc_context():
            from copy import deepcopy
            _deep_copy = deepcopy(mpl.rcParams)
        # real test is that this does not raise
        assert_true(validate_bool_maybe_none(None) is None)
        assert_true(validate_bool_maybe_none("none") is None)
        # svg.embed_char_paths is an alias view of svg.fonttype: reading
        # either gives the same value, and writing False to the alias
        # maps to fonttype "none".
        _fonttype = mpl.rcParams['svg.fonttype']
        assert_true(_fonttype == mpl.rcParams['svg.embed_char_paths'])
        with mpl.rc_context():
            mpl.rcParams['svg.embed_char_paths'] = False
            assert_true(mpl.rcParams['svg.fonttype'] == "none")
@cleanup
def test_Bug_2543_newer_python():
    """Validator error cases for issue #2543, split out because they need
    the assert_raises context manager (Python >= 2.7 only)."""
    if sys.version_info[:2] < (2, 7):
        raise nose.SkipTest("assert_raises as context manager not supported with Python < 2.7")
    from matplotlib.rcsetup import validate_bool_maybe_none, validate_bool
    # Each validator must reject its respective bad input.
    for validator, bad_value in ((validate_bool_maybe_none, "blah"),
                                 (validate_bool, None)):
        with assert_raises(ValueError):
            validator(bad_value)
    # Assigning True to svg.fonttype must also be rejected.
    with assert_raises(ValueError):
        with mpl.rc_context():
            mpl.rcParams['svg.fonttype'] = True
@cleanup
def _legend_rcparam_helper(param_dict, target, get_func):
    """Build a one-line legend under ``param_dict`` rc settings and check
    that calling ``get_func`` on the legend patch yields ``target``."""
    with mpl.rc_context(param_dict):
        fig, axes = plt.subplots()
        axes.plot(range(3), label='test')
        legend = axes.legend()
        assert_equal(getattr(legend.legendPatch, get_func)(), target)
def test_legend_facecolor():
    """Generator test for the legend.facecolor rcparam: an explicit color
    is used directly, and 'inherit' falls back to axes.facecolor."""
    get_func = 'get_facecolor'
    rcparam = 'legend.facecolor'
    # (rc settings, expected RGBA of the legend patch)
    test_values = [({rcparam: 'r'},
                    mcolors.colorConverter.to_rgba('r')),
                   ({rcparam: 'inherit',
                     'axes.facecolor': 'r'
                     },
                    mcolors.colorConverter.to_rgba('r')),
                   ({rcparam: 'g',
                     'axes.facecolor': 'r'},
                    mcolors.colorConverter.to_rgba('g'))
                   ]
    for rc_dict, target in test_values:
        yield _legend_rcparam_helper, rc_dict, target, get_func
def test_legend_edgecolor():
    """Generator test for the legend.edgecolor rcparam: an explicit color
    is used directly, and 'inherit' falls back to axes.edgecolor."""
    get_func = 'get_edgecolor'
    rcparam = 'legend.edgecolor'
    # (rc settings, expected RGBA of the legend patch)
    test_values = [({rcparam: 'r'},
                    mcolors.colorConverter.to_rgba('r')),
                   ({rcparam: 'inherit',
                     'axes.edgecolor': 'r'
                     },
                    mcolors.colorConverter.to_rgba('r')),
                   # NOTE(review): 'axes.facecolor' below looks like a
                   # copy-paste from the facecolor test ('axes.edgecolor'
                   # would match the pattern). Harmless here since the
                   # explicit rcparam value wins regardless — confirm.
                   ({rcparam: 'g',
                     'axes.facecolor': 'r'},
                    mcolors.colorConverter.to_rgba('g'))
                   ]
    for rc_dict, target in test_values:
        yield _legend_rcparam_helper, rc_dict, target, get_func
def test_Issue_1713():
    """rc files must be readable regardless of the locale implied by LANG.

    Reads a UTF-32-BE encoded rc file with LANG forced to a UTF-32-BE
    locale and checks that the parameters still parse correctly
    (https://github.com/matplotlib/matplotlib/issues/1713).
    """
    utf32_be = os.path.join(os.path.dirname(__file__),
                            'test_utf32_be_rcparams.rc')
    old_lang = os.environ.get('LANG', None)
    os.environ['LANG'] = 'en_US.UTF-32-BE'
    try:
        rc = mpl.rc_params_from_file(utf32_be, True)
    finally:
        # Restore the environment even if parsing raised. Use an identity
        # check so an originally-empty LANG string is restored rather
        # than deleted (the old truthiness test deleted it).
        if old_lang is not None:
            os.environ['LANG'] = old_lang
        else:
            del os.environ['LANG']
    assert rc.get('timezone') == 'UTC'
# NOTE(review): this __main__ guard sits in the middle of the module; when
# the file is executed as a script, the test functions defined below this
# point do not exist yet at the moment nose.runmodule() is called, so they
# cannot be collected — confirm whether this block should move to the end
# of the file.
if __name__ == '__main__':
    nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
def _validation_test_helper(validator, arg, target):
    """Check that ``validator(arg)`` produces exactly ``target``."""
    assert_equal(validator(arg), target)
def _validation_fail_helper(validator, arg, exception_type):
    """Check that ``validator(arg)`` raises ``exception_type``."""
    if sys.version_info[:2] < (2, 7):
        raise nose.SkipTest("assert_raises as context manager not "
                            "supported with Python < 2.7")
    with assert_raises(exception_type):
        validator(arg)
def test_validators():
    """Generator test: drive every (validator, input, expectation) triple
    through the success/failure helper functions above."""
    # Each dict: 'validator' plus iterables of (input, expected) pairs for
    # 'success' and (input, exception type) pairs for 'fail'.
    validation_tests = (
        {'validator': validate_bool,
         'success': chain(((_, True) for _ in
                           ('t', 'y', 'yes', 'on', 'true', '1', 1, True)),
                          ((_, False) for _ in
                           ('f', 'n', 'no', 'off', 'false', '0', 0, False))),
         'fail': ((_, ValueError)
                  for _ in ('aardvark', 2, -1, [], ))},
        {'validator': validate_stringlist,
         'success': (('', []),
                     ('a,b', ['a', 'b']),
                     ('aardvark', ['aardvark']),
                     ('aardvark, ', ['aardvark']),
                     ('aardvark, ,', ['aardvark']),
                     (['a', 'b'], ['a', 'b']),
                     (('a', 'b'), ['a', 'b']),
                     ((1, 2), ['1', '2'])),
         'fail': ((dict(), ValueError),
                  (1, ValueError),)
         },
        {'validator': validate_nseq_int(2),
         'success': ((_, [1, 2])
                     for _ in ('1, 2', [1.5, 2.5], [1, 2],
                               (1, 2), np.array((1, 2)))),
         'fail': ((_, ValueError)
                  for _ in ('aardvark', ('a', 1),
                            (1, 2, 3)
                            ))
         },
        {'validator': validate_nseq_float(2),
         'success': ((_, [1.5, 2.5])
                     for _ in ('1.5, 2.5', [1.5, 2.5], [1.5, 2.5],
                               (1.5, 2.5), np.array((1.5, 2.5)))),
         'fail': ((_, ValueError)
                  for _ in ('aardvark', ('a', 1),
                            (1, 2, 3)
                            ))
         }
        )
    # Yield one nose sub-test per pair.
    for validator_dict in validation_tests:
        validator = validator_dict['validator']
        for arg, target in validator_dict['success']:
            yield _validation_test_helper, validator, arg, target
        for arg, error_type in validator_dict['fail']:
            yield _validation_fail_helper, validator, arg, error_type
def test_keymaps():
    """Every keymap.* rcparam must hold a list of key bindings."""
    for name in mpl.rcParams:
        if 'keymap' in name:
            assert isinstance(mpl.rcParams[name], list)
def test_rcparams_reset_after_fail():
    """A failed rc_context must not leave the global rcParams modified.

    There was previously a bug where, if rc_context raised because the
    supplied rc parameters were invalid, the global rc parameters were
    left in a partially-updated state.
    """
    if sys.version_info[:2] < (2, 7):
        # Use nose.SkipTest for consistency with the rest of this module;
        # the bare SkipTest name used previously is not referenced
        # anywhere else and may not be in scope.
        raise nose.SkipTest("Test can only be run in Python >= 2.7 as it requires OrderedDict")
    from collections import OrderedDict
    with mpl.rc_context(rc={'text.usetex': False}):
        assert mpl.rcParams['text.usetex'] is False
        # An OrderedDict guarantees the valid key is applied before the
        # bogus one, so a partial update would be observable below.
        with assert_raises(KeyError):
            with mpl.rc_context(rc=OrderedDict([('text.usetex', True),
                                                ('test.blah', True)])):
                pass
        assert mpl.rcParams['text.usetex'] is False
| {
"pile_set_name": "Github"
} |
#ifndef SASS_TO_VALUE_H
#define SASS_TO_VALUE_H
#include "operation.hpp"
#include "sass/values.h"
#include "ast_fwd_decl.hpp"
namespace Sass {
  // Visitor that converts AST nodes into concrete Value objects,
  // dispatched through the Operation_CRTP base.
  class To_Value : public Operation_CRTP<Value_Ptr, To_Value> {
    // Shared handler invoked for any node type without a dedicated
    // operator() overload below (see fallback()).
    Value_Ptr fallback_impl(AST_Node_Ptr n);
  private:
    Context& ctx;
  public:
    To_Value(Context& ctx)
    : ctx(ctx)
    { }
    ~To_Value() { }
    using Operation<Value_Ptr>::operator();
    // One overload per convertible node type.
    Value_Ptr operator()(Argument_Ptr);
    Value_Ptr operator()(Boolean_Ptr);
    Value_Ptr operator()(Number_Ptr);
    Value_Ptr operator()(Color_Ptr);
    Value_Ptr operator()(String_Constant_Ptr);
    Value_Ptr operator()(String_Quoted_Ptr);
    Value_Ptr operator()(Custom_Warning_Ptr);
    Value_Ptr operator()(Custom_Error_Ptr);
    Value_Ptr operator()(List_Ptr);
    Value_Ptr operator()(Map_Ptr);
    Value_Ptr operator()(Null_Ptr);
    // convert to string via `To_String`
    Value_Ptr operator()(Selector_List_Ptr);
    Value_Ptr operator()(Binary_Expression_Ptr);
    // fallback throws error
    template <typename U>
    Value_Ptr fallback(U x) { return fallback_impl(x); }
  };
}
#endif
| {
"pile_set_name": "Github"
} |
package com.enonic.xp.data;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.SignStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAccessor;
import com.enonic.xp.content.ContentId;
import com.enonic.xp.util.BinaryReference;
import com.enonic.xp.util.GeoPoint;
import com.enonic.xp.util.Link;
import com.enonic.xp.util.Reference;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.NANO_OF_SECOND;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
/**
 * Conversion functions used to coerce raw property values into the Java
 * types backing each value type. Converters return null when the input
 * cannot be converted to the target type.
 */
final class JavaTypeConverters
{
    // Strict yyyy-MM-dd with a fixed 4-digit year field.
    private static final DateTimeFormatter LOCAL_DATE_FORMATTER =
        new java.time.format.DateTimeFormatterBuilder().appendValue( ChronoField.YEAR, 4 ).appendLiteral( '-' ).appendValue(
            ChronoField.MONTH_OF_YEAR, 2 ).appendLiteral( '-' ).appendValue( ChronoField.DAY_OF_MONTH, 2 ).toFormatter();
    private static final DateTimeFormatter LOCAL_DATE_TIME_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
    // Time of day: H[H]:mm with optional :ss and optional fractional seconds.
    private static final DateTimeFormatter LOCAL_TIME_FORMATTER = new DateTimeFormatterBuilder().
        appendValue( HOUR_OF_DAY, 1, 2, SignStyle.NORMAL ).
        appendLiteral( ':' ).
        appendValue( MINUTE_OF_HOUR, 2 ).
        optionalStart().
        appendLiteral( ':' ).
        appendValue( SECOND_OF_MINUTE, 2 ).
        optionalStart().
        appendFraction( NANO_OF_SECOND, 0, 9, true ).
        toFormatter();
    // One shared converter instance per supported target type.
    public static final JavaTypeConverter<String> STRING = newString();
    public static final JavaTypeConverter<Long> LONG = newLong();
    public static final JavaTypeConverter<Double> DOUBLE = newDouble();
    public static final JavaTypeConverter<Boolean> BOOLEAN = newBoolean();
    public static final JavaTypeConverter<PropertySet> DATA = newData();
    public static final JavaTypeConverter<ContentId> CONTENT_ID = newContentId();
    public static final JavaTypeConverter<Instant> DATE_TIME = newInstant();
    public static final JavaTypeConverter<LocalDate> LOCAL_DATE = newLocalDate();
    public static final JavaTypeConverter<LocalDateTime> LOCAL_DATE_TIME = newLocalDateTime();
    public static final JavaTypeConverter<LocalTime> LOCAL_TIME = newLocalTime();
    public static final JavaTypeConverter<GeoPoint> GEO_POINT = newGeoPoint();
    public static final JavaTypeConverter<Reference> REFERENCE = newReference();
    public static final JavaTypeConverter<BinaryReference> BINARY_REFERENCE = newBinaryReference();
    public static final JavaTypeConverter<Link> LINK = newLink();
    // Strings pass through; date values use the formatters above; anything
    // else falls back to toString() (throws NPE for a null value).
    private static String convertToString( final Object value )
    {
        if ( value instanceof String )
        {
            return (String) value;
        }
        else if ( value instanceof LocalDateTime )
        {
            return ( (LocalDateTime) value ).format( LOCAL_DATE_TIME_FORMATTER );
        }
        else if ( value instanceof LocalDate )
        {
            return ( (LocalDate) value ).format( LOCAL_DATE_FORMATTER );
        }
        else
        {
            return value.toString();
        }
    }
    // Numbers are narrowed; strings are parsed via Double, so "3.9"
    // truncates to 3. Returns null for unparsable input.
    private static Long convertToLong( final Object value )
    {
        if ( value instanceof Number )
        {
            return ( (Number) value ).longValue();
        }
        else if ( value instanceof String )
        {
            final Double number = parseNumber( value.toString() );
            if ( number != null )
            {
                return number.longValue();
            }
        }
        return null;
    }
    // Numbers are widened; strings are parsed; null otherwise.
    private static Double convertToDouble( final Object value )
    {
        if ( value instanceof Number )
        {
            return ( (Number) value ).doubleValue();
        }
        else if ( value instanceof String )
        {
            return parseNumber( value.toString() );
        }
        return null;
    }
    // True if the string parses as a double.
    public static boolean isNumber( String strNum )
    {
        return parseNumber( strNum ) != null;
    }
    // Lenient parse: null instead of NumberFormatException.
    private static Double parseNumber( String strNum )
    {
        try
        {
            return Double.parseDouble( strNum );
        }
        catch ( NumberFormatException e )
        {
            return null;
        }
    }
    // Strings use Boolean.parseBoolean, i.e. anything but "true"
    // (case-insensitive) yields false; non-boolean/non-string yields null.
    private static Boolean convertToBoolean( final Object value )
    {
        if ( value instanceof Boolean )
        {
            return (Boolean) value;
        }
        else if ( value instanceof String )
        {
            return Boolean.parseBoolean( (String) value );
        }
        return null;
    }
    // Only an existing PropertySet converts; no parsing from other types.
    private static PropertySet convertToData( final Object value )
    {
        if ( value instanceof PropertySet )
        {
            return (PropertySet) value;
        }
        else
        {
            return null;
        }
    }
    private static ContentId convertToContentId( final Object value )
    {
        if ( value instanceof ContentId )
        {
            return (ContentId) value;
        }
        else if ( value instanceof String )
        {
            return ContentId.from( (String) value );
        }
        else
        {
            return null;
        }
    }
    // Date/time values are interpreted in UTC; strings must carry an
    // explicit offset (ISO_OFFSET_DATE_TIME).
    private static Instant convertToInstant( final Object value )
    {
        if ( value instanceof LocalDate )
        {
            return ( (LocalDate) value ).atStartOfDay().toInstant( ZoneOffset.UTC );
        }
        if ( value instanceof LocalTime )
        {
            // NOTE(review): anchors the time on LocalDate.now(), so the
            // result depends on the current date — confirm intended.
            return ( (LocalTime) value ).atDate( LocalDate.now() ).toInstant( ZoneOffset.UTC );
        }
        if ( value instanceof LocalDateTime )
        {
            return ( (LocalDateTime) value ).toInstant( ZoneOffset.UTC );
        }
        if ( value instanceof Instant )
        {
            return ( (Instant) value );
        }
        else if ( value instanceof String )
        {
            final TemporalAccessor temporalAccessor = DATE_TIME_FORMATTER.parse( (String) value );
            return Instant.from( temporalAccessor );
        }
        else
        {
            return null;
        }
    }
    // NOTE(review): precision differs by source type — an Instant is
    // truncated to minutes, while a LocalDateTime keeps seconds (drops
    // nanos) — confirm this asymmetry is intentional.
    private static LocalTime convertToLocalTime( final Object value )
    {
        if ( value instanceof Instant )
        {
            return LocalDateTime.ofInstant( (Instant) value, ZoneOffset.UTC ).toLocalTime().truncatedTo( ChronoUnit.MINUTES );
        }
        if ( value instanceof LocalTime )
        {
            return (LocalTime) value;
        }
        if ( value instanceof LocalDate )
        {
            return ( (LocalDate) value ).atStartOfDay().toLocalTime();
        }
        if ( value instanceof LocalDateTime )
        {
            return LocalTime.of( ( (LocalDateTime) value ).getHour(), ( (LocalDateTime) value ).getMinute(),
                                 ( (LocalDateTime) value ).getSecond() );
        }
        else if ( value instanceof String )
        {
            return LocalTime.parse( (String) value, LOCAL_TIME_FORMATTER );
        }
        else
        {
            return null;
        }
    }
    // Instants are interpreted in UTC; a bare LocalTime is anchored on
    // the current date.
    private static LocalDateTime convertToLocalDateTime( final Object value )
    {
        if ( value instanceof Instant )
        {
            return LocalDateTime.ofInstant( (Instant) value, ZoneOffset.UTC );
        }
        if ( value instanceof LocalDate )
        {
            return ( (LocalDate) value ).atStartOfDay();
        }
        if ( value instanceof LocalDateTime )
        {
            return (LocalDateTime) value;
        }
        if ( value instanceof LocalTime )
        {
            return ( (LocalTime) value ).atDate( LocalDate.now() );
        }
        else if ( value instanceof String )
        {
            return LocalDateTime.parse( (String) value, LOCAL_DATE_TIME_FORMATTER );
        }
        else
        {
            return null;
        }
    }
    private static LocalDate convertToLocalDate( final Object value )
    {
        if ( value instanceof Instant )
        {
            return LocalDateTime.ofInstant( (Instant) value, ZoneOffset.UTC ).toLocalDate();
        }
        if ( value instanceof LocalDate )
        {
            return (LocalDate) value;
        }
        if ( value instanceof LocalDateTime )
        {
            return LocalDate.of( ( (LocalDateTime) value ).getYear(), ( (LocalDateTime) value ).getMonth(),
                                 ( (LocalDateTime) value ).getDayOfMonth() );
        }
        else if ( value instanceof String )
        {
            return LocalDate.parse( (String) value, LOCAL_DATE_FORMATTER );
        }
        else
        {
            return null;
        }
    }
    private static GeoPoint convertToGeoPoint( final Object value )
    {
        if ( value instanceof GeoPoint )
        {
            return (GeoPoint) value;
        }
        else if ( value instanceof String )
        {
            return GeoPoint.from( (String) value );
        }
        else
        {
            return null;
        }
    }
    private static Reference convertToReference( final Object value )
    {
        if ( value instanceof Reference )
        {
            return (Reference) value;
        }
        else if ( value instanceof String )
        {
            return Reference.from( (String) value );
        }
        else
        {
            return null;
        }
    }
    private static BinaryReference convertToBinaryReference( final Object value )
    {
        if ( value instanceof BinaryReference )
        {
            return (BinaryReference) value;
        }
        else if ( value instanceof String )
        {
            return BinaryReference.from( (String) value );
        }
        else
        {
            return null;
        }
    }
    private static Link convertToLink( final Object value )
    {
        if ( value instanceof Link )
        {
            return (Link) value;
        }
        else if ( value instanceof String )
        {
            return Link.from( (String) value );
        }
        else
        {
            return null;
        }
    }
    // Factories pairing each target class with its conversion function.
    private static JavaTypeConverter<String> newString()
    {
        return new JavaTypeConverter<>( String.class, JavaTypeConverters::convertToString );
    }
    private static JavaTypeConverter<Long> newLong()
    {
        return new JavaTypeConverter<>( Long.class, JavaTypeConverters::convertToLong );
    }
    private static JavaTypeConverter<Double> newDouble()
    {
        return new JavaTypeConverter<>( Double.class, JavaTypeConverters::convertToDouble );
    }
    private static JavaTypeConverter<Boolean> newBoolean()
    {
        return new JavaTypeConverter<>( Boolean.class, JavaTypeConverters::convertToBoolean );
    }
    private static JavaTypeConverter<PropertySet> newData()
    {
        return new JavaTypeConverter<>( PropertySet.class, JavaTypeConverters::convertToData );
    }
    private static JavaTypeConverter<ContentId> newContentId()
    {
        return new JavaTypeConverter<>( ContentId.class, JavaTypeConverters::convertToContentId );
    }
    private static JavaTypeConverter<Instant> newInstant()
    {
        return new JavaTypeConverter<>( Instant.class, JavaTypeConverters::convertToInstant );
    }
    private static JavaTypeConverter<LocalDate> newLocalDate()
    {
        return new JavaTypeConverter<>( LocalDate.class, JavaTypeConverters::convertToLocalDate );
    }
    private static JavaTypeConverter<LocalDateTime> newLocalDateTime()
    {
        return new JavaTypeConverter<>( LocalDateTime.class, JavaTypeConverters::convertToLocalDateTime );
    }
    private static JavaTypeConverter<LocalTime> newLocalTime()
    {
        return new JavaTypeConverter<>( LocalTime.class, JavaTypeConverters::convertToLocalTime );
    }
    private static JavaTypeConverter<GeoPoint> newGeoPoint()
    {
        return new JavaTypeConverter<>( GeoPoint.class, JavaTypeConverters::convertToGeoPoint );
    }
    private static JavaTypeConverter<Reference> newReference()
    {
        return new JavaTypeConverter<>( Reference.class, JavaTypeConverters::convertToReference );
    }
    private static JavaTypeConverter<BinaryReference> newBinaryReference()
    {
        return new JavaTypeConverter<>( BinaryReference.class, JavaTypeConverters::convertToBinaryReference );
    }
    private static JavaTypeConverter<Link> newLink()
    {
        return new JavaTypeConverter<>( Link.class, JavaTypeConverters::convertToLink );
    }
}
| {
"pile_set_name": "Github"
} |
//
// Close icons
// --------------------------------------------------
.close {
  float: right;
  font-size: (@font-size-base * 1.5);
  font-weight: @close-font-weight;
  line-height: 1;
  color: @close-color;
  text-shadow: @close-text-shadow;
  // Mostly transparent at rest; hover/focus below raises visibility.
  .opacity(.2);
  &:hover,
  &:focus {
    color: @close-color;
    text-decoration: none;
    cursor: pointer;
    .opacity(.5);
  }
  // Additional properties for button version
  // iOS requires the button element instead of an anchor tag.
  // If you want the anchor version, it requires `href="#"`.
  button& {
    padding: 0;
    cursor: pointer;
    background: transparent;
    border: 0;
    // Strip the platform's native button chrome.
    -webkit-appearance: none;
  }
}
| {
"pile_set_name": "Github"
} |
{
"navigationBarTitleText": "校园兼职"
} | {
"pile_set_name": "Github"
} |
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package ifc holds miscellaneous interfaces used by kustomize.
package ifc
import (
"sigs.k8s.io/kustomize/pkg/gvk"
"sigs.k8s.io/kustomize/pkg/types"
)
// Validator provides functions to validate annotations and labels
type Validator interface {
	// MakeAnnotationValidator returns a function that validates an
	// annotation map.
	MakeAnnotationValidator() func(map[string]string) error
	// MakeLabelValidator returns a function that validates a label map.
	MakeLabelValidator() func(map[string]string) error
	// ValidateNamespace validates a namespace name and returns the
	// problems found (assumed: empty slice means valid — confirm).
	ValidateNamespace(string) []string
}
// Loader interface exposes methods to read bytes.
type Loader interface {
	// Root returns the root location for this Loader.
	Root() string
	// New returns Loader located at newRoot.
	New(newRoot string) (Loader, error)
	// Load returns the bytes read from the location or an error.
	Load(location string) ([]byte, error)
	// Cleanup cleans the loader
	Cleanup() error
}
// Kunstructured allows manipulation of k8s objects
// that do not have Golang structs.
type Kunstructured interface {
	// Map returns the object's content as a plain map.
	Map() map[string]interface{}
	// SetMap replaces the object's content with the given map.
	SetMap(map[string]interface{})
	// Copy returns a copy of the object.
	Copy() Kunstructured
	// GetFieldValue returns the value of the named field as a string.
	GetFieldValue(string) (string, error)
	MarshalJSON() ([]byte, error)
	UnmarshalJSON([]byte) error
	// Accessors for group/version/kind, name, labels and annotations.
	GetGvk() gvk.Gvk
	GetKind() string
	GetName() string
	SetName(string)
	GetLabels() map[string]string
	SetLabels(map[string]string)
	GetAnnotations() map[string]string
	SetAnnotations(map[string]string)
}
// KunstructuredFactory makes instances of Kunstructured.
type KunstructuredFactory interface {
	// SliceFromBytes unmarshals one or more objects from raw bytes.
	SliceFromBytes([]byte) ([]Kunstructured, error)
	// FromMap wraps an already-parsed map as a Kunstructured.
	FromMap(m map[string]interface{}) Kunstructured
	MakeConfigMap(args *types.ConfigMapArgs, options *types.GeneratorOptions) (Kunstructured, error)
	MakeSecret(args *types.SecretArgs, options *types.GeneratorOptions) (Kunstructured, error)
	// Set provides the factory with a Loader (presumably used to resolve
	// file sources for generators — confirm).
	Set(ldr Loader)
}
// SecretTypeOpaque mirrors core.v1.SecretTypeOpaque.
const SecretTypeOpaque = "Opaque"
| {
"pile_set_name": "Github"
} |
<?php
/**
* Copyright since 2007 PrestaShop SA and Contributors
* PrestaShop is an International Registered Trademark & Property of PrestaShop SA
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.md.
* It is also available through the world-wide-web at this URL:
* https://opensource.org/licenses/OSL-3.0
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade PrestaShop to newer
* versions in the future. If you wish to customize PrestaShop for your
* needs please refer to https://devdocs.prestashop.com/ for more information.
*
* @author PrestaShop SA and Contributors <[email protected]>
* @copyright Since 2007 PrestaShop SA and Contributors
* @license https://opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0)
*/
/*
 * Example configuration for read replica servers:
 *
 * return [
 *     ['server' => '192.168.0.15', 'user' => 'rep', 'password' => '123456', 'database' => 'rep'],
 *     ['server' => '192.168.0.3', 'user' => 'myuser', 'password' => 'mypassword', 'database' => 'mydatabase'],
 * ];
 */
// No replica servers configured by default.
return [];
| {
"pile_set_name": "Github"
} |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "mozilla/BasePrincipal.h"
#include "nsTLSSocketProvider.h"
#include "nsNSSIOLayer.h"
#include "nsError.h"
using mozilla::NeckoOriginAttributes;
// The provider holds no state of its own; all work happens in
// NewSocket/AddToSocket below.
nsTLSSocketProvider::nsTLSSocketProvider()
{
}
nsTLSSocketProvider::~nsTLSSocketProvider()
{
}
// XPCOM boilerplate: expose this class via nsISocketProvider.
NS_IMPL_ISUPPORTS(nsTLSSocketProvider, nsISocketProvider)
// Create a new socket with an SSL/TLS I/O layer already attached.
// Delegates to nsSSLIOLayerNewSocket; the hard-coded `true` presumably
// selects STARTTLS-style behavior (confirm against the
// nsSSLIOLayerNewSocket declaration in nsNSSIOLayer.h). Any failure is
// collapsed into NS_ERROR_SOCKET_CREATE_FAILED.
NS_IMETHODIMP
nsTLSSocketProvider::NewSocket(int32_t family,
                               const char *host,
                               int32_t port,
                               nsIProxyInfo *proxy,
                               const NeckoOriginAttributes &originAttributes,
                               uint32_t flags,
                               PRFileDesc **_result,
                               nsISupports **securityInfo)
{
  nsresult rv = nsSSLIOLayerNewSocket(family,
                                      host,
                                      port,
                                      proxy,
                                      originAttributes,
                                      _result,
                                      securityInfo,
                                      true,
                                      flags);
  return (NS_FAILED(rv)) ? NS_ERROR_SOCKET_CREATE_FAILED : NS_OK;
}
// Add the SSL IO layer to an existing socket. Mirrors NewSocket but
// wraps a caller-supplied PRFileDesc instead of creating one; failures
// are likewise collapsed into NS_ERROR_SOCKET_CREATE_FAILED.
NS_IMETHODIMP
nsTLSSocketProvider::AddToSocket(int32_t family,
                                 const char *host,
                                 int32_t port,
                                 nsIProxyInfo *proxy,
                                 const NeckoOriginAttributes &originAttributes,
                                 uint32_t flags,
                                 PRFileDesc *aSocket,
                                 nsISupports **securityInfo)
{
  nsresult rv = nsSSLIOLayerAddToSocket(family,
                                        host,
                                        port,
                                        proxy,
                                        originAttributes,
                                        aSocket,
                                        securityInfo,
                                        true,
                                        flags);
  return (NS_FAILED(rv)) ? NS_ERROR_SOCKET_CREATE_FAILED : NS_OK;
}
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0-only
/// Correct the size argument to alloc functions
///
//# This makes an effort to find cases where the argument to sizeof is wrong
//# in memory allocation functions by checking the type of the allocated memory
//# when it is a double pointer and ensuring the sizeof argument takes a pointer
//# to the the memory being allocated. There are false positives in cases the
//# sizeof argument is not used in constructing the return value. The result
//# may need some reformatting.
//
// Confidence: Moderate
// Copyright: (C) 2014 Himangi Saraogi.
// Comments:
// Options:
// The four rules below express the same pattern once per output mode
// (context, patch, org, report); keep them in sync when editing.
virtual patch
virtual context
virtual org
virtual report
//----------------------------------------------------------
// For context mode
//----------------------------------------------------------
@depends on context disable sizeof_type_expr@
type T;
T **x;
@@
x =
 <+...sizeof(
* T
 )...+>
//----------------------------------------------------------
// For patch mode
//----------------------------------------------------------
// Rewrites sizeof(T) to sizeof(*x) when x is a T** being assigned.
@depends on patch disable sizeof_type_expr@
type T;
T **x;
@@
x =
 <+...sizeof(
- T
+ *x
 )...+>
//----------------------------------------------------------
// For org and report mode
//----------------------------------------------------------
@r depends on (org || report) disable sizeof_type_expr@
type T;
T **x;
position p;
@@
x =
 <+...sizeof(
 T@p
 )...+>
@script:python depends on org@
p << r.p;
@@
coccilib.org.print_todo(p[0], "WARNING sizeof argument should be pointer type, not structure type")
@script:python depends on report@
p << r.p;
@@
msg="WARNING: Use correct pointer type argument for sizeof"
coccilib.report.print_report(p[0], msg)
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<title>Uses of Class com.google.gwt.dom.builder.client.DomLegendBuilder (GWT Javadoc)</title>
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.google.gwt.dom.builder.client.DomLegendBuilder (GWT Javadoc)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">GWT 2.9.0</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/google/gwt/dom/builder/client/class-use/DomLegendBuilder.html" target="_top">Frames</a></li>
<li><a href="DomLegendBuilder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.google.gwt.dom.builder.client.DomLegendBuilder" class="title">Uses of Class<br>com.google.gwt.dom.builder.client.DomLegendBuilder</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">DomLegendBuilder</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#com.google.gwt.dom.builder.client">com.google.gwt.dom.builder.client</a></td>
<td class="colLast">
<div class="block">Classes used to build DOM elements.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="com.google.gwt.dom.builder.client">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">DomLegendBuilder</a> in <a href="../../../../../../../com/google/gwt/dom/builder/client/package-summary.html">com.google.gwt.dom.builder.client</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../com/google/gwt/dom/builder/client/package-summary.html">com.google.gwt.dom.builder.client</a> that return <a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">DomLegendBuilder</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">DomLegendBuilder</a></code></td>
<td class="colLast"><span class="typeNameLabel">DomBuilderFactory.</span><code><span class="memberNameLink"><a href="../../../../../../../com/google/gwt/dom/builder/client/DomBuilderFactory.html#createLegendBuilder--">createLegendBuilder</a></span>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">DomLegendBuilder</a></code></td>
<td class="colLast"><span class="typeNameLabel">DomBuilderImpl.</span><code><span class="memberNameLink"><a href="../../../../../../../com/google/gwt/dom/builder/client/DomBuilderImpl.html#startLegend--">startLegend</a></span>()</code> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../com/google/gwt/dom/builder/client/DomLegendBuilder.html" title="class in com.google.gwt.dom.builder.client">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">GWT 2.9.0</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?com/google/gwt/dom/builder/client/class-use/DomLegendBuilder.html" target="_top">Frames</a></li>
<li><a href="DomLegendBuilder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"pile_set_name": "Github"
} |
// Created on: 1993-07-29
// Created by: Remi LEQUETTE
// Copyright (c) 1993-1999 Matra Datavision
// Copyright (c) 1999-2014 OPEN CASCADE SAS
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#include <BRepLib_MakePolygon.ixx>
#include <BRepLib.hxx>
#include <BRepLib_MakeEdge.hxx>
#include <BRepTools.hxx>
#include <BRep_Tool.hxx>
#include <BRep_Builder.hxx>
#include <TopoDS.hxx>
#include <Precision.hxx>
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates an empty polygon maker; vertices/points are supplied later via Add().
BRepLib_MakePolygon::BRepLib_MakePolygon()
{
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon (single edge) joining the two points P1 and P2.
BRepLib_MakePolygon::BRepLib_MakePolygon(const gp_Pnt& P1, const gp_Pnt& P2)
{
Add(P1);
Add(P2);
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon through three points; if Cl is true a closing edge
// back to P1 is added as well.
BRepLib_MakePolygon::BRepLib_MakePolygon(const gp_Pnt& P1,
const gp_Pnt& P2,
const gp_Pnt& P3,
const Standard_Boolean Cl)
{
Add(P1);
Add(P2);
Add(P3);
if (Cl) Close();
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon through four points; if Cl is true a closing edge
// back to P1 is added as well.
BRepLib_MakePolygon::BRepLib_MakePolygon(const gp_Pnt& P1,
const gp_Pnt& P2,
const gp_Pnt& P3,
const gp_Pnt& P4,
const Standard_Boolean Cl)
{
Add(P1);
Add(P2);
Add(P3);
Add(P4);
if (Cl) Close();
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon (single edge) joining the two existing vertices V1 and V2.
BRepLib_MakePolygon::BRepLib_MakePolygon(const TopoDS_Vertex& V1,
const TopoDS_Vertex& V2)
{
Add(V1);
Add(V2);
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon through three existing vertices; if Cl is true a
// closing edge back to V1 is added as well.
BRepLib_MakePolygon::BRepLib_MakePolygon(const TopoDS_Vertex& V1,
const TopoDS_Vertex& V2,
const TopoDS_Vertex& V3,
const Standard_Boolean Cl)
{
Add(V1);
Add(V2);
Add(V3);
if (Cl) Close();
}
//=======================================================================
//function : BRepLib_MakePolygon
//purpose :
//=======================================================================
// Creates a polygon through four existing vertices; if Cl is true a
// closing edge back to V1 is added as well.
BRepLib_MakePolygon::BRepLib_MakePolygon(const TopoDS_Vertex& V1,
const TopoDS_Vertex& V2,
const TopoDS_Vertex& V3,
const TopoDS_Vertex& V4,
const Standard_Boolean Cl)
{
Add(V1);
Add(V2);
Add(V3);
Add(V4);
if (Cl) Close();
}
//=======================================================================
//function : Add
//purpose :
//=======================================================================
// Adds the point P by wrapping it in a new vertex (with the default
// Precision::Confusion() tolerance) and delegating to Add(TopoDS_Vertex).
void BRepLib_MakePolygon::Add(const gp_Pnt& P)
{
BRep_Builder B;
TopoDS_Vertex V;
B.MakeVertex(V,P,Precision::Confusion());
Add(V);
}
//=======================================================================
//function : Add
//purpose :
//=======================================================================
// Appends the vertex V to the polygon.
// - First call: only records V as the starting vertex, builds nothing.
// - Second call: creates the underlying wire, then the first edge.
// - Later calls: adds an edge from the previous last vertex to V; if V
//   coincides with the first vertex (BRepTools::Compare) the polygon is
//   closed onto the first vertex instead.
// If the edge cannot be built (e.g. coincident vertices), the previous
// last vertex is restored and the maker is left unchanged (myEdge null).
void BRepLib_MakePolygon::Add(const TopoDS_Vertex& V)
{
if (myFirstVertex.IsNull()) {
// first vertex: just remember it
myFirstVertex = V;
}
else {
// invalidate the "last edge" until the new one is successfully built
myEdge.Nullify();
BRep_Builder B;
TopoDS_Vertex last;
Standard_Boolean second = myLastVertex.IsNull();
if (second) {
// second vertex: create the wire that will collect the edges
last = myFirstVertex;
myLastVertex = V;
B.MakeWire(TopoDS::Wire(myShape));
myShape.Closed(Standard_False);
myShape.Orientable(Standard_True);
}
else {
last = myLastVertex;
// snapping onto the first vertex closes the polygon
if (BRepTools::Compare(V,myFirstVertex)) {
myLastVertex = myFirstVertex;
myShape.Closed(Standard_True);
}
else
myLastVertex = V;
}
BRepLib_MakeEdge ME(last,myLastVertex);
if (ME.IsDone()) {
myEdge = ME;
B.Add(myShape,myEdge);
Done();
}
else {
// edge creation failed: restore the previous last vertex
if (second)
myLastVertex.Nullify();
else
myLastVertex = last;
}
}
}
//=======================================================================
//function : Added
//purpose :
//=======================================================================
// Returns true if the last call to Add() actually created an edge
// (myEdge is nullified at the start of each Add and set only on success).
Standard_Boolean BRepLib_MakePolygon::Added()const
{
return !myEdge.IsNull();
}
//=======================================================================
//function : Close
//purpose :
//=======================================================================
// Closes the polygon by adding an edge from the last vertex back to the
// first one. No-op when fewer than two vertices exist or when the wire
// is already closed. On success the wire is flagged Closed.
void BRepLib_MakePolygon::Close()
{
if (myFirstVertex.IsNull() || myLastVertex.IsNull())
return;
// check not already closed
if (myShape.Closed())
return;
// build the last edge
BRep_Builder B;
myEdge.Nullify();
BRepLib_MakeEdge ME(myLastVertex,myFirstVertex);
if (ME.IsDone()) {
myEdge = ME;
B.Add(myShape,myEdge);
myShape.Closed(Standard_True);
}
}
//=======================================================================
//function : FirstVertex
//purpose :
//=======================================================================
// Returns the first vertex of the polygon (null if nothing was added yet).
const TopoDS_Vertex& BRepLib_MakePolygon::FirstVertex()const
{
return myFirstVertex;
}
//=======================================================================
//function : LastVertex
//purpose :
//=======================================================================
// Returns the last vertex added; equals FirstVertex() once the polygon
// has been closed.
const TopoDS_Vertex& BRepLib_MakePolygon::LastVertex()const
{
return myLastVertex;
}
//=======================================================================
//function : Edge
//purpose :
//=======================================================================
// Returns the edge created by the most recent successful Add()/Close()
// (null if the last operation did not build one).
const TopoDS_Edge& BRepLib_MakePolygon::Edge()const
{
return myEdge;
}
//=======================================================================
//function : Wire
//purpose :
//=======================================================================
// Returns the constructed wire; Shape() is expected to raise if the
// polygon is not Done (inherited BRepLib_MakeShape behavior).
const TopoDS_Wire& BRepLib_MakePolygon::Wire()const
{
return TopoDS::Wire(Shape());
}
//=======================================================================
//function : operator
//purpose :
//=======================================================================
// Implicit conversion to the last created edge (convenience wrapper).
BRepLib_MakePolygon::operator TopoDS_Edge() const
{
return Edge();
}
//=======================================================================
//function : operator
//purpose :
//=======================================================================
// Implicit conversion to the constructed wire (convenience wrapper).
BRepLib_MakePolygon::operator TopoDS_Wire() const
{
return Wire();
}
| {
"pile_set_name": "Github"
} |
E.LOADER FF1900 FF8023 00010D
| {
"pile_set_name": "Github"
} |
/*
* RISC-V translation routines for the RV64M Standard Extension.
*
* Copyright (c) 2016-2017 Sagar Karandikar, [email protected]
* Copyright (c) 2018 Peer Adelt, [email protected]
* Bastian Koppelmann, [email protected]
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2 or later, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* mul: rd = low XLEN bits of rs1 * rs2; requires the M extension. */
static bool trans_mul(DisasContext *ctx, arg_mul *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &tcg_gen_mul_tl);
}
/*
 * mulh: rd = high XLEN bits of the signed*signed product rs1 * rs2.
 * Requires the M extension.
 */
static bool trans_mulh(DisasContext *ctx, arg_mulh *a)
{
    REQUIRE_EXT(ctx, RVM);

    TCGv lo = tcg_temp_new();
    TCGv hi = tcg_temp_new();

    gen_get_gpr(hi, a->rs1);
    gen_get_gpr(lo, a->rs2);
    /* tcg_gen_muls2_tl writes the low half first, then the high half */
    tcg_gen_muls2_tl(lo, hi, hi, lo);
    gen_set_gpr(a->rd, hi);

    tcg_temp_free(hi);
    tcg_temp_free(lo);
    return true;
}
/* mulhsu: rd = high XLEN bits of signed(rs1) * unsigned(rs2);
 * delegates to the shared gen_mulhsu helper. Requires the M extension. */
static bool trans_mulhsu(DisasContext *ctx, arg_mulhsu *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_mulhsu);
}
/*
 * mulhu: rd = high XLEN bits of the unsigned*unsigned product rs1 * rs2.
 * Requires the M extension.
 */
static bool trans_mulhu(DisasContext *ctx, arg_mulhu *a)
{
    REQUIRE_EXT(ctx, RVM);

    TCGv lo = tcg_temp_new();
    TCGv hi = tcg_temp_new();

    gen_get_gpr(hi, a->rs1);
    gen_get_gpr(lo, a->rs2);
    /* tcg_gen_mulu2_tl writes the low half first, then the high half */
    tcg_gen_mulu2_tl(lo, hi, hi, lo);
    gen_set_gpr(a->rd, hi);

    tcg_temp_free(hi);
    tcg_temp_free(lo);
    return true;
}
/* div: signed division rs1 / rs2 -> rd via the gen_div helper.
 * Requires the M extension. */
static bool trans_div(DisasContext *ctx, arg_div *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_div);
}
/* divu: unsigned division rs1 / rs2 -> rd via the gen_divu helper.
 * Requires the M extension. */
static bool trans_divu(DisasContext *ctx, arg_divu *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_divu);
}
/* rem: signed remainder rs1 % rs2 -> rd via the gen_rem helper.
 * Requires the M extension. */
static bool trans_rem(DisasContext *ctx, arg_rem *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_rem);
}
/* remu: unsigned remainder rs1 % rs2 -> rd via the gen_remu helper.
 * Requires the M extension. */
static bool trans_remu(DisasContext *ctx, arg_remu *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_remu);
}
#ifdef TARGET_RISCV64
/* mulw (RV64 only): 32-bit multiply, result sign-extended to rd
 * by the gen_mulw helper. Requires the M extension. */
static bool trans_mulw(DisasContext *ctx, arg_mulw *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith(ctx, a, &gen_mulw);
}
/* divw (RV64 only): 32-bit signed division; gen_arith_div_w adapts the
 * word operands around the shared gen_div. Requires the M extension. */
static bool trans_divw(DisasContext *ctx, arg_divw *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith_div_w(ctx, a, &gen_div);
}
/* divuw (RV64 only): 32-bit unsigned division; gen_arith_div_uw adapts
 * the word operands around the shared gen_divu. Requires the M extension. */
static bool trans_divuw(DisasContext *ctx, arg_divuw *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith_div_uw(ctx, a, &gen_divu);
}
/* remw (RV64 only): 32-bit signed remainder; gen_arith_div_w adapts the
 * word operands around the shared gen_rem. Requires the M extension. */
static bool trans_remw(DisasContext *ctx, arg_remw *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith_div_w(ctx, a, &gen_rem);
}
/* remuw (RV64 only): 32-bit unsigned remainder; gen_arith_div_uw adapts
 * the word operands around the shared gen_remu. Requires the M extension. */
static bool trans_remuw(DisasContext *ctx, arg_remuw *a)
{
REQUIRE_EXT(ctx, RVM);
return gen_arith_div_uw(ctx, a, &gen_remu);
}
#endif
| {
"pile_set_name": "Github"
} |
/* ----------------------------------------------------------------------
* Copyright (C) 2010-2014 ARM Limited. All rights reserved.
*
* $Date: 19. March 2015
* $Revision: V.1.4.5
*
* Project: CMSIS DSP Library
* Title: arm_cfft_radix4_f32.c
*
* Description: Radix-4 Decimation in Frequency CFFT & CIFFT Floating point processing function
*
*
* Target Processor: Cortex-M4/Cortex-M3/Cortex-M0
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* - Neither the name of ARM LIMITED nor the names of its contributors
* may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* -------------------------------------------------------------------- */
#include "arm_math.h"
extern void arm_bitreversal_f32(
float32_t * pSrc,
uint16_t fftSize,
uint16_t bitRevFactor,
uint16_t * pBitRevTab);
/**
* @ingroup groupTransforms
*/
/* ----------------------------------------------------------------------
** Internal helper function used by the FFTs
** ------------------------------------------------------------------- */
/*
* @brief Core function for the floating-point CFFT butterfly process.
* @param[in, out] *pSrc points to the in-place buffer of floating-point data type.
* @param[in] fftLen length of the FFT.
* @param[in] *pCoef points to the twiddle coefficient buffer.
* @param[in] twidCoefModifier twiddle coefficient modifier that supports different size FFTs with the same twiddle factor table.
* @return none.
*/
/*
 * Radix-4 decimation-in-frequency butterfly core for the floating-point
 * CFFT. Processes fftLen interleaved complex samples (re, im) in place in
 * pSrc, reading twiddle factors from pCoef stepped by twidCoefModifier.
 *
 * FIX: the conditional-compilation macro was misspelled
 * "ARM_MATH_CM0_FAMILY_FAMILY" (doubled suffix, never defined anywhere),
 * so the Cortex-M0 fallback path below could never be selected; corrected
 * to the CMSIS macro ARM_MATH_CM0_FAMILY. No other code changed.
 */
void arm_radix4_butterfly_f32(
float32_t * pSrc,
uint16_t fftLen,
float32_t * pCoef,
uint16_t twidCoefModifier)
{
float32_t co1, co2, co3, si1, si2, si3;
uint32_t ia1, ia2, ia3;
uint32_t i0, i1, i2, i3;
uint32_t n1, n2, j, k;
#ifndef ARM_MATH_CM0_FAMILY
/* Run the below code for Cortex-M4 and Cortex-M3 */
float32_t xaIn, yaIn, xbIn, ybIn, xcIn, ycIn, xdIn, ydIn;
float32_t Xaplusc, Xbplusd, Yaplusc, Ybplusd, Xaminusc, Xbminusd, Yaminusc,
Ybminusd;
float32_t Xb12C_out, Yb12C_out, Xc12C_out, Yc12C_out, Xd12C_out, Yd12C_out;
float32_t Xb12_out, Yb12_out, Xc12_out, Yc12_out, Xd12_out, Yd12_out;
float32_t *ptr1;
float32_t p0,p1,p2,p3,p4,p5;
float32_t a0,a1,a2,a3,a4,a5,a6,a7;
/* Initializations for the first stage */
n2 = fftLen;
n1 = n2;
/* n2 = fftLen/4 */
n2 >>= 2u;
i0 = 0u;
ia1 = 0u;
j = n2;
/* Calculation of first stage */
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
xaIn = pSrc[(2u * i0)];
yaIn = pSrc[(2u * i0) + 1u];
xbIn = pSrc[(2u * i1)];
ybIn = pSrc[(2u * i1) + 1u];
xcIn = pSrc[(2u * i2)];
ycIn = pSrc[(2u * i2) + 1u];
xdIn = pSrc[(2u * i3)];
ydIn = pSrc[(2u * i3) + 1u];
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* xb - xd */
Xbminusd = xbIn - xdIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* yb - yd */
Ybminusd = ybIn - ydIn;
/* xa' = xa + xb + xc + xd */
pSrc[(2u * i0)] = Xaplusc + Xbplusd;
/* ya' = ya + yb + yc + yd */
pSrc[(2u * i0) + 1u] = Yaplusc + Ybplusd;
/* (xa - xc) + (yb - yd) */
Xb12C_out = (Xaminusc + Ybminusd);
/* (ya - yc) + (xb - xd) */
Yb12C_out = (Yaminusc - Xbminusd);
/* (xa + xc) - (xb + xd) */
Xc12C_out = (Xaplusc - Xbplusd);
/* (ya + yc) - (yb + yd) */
Yc12C_out = (Yaplusc - Ybplusd);
/* (xa - xc) - (yb - yd) */
Xd12C_out = (Xaminusc - Ybminusd);
/* (ya - yc) + (xb - xd) */
Yd12C_out = (Xbminusd + Yaminusc);
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
/* index calculation for the coefficients */
ia3 = ia2 + ia1;
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
Xb12_out = Xb12C_out * co1;
Yb12_out = Yb12C_out * co1;
Xc12_out = Xc12C_out * co2;
Yc12_out = Yc12C_out * co2;
Xd12_out = Xd12C_out * co3;
Yd12_out = Yd12C_out * co3;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
//Xb12_out -= Yb12C_out * si1;
p0 = Yb12C_out * si1;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
//Yb12_out += Xb12C_out * si1;
p1 = Xb12C_out * si1;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
//Xc12_out -= Yc12C_out * si2;
p2 = Yc12C_out * si2;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
//Yc12_out += Xc12C_out * si2;
p3 = Xc12C_out * si2;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
//Xd12_out -= Yd12C_out * si3;
p4 = Yd12C_out * si3;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
//Yd12_out += Xd12C_out * si3;
p5 = Xd12C_out * si3;
Xb12_out += p0;
Yb12_out -= p1;
Xc12_out += p2;
Yc12_out -= p3;
Xd12_out += p4;
Yd12_out -= p5;
/* xc' = (xa-xb+xc-xd)co2 + (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = Xc12_out;
/* yc' = (ya-yb+yc-yd)co2 - (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = Yc12_out;
/* xb' = (xa+yb-xc-yd)co1 + (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = Xb12_out;
/* yb' = (ya-xb-yc+xd)co1 - (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = Yb12_out;
/* xd' = (xa-yb-xc+yd)co3 + (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = Xd12_out;
/* yd' = (ya+xb-yc-xd)co3 - (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = Yd12_out;
/* Twiddle coefficients index modifier */
ia1 += twidCoefModifier;
/* Updating input index */
i0++;
}
while(--j);
twidCoefModifier <<= 2u;
/* Calculation of second stage to excluding last stage */
for (k = fftLen >> 2u; k > 4u; k >>= 2u)
{
/* Initializations for the first stage */
n1 = n2;
n2 >>= 2u;
ia1 = 0u;
/* Calculation of first stage */
j = 0;
do
{
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
ia3 = ia2 + ia1;
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
/* Twiddle coefficients index modifier */
ia1 += twidCoefModifier;
i0 = j;
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
xaIn = pSrc[(2u * i0)];
yaIn = pSrc[(2u * i0) + 1u];
xbIn = pSrc[(2u * i1)];
ybIn = pSrc[(2u * i1) + 1u];
xcIn = pSrc[(2u * i2)];
ycIn = pSrc[(2u * i2) + 1u];
xdIn = pSrc[(2u * i3)];
ydIn = pSrc[(2u * i3) + 1u];
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* (xb - xd) */
Xbminusd = xbIn - xdIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* (yb - yd) */
Ybminusd = ybIn - ydIn;
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* (xa - xc) + (yb - yd) */
Xb12C_out = (Xaminusc + Ybminusd);
/* (ya - yc) - (xb - xd) */
Yb12C_out = (Yaminusc - Xbminusd);
/* xa + xc -(xb + xd) */
Xc12C_out = (Xaplusc - Xbplusd);
/* (ya + yc) - (yb + yd) */
Yc12C_out = (Yaplusc - Ybplusd);
/* (xa - xc) - (yb - yd) */
Xd12C_out = (Xaminusc - Ybminusd);
/* (ya - yc) + (xb - xd) */
Yd12C_out = (Xbminusd + Yaminusc);
pSrc[(2u * i0)] = Xaplusc + Xbplusd;
pSrc[(2u * i0) + 1u] = Yaplusc + Ybplusd;
Xb12_out = Xb12C_out * co1;
Yb12_out = Yb12C_out * co1;
Xc12_out = Xc12C_out * co2;
Yc12_out = Yc12C_out * co2;
Xd12_out = Xd12C_out * co3;
Yd12_out = Yd12C_out * co3;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
//Xb12_out -= Yb12C_out * si1;
p0 = Yb12C_out * si1;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
//Yb12_out += Xb12C_out * si1;
p1 = Xb12C_out * si1;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
//Xc12_out -= Yc12C_out * si2;
p2 = Yc12C_out * si2;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
//Yc12_out += Xc12C_out * si2;
p3 = Xc12C_out * si2;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
//Xd12_out -= Yd12C_out * si3;
p4 = Yd12C_out * si3;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
//Yd12_out += Xd12C_out * si3;
p5 = Xd12C_out * si3;
Xb12_out += p0;
Yb12_out -= p1;
Xc12_out += p2;
Yc12_out -= p3;
Xd12_out += p4;
Yd12_out -= p5;
/* xc' = (xa-xb+xc-xd)co2 + (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = Xc12_out;
/* yc' = (ya-yb+yc-yd)co2 - (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = Yc12_out;
/* xb' = (xa+yb-xc-yd)co1 + (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = Xb12_out;
/* yb' = (ya-xb-yc+xd)co1 - (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = Yb12_out;
/* xd' = (xa-yb-xc+yd)co3 + (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = Xd12_out;
/* yd' = (ya+xb-yc-xd)co3 - (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = Yd12_out;
i0 += n1;
} while(i0 < fftLen);
j++;
} while(j <= (n2 - 1u));
twidCoefModifier <<= 2u;
}
j = fftLen >> 2;
ptr1 = &pSrc[0];
/* Calculations of last stage */
do
{
xaIn = ptr1[0];
yaIn = ptr1[1];
xbIn = ptr1[2];
ybIn = ptr1[3];
xcIn = ptr1[4];
ycIn = ptr1[5];
xdIn = ptr1[6];
ydIn = ptr1[7];
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* (xb-xd) */
Xbminusd = xbIn - xdIn;
/* (yb-yd) */
Ybminusd = ybIn - ydIn;
/* xa' = xa + xb + xc + xd */
a0 = (Xaplusc + Xbplusd);
/* ya' = ya + yb + yc + yd */
a1 = (Yaplusc + Ybplusd);
/* xc' = (xa-xb+xc-xd) */
a2 = (Xaplusc - Xbplusd);
/* yc' = (ya-yb+yc-yd) */
a3 = (Yaplusc - Ybplusd);
/* xb' = (xa+yb-xc-yd) */
a4 = (Xaminusc + Ybminusd);
/* yb' = (ya-xb-yc+xd) */
a5 = (Yaminusc - Xbminusd);
/* xd' = (xa-yb-xc+yd)) */
a6 = (Xaminusc - Ybminusd);
/* yd' = (ya+xb-yc-xd) */
a7 = (Xbminusd + Yaminusc);
ptr1[0] = a0;
ptr1[1] = a1;
ptr1[2] = a2;
ptr1[3] = a3;
ptr1[4] = a4;
ptr1[5] = a5;
ptr1[6] = a6;
ptr1[7] = a7;
/* increment pointer by 8 */
ptr1 += 8u;
} while(--j);
#else
float32_t t1, t2, r1, r2, s1, s2;
/* Run the below code for Cortex-M0 */
/* Initializations for the fft calculation */
n2 = fftLen;
n1 = n2;
for (k = fftLen; k > 1u; k >>= 2u)
{
/* Initializations for the fft calculation */
n1 = n2;
n2 >>= 2u;
ia1 = 0u;
/* FFT Calculation */
j = 0;
do
{
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
ia3 = ia2 + ia1;
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
/* Twiddle coefficients index modifier */
ia1 = ia1 + twidCoefModifier;
i0 = j;
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
/* xa + xc */
r1 = pSrc[(2u * i0)] + pSrc[(2u * i2)];
/* xa - xc */
r2 = pSrc[(2u * i0)] - pSrc[(2u * i2)];
/* ya + yc */
s1 = pSrc[(2u * i0) + 1u] + pSrc[(2u * i2) + 1u];
/* ya - yc */
s2 = pSrc[(2u * i0) + 1u] - pSrc[(2u * i2) + 1u];
/* xb + xd */
t1 = pSrc[2u * i1] + pSrc[2u * i3];
/* xa' = xa + xb + xc + xd */
pSrc[2u * i0] = r1 + t1;
/* xa + xc -(xb + xd) */
r1 = r1 - t1;
/* yb + yd */
t2 = pSrc[(2u * i1) + 1u] + pSrc[(2u * i3) + 1u];
/* ya' = ya + yb + yc + yd */
pSrc[(2u * i0) + 1u] = s1 + t2;
/* (ya + yc) - (yb + yd) */
s1 = s1 - t2;
/* (yb - yd) */
t1 = pSrc[(2u * i1) + 1u] - pSrc[(2u * i3) + 1u];
/* (xb - xd) */
t2 = pSrc[2u * i1] - pSrc[2u * i3];
/* xc' = (xa-xb+xc-xd)co2 + (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = (r1 * co2) + (s1 * si2);
/* yc' = (ya-yb+yc-yd)co2 - (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = (s1 * co2) - (r1 * si2);
/* (xa - xc) + (yb - yd) */
r1 = r2 + t1;
/* (xa - xc) - (yb - yd) */
r2 = r2 - t1;
/* (ya - yc) - (xb - xd) */
s1 = s2 - t2;
/* (ya - yc) + (xb - xd) */
s2 = s2 + t2;
/* xb' = (xa+yb-xc-yd)co1 + (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = (r1 * co1) + (s1 * si1);
/* yb' = (ya-xb-yc+xd)co1 - (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = (s1 * co1) - (r1 * si1);
/* xd' = (xa-yb-xc+yd)co3 + (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = (r2 * co3) + (s2 * si3);
/* yd' = (ya+xb-yc-xd)co3 - (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = (s2 * co3) - (r2 * si3);
i0 += n1;
} while( i0 < fftLen);
j++;
} while(j <= (n2 - 1u));
twidCoefModifier <<= 2u;
}
#endif /* #ifndef ARM_MATH_CM0_FAMILY */
}
/*
* @brief Core function for the floating-point CIFFT butterfly process.
* @param[in, out] *pSrc points to the in-place buffer of floating-point data type.
* @param[in] fftLen length of the FFT.
* @param[in] *pCoef points to twiddle coefficient buffer.
* @param[in] twidCoefModifier twiddle coefficient modifier that supports different size FFTs with the same twiddle factor table.
* @param[in] onebyfftLen value of 1/fftLen.
* @return none.
*/
void arm_radix4_butterfly_inverse_f32(
float32_t * pSrc,
uint16_t fftLen,
float32_t * pCoef,
uint16_t twidCoefModifier,
float32_t onebyfftLen)
{
float32_t co1, co2, co3, si1, si2, si3;
uint32_t ia1, ia2, ia3;
uint32_t i0, i1, i2, i3;
uint32_t n1, n2, j, k;
#ifndef ARM_MATH_CM0_FAMILY_FAMILY
float32_t xaIn, yaIn, xbIn, ybIn, xcIn, ycIn, xdIn, ydIn;
float32_t Xaplusc, Xbplusd, Yaplusc, Ybplusd, Xaminusc, Xbminusd, Yaminusc,
Ybminusd;
float32_t Xb12C_out, Yb12C_out, Xc12C_out, Yc12C_out, Xd12C_out, Yd12C_out;
float32_t Xb12_out, Yb12_out, Xc12_out, Yc12_out, Xd12_out, Yd12_out;
float32_t *ptr1;
float32_t p0,p1,p2,p3,p4,p5,p6,p7;
float32_t a0,a1,a2,a3,a4,a5,a6,a7;
/* Initializations for the first stage */
n2 = fftLen;
n1 = n2;
/* n2 = fftLen/4 */
n2 >>= 2u;
i0 = 0u;
ia1 = 0u;
j = n2;
/* Calculation of first stage */
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
/* Butterfly implementation */
xaIn = pSrc[(2u * i0)];
yaIn = pSrc[(2u * i0) + 1u];
xcIn = pSrc[(2u * i2)];
ycIn = pSrc[(2u * i2) + 1u];
xbIn = pSrc[(2u * i1)];
ybIn = pSrc[(2u * i1) + 1u];
xdIn = pSrc[(2u * i3)];
ydIn = pSrc[(2u * i3) + 1u];
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* xb - xd */
Xbminusd = xbIn - xdIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* yb - yd */
Ybminusd = ybIn - ydIn;
/* xa' = xa + xb + xc + xd */
pSrc[(2u * i0)] = Xaplusc + Xbplusd;
/* ya' = ya + yb + yc + yd */
pSrc[(2u * i0) + 1u] = Yaplusc + Ybplusd;
/* (xa - xc) - (yb - yd) */
Xb12C_out = (Xaminusc - Ybminusd);
/* (ya - yc) + (xb - xd) */
Yb12C_out = (Yaminusc + Xbminusd);
/* (xa + xc) - (xb + xd) */
Xc12C_out = (Xaplusc - Xbplusd);
/* (ya + yc) - (yb + yd) */
Yc12C_out = (Yaplusc - Ybplusd);
/* (xa - xc) + (yb - yd) */
Xd12C_out = (Xaminusc + Ybminusd);
/* (ya - yc) - (xb - xd) */
Yd12C_out = (Yaminusc - Xbminusd);
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
/* index calculation for the coefficients */
ia3 = ia2 + ia1;
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
Xb12_out = Xb12C_out * co1;
Yb12_out = Yb12C_out * co1;
Xc12_out = Xc12C_out * co2;
Yc12_out = Yc12C_out * co2;
Xd12_out = Xd12C_out * co3;
Yd12_out = Yd12C_out * co3;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
//Xb12_out -= Yb12C_out * si1;
p0 = Yb12C_out * si1;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
//Yb12_out += Xb12C_out * si1;
p1 = Xb12C_out * si1;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
//Xc12_out -= Yc12C_out * si2;
p2 = Yc12C_out * si2;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
//Yc12_out += Xc12C_out * si2;
p3 = Xc12C_out * si2;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
//Xd12_out -= Yd12C_out * si3;
p4 = Yd12C_out * si3;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
//Yd12_out += Xd12C_out * si3;
p5 = Xd12C_out * si3;
Xb12_out -= p0;
Yb12_out += p1;
Xc12_out -= p2;
Yc12_out += p3;
Xd12_out -= p4;
Yd12_out += p5;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = Xc12_out;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = Yc12_out;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = Xb12_out;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = Yb12_out;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = Xd12_out;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = Yd12_out;
/* Twiddle coefficients index modifier */
ia1 = ia1 + twidCoefModifier;
/* Updating input index */
i0 = i0 + 1u;
} while(--j);
twidCoefModifier <<= 2u;
/* Calculation of second stage to excluding last stage */
for (k = fftLen >> 2u; k > 4u; k >>= 2u)
{
/* Initializations for the first stage */
n1 = n2;
n2 >>= 2u;
ia1 = 0u;
/* Calculation of first stage */
j = 0;
do
{
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
ia3 = ia2 + ia1;
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
/* Twiddle coefficients index modifier */
ia1 = ia1 + twidCoefModifier;
i0 = j;
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
xaIn = pSrc[(2u * i0)];
yaIn = pSrc[(2u * i0) + 1u];
xbIn = pSrc[(2u * i1)];
ybIn = pSrc[(2u * i1) + 1u];
xcIn = pSrc[(2u * i2)];
ycIn = pSrc[(2u * i2) + 1u];
xdIn = pSrc[(2u * i3)];
ydIn = pSrc[(2u * i3) + 1u];
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* (xb - xd) */
Xbminusd = xbIn - xdIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* (yb - yd) */
Ybminusd = ybIn - ydIn;
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* (xa - xc) - (yb - yd) */
Xb12C_out = (Xaminusc - Ybminusd);
/* (ya - yc) + (xb - xd) */
Yb12C_out = (Yaminusc + Xbminusd);
/* xa + xc -(xb + xd) */
Xc12C_out = (Xaplusc - Xbplusd);
/* (ya + yc) - (yb + yd) */
Yc12C_out = (Yaplusc - Ybplusd);
/* (xa - xc) + (yb - yd) */
Xd12C_out = (Xaminusc + Ybminusd);
/* (ya - yc) - (xb - xd) */
Yd12C_out = (Yaminusc - Xbminusd);
pSrc[(2u * i0)] = Xaplusc + Xbplusd;
pSrc[(2u * i0) + 1u] = Yaplusc + Ybplusd;
Xb12_out = Xb12C_out * co1;
Yb12_out = Yb12C_out * co1;
Xc12_out = Xc12C_out * co2;
Yc12_out = Yc12C_out * co2;
Xd12_out = Xd12C_out * co3;
Yd12_out = Yd12C_out * co3;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
//Xb12_out -= Yb12C_out * si1;
p0 = Yb12C_out * si1;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
//Yb12_out += Xb12C_out * si1;
p1 = Xb12C_out * si1;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
//Xc12_out -= Yc12C_out * si2;
p2 = Yc12C_out * si2;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
//Yc12_out += Xc12C_out * si2;
p3 = Xc12C_out * si2;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
//Xd12_out -= Yd12C_out * si3;
p4 = Yd12C_out * si3;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
//Yd12_out += Xd12C_out * si3;
p5 = Xd12C_out * si3;
Xb12_out -= p0;
Yb12_out += p1;
Xc12_out -= p2;
Yc12_out += p3;
Xd12_out -= p4;
Yd12_out += p5;
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = Xc12_out;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = Yc12_out;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = Xb12_out;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = Yb12_out;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = Xd12_out;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = Yd12_out;
i0 += n1;
} while(i0 < fftLen);
j++;
} while(j <= (n2 - 1u));
twidCoefModifier <<= 2u;
}
/* Initializations of last stage */
j = fftLen >> 2;
ptr1 = &pSrc[0];
/* Calculations of last stage */
do
{
xaIn = ptr1[0];
yaIn = ptr1[1];
xbIn = ptr1[2];
ybIn = ptr1[3];
xcIn = ptr1[4];
ycIn = ptr1[5];
xdIn = ptr1[6];
ydIn = ptr1[7];
/* Butterfly implementation */
/* xa + xc */
Xaplusc = xaIn + xcIn;
/* xa - xc */
Xaminusc = xaIn - xcIn;
/* ya + yc */
Yaplusc = yaIn + ycIn;
/* ya - yc */
Yaminusc = yaIn - ycIn;
/* xb + xd */
Xbplusd = xbIn + xdIn;
/* yb + yd */
Ybplusd = ybIn + ydIn;
/* (xb-xd) */
Xbminusd = xbIn - xdIn;
/* (yb-yd) */
Ybminusd = ybIn - ydIn;
/* xa' = (xa+xb+xc+xd) * onebyfftLen */
a0 = (Xaplusc + Xbplusd);
/* ya' = (ya+yb+yc+yd) * onebyfftLen */
a1 = (Yaplusc + Ybplusd);
/* xc' = (xa-xb+xc-xd) * onebyfftLen */
a2 = (Xaplusc - Xbplusd);
/* yc' = (ya-yb+yc-yd) * onebyfftLen */
a3 = (Yaplusc - Ybplusd);
/* xb' = (xa-yb-xc+yd) * onebyfftLen */
a4 = (Xaminusc - Ybminusd);
/* yb' = (ya+xb-yc-xd) * onebyfftLen */
a5 = (Yaminusc + Xbminusd);
/* xd' = (xa-yb-xc+yd) * onebyfftLen */
a6 = (Xaminusc + Ybminusd);
/* yd' = (ya-xb-yc+xd) * onebyfftLen */
a7 = (Yaminusc - Xbminusd);
p0 = a0 * onebyfftLen;
p1 = a1 * onebyfftLen;
p2 = a2 * onebyfftLen;
p3 = a3 * onebyfftLen;
p4 = a4 * onebyfftLen;
p5 = a5 * onebyfftLen;
p6 = a6 * onebyfftLen;
p7 = a7 * onebyfftLen;
/* xa' = (xa+xb+xc+xd) * onebyfftLen */
ptr1[0] = p0;
/* ya' = (ya+yb+yc+yd) * onebyfftLen */
ptr1[1] = p1;
/* xc' = (xa-xb+xc-xd) * onebyfftLen */
ptr1[2] = p2;
/* yc' = (ya-yb+yc-yd) * onebyfftLen */
ptr1[3] = p3;
/* xb' = (xa-yb-xc+yd) * onebyfftLen */
ptr1[4] = p4;
/* yb' = (ya+xb-yc-xd) * onebyfftLen */
ptr1[5] = p5;
/* xd' = (xa-yb-xc+yd) * onebyfftLen */
ptr1[6] = p6;
/* yd' = (ya-xb-yc+xd) * onebyfftLen */
ptr1[7] = p7;
/* increment source pointer by 8 for next calculations */
ptr1 = ptr1 + 8u;
} while(--j);
#else
float32_t t1, t2, r1, r2, s1, s2;
/* Run the below code for Cortex-M0 */
/* Initializations for the first stage */
n2 = fftLen;
n1 = n2;
/* Calculation of first stage */
for (k = fftLen; k > 4u; k >>= 2u)
{
/* Initializations for the first stage */
n1 = n2;
n2 >>= 2u;
ia1 = 0u;
/* Calculation of first stage */
j = 0;
do
{
/* index calculation for the coefficients */
ia2 = ia1 + ia1;
ia3 = ia2 + ia1;
co1 = pCoef[ia1 * 2u];
si1 = pCoef[(ia1 * 2u) + 1u];
co2 = pCoef[ia2 * 2u];
si2 = pCoef[(ia2 * 2u) + 1u];
co3 = pCoef[ia3 * 2u];
si3 = pCoef[(ia3 * 2u) + 1u];
/* Twiddle coefficients index modifier */
ia1 = ia1 + twidCoefModifier;
i0 = j;
do
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
/* xa + xc */
r1 = pSrc[(2u * i0)] + pSrc[(2u * i2)];
/* xa - xc */
r2 = pSrc[(2u * i0)] - pSrc[(2u * i2)];
/* ya + yc */
s1 = pSrc[(2u * i0) + 1u] + pSrc[(2u * i2) + 1u];
/* ya - yc */
s2 = pSrc[(2u * i0) + 1u] - pSrc[(2u * i2) + 1u];
/* xb + xd */
t1 = pSrc[2u * i1] + pSrc[2u * i3];
/* xa' = xa + xb + xc + xd */
pSrc[2u * i0] = r1 + t1;
/* xa + xc -(xb + xd) */
r1 = r1 - t1;
/* yb + yd */
t2 = pSrc[(2u * i1) + 1u] + pSrc[(2u * i3) + 1u];
/* ya' = ya + yb + yc + yd */
pSrc[(2u * i0) + 1u] = s1 + t2;
/* (ya + yc) - (yb + yd) */
s1 = s1 - t2;
/* (yb - yd) */
t1 = pSrc[(2u * i1) + 1u] - pSrc[(2u * i3) + 1u];
/* (xb - xd) */
t2 = pSrc[2u * i1] - pSrc[2u * i3];
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = (r1 * co2) - (s1 * si2);
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = (s1 * co2) + (r1 * si2);
/* (xa - xc) - (yb - yd) */
r1 = r2 - t1;
/* (xa - xc) + (yb - yd) */
r2 = r2 + t1;
/* (ya - yc) + (xb - xd) */
s1 = s2 + t2;
/* (ya - yc) - (xb - xd) */
s2 = s2 - t2;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = (r1 * co1) - (s1 * si1);
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = (s1 * co1) + (r1 * si1);
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = (r2 * co3) - (s2 * si3);
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = (s2 * co3) + (r2 * si3);
i0 += n1;
} while( i0 < fftLen);
j++;
} while(j <= (n2 - 1u));
twidCoefModifier <<= 2u;
}
/* Initializations of last stage */
n1 = n2;
n2 >>= 2u;
/* Calculations of last stage */
for (i0 = 0u; i0 <= (fftLen - n1); i0 += n1)
{
/* index calculation for the input as, */
/* pSrc[i0 + 0], pSrc[i0 + fftLen/4], pSrc[i0 + fftLen/2], pSrc[i0 + 3fftLen/4] */
i1 = i0 + n2;
i2 = i1 + n2;
i3 = i2 + n2;
/* Butterfly implementation */
/* xa + xc */
r1 = pSrc[2u * i0] + pSrc[2u * i2];
/* xa - xc */
r2 = pSrc[2u * i0] - pSrc[2u * i2];
/* ya + yc */
s1 = pSrc[(2u * i0) + 1u] + pSrc[(2u * i2) + 1u];
/* ya - yc */
s2 = pSrc[(2u * i0) + 1u] - pSrc[(2u * i2) + 1u];
/* xc + xd */
t1 = pSrc[2u * i1] + pSrc[2u * i3];
/* xa' = xa + xb + xc + xd */
pSrc[2u * i0] = (r1 + t1) * onebyfftLen;
/* (xa + xb) - (xc + xd) */
r1 = r1 - t1;
/* yb + yd */
t2 = pSrc[(2u * i1) + 1u] + pSrc[(2u * i3) + 1u];
/* ya' = ya + yb + yc + yd */
pSrc[(2u * i0) + 1u] = (s1 + t2) * onebyfftLen;
/* (ya + yc) - (yb + yd) */
s1 = s1 - t2;
/* (yb-yd) */
t1 = pSrc[(2u * i1) + 1u] - pSrc[(2u * i3) + 1u];
/* (xb-xd) */
t2 = pSrc[2u * i1] - pSrc[2u * i3];
/* xc' = (xa-xb+xc-xd)co2 - (ya-yb+yc-yd)(si2) */
pSrc[2u * i1] = r1 * onebyfftLen;
/* yc' = (ya-yb+yc-yd)co2 + (xa-xb+xc-xd)(si2) */
pSrc[(2u * i1) + 1u] = s1 * onebyfftLen;
/* (xa - xc) - (yb-yd) */
r1 = r2 - t1;
/* (xa - xc) + (yb-yd) */
r2 = r2 + t1;
/* (ya - yc) + (xb-xd) */
s1 = s2 + t2;
/* (ya - yc) - (xb-xd) */
s2 = s2 - t2;
/* xb' = (xa+yb-xc-yd)co1 - (ya-xb-yc+xd)(si1) */
pSrc[2u * i2] = r1 * onebyfftLen;
/* yb' = (ya-xb-yc+xd)co1 + (xa+yb-xc-yd)(si1) */
pSrc[(2u * i2) + 1u] = s1 * onebyfftLen;
/* xd' = (xa-yb-xc+yd)co3 - (ya+xb-yc-xd)(si3) */
pSrc[2u * i3] = r2 * onebyfftLen;
/* yd' = (ya+xb-yc-xd)co3 + (xa-yb-xc+yd)(si3) */
pSrc[(2u * i3) + 1u] = s2 * onebyfftLen;
}
#endif /* #ifndef ARM_MATH_CM0_FAMILY_FAMILY */
}
/**
* @addtogroup ComplexFFT
* @{
*/
/**
* @details
* @brief Processing function for the floating-point Radix-4 CFFT/CIFFT.
* @deprecated Do not use this function. It has been superseded by \ref arm_cfft_f32 and will be removed
* in the future.
* @param[in] *S points to an instance of the floating-point Radix-4 CFFT/CIFFT structure.
* @param[in, out] *pSrc points to the complex data buffer of size <code>2*fftLen</code>. Processing occurs in-place.
* @return none.
*/
void arm_cfft_radix4_f32(
  const arm_cfft_radix4_instance_f32 * S,
  float32_t * pSrc)
{
  /* Dispatch to the forward or inverse radix-4 butterfly kernel, as
   * selected when the instance structure was initialized. */
  if(S->ifftFlag != 1u)
  {
    /* Complex FFT radix-4 */
    arm_radix4_butterfly_f32(pSrc, S->fftLen, S->pTwiddle,
                             S->twidCoefModifier);
  }
  else
  {
    /* Complex IFFT radix-4; onebyfftLen applies the 1/N output scaling */
    arm_radix4_butterfly_inverse_f32(pSrc, S->fftLen, S->pTwiddle,
                                     S->twidCoefModifier, S->onebyfftLen);
  }

  /* Optionally reorder the in-place result from bit-reversed to natural order */
  if(S->bitReverseFlag == 1u)
  {
    arm_bitreversal_f32(pSrc, S->fftLen, S->bitRevFactor, S->pBitRevTable);
  }
}
/**
* @} end of ComplexFFT group
*/
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Demo screen: one EditText followed by two Buttons. Only the second
     button carries an id (@+id/button_custom); its text suggests a custom
     pressed state is attached to it from code - confirm against the
     activity implementation. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/activity_kotlin"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin">

    <EditText
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="New edit text"
        android:id="@+id/editText"/>

    <!-- Control button: default framework pressed state (no id needed). -->
    <Button
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="No custom pressed"/>

    <!-- Button targeted by code for a custom pressed effect. -->
    <Button
        android:id="@+id/button_custom"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="with custom pressed"/>
</LinearLayout>
| {
"pile_set_name": "Github"
} |
package com.macro.mall.dto;
import com.macro.mall.model.SmsCoupon;
import com.macro.mall.model.SmsCouponProductCategoryRelation;
import com.macro.mall.model.SmsCouponProductRelation;
import io.swagger.annotations.ApiModelProperty;
import lombok.Getter;
import lombok.Setter;
import java.util.List;
/**
 * Coupon details wrapper: a coupon together with the products and
 * product categories it is bound to.
 * Created by macro on 2018/8/28.
 */
public class SmsCouponParam extends SmsCoupon {
    // Products this coupon is bound to.
    @Getter
    @Setter
    @ApiModelProperty("优惠券绑定的商品")
    private List<SmsCouponProductRelation> productRelationList;

    // Product categories this coupon is bound to.
    @Getter
    @Setter
    @ApiModelProperty("优惠券绑定的商品分类")
    private List<SmsCouponProductCategoryRelation> productCategoryRelationList;
}
| {
"pile_set_name": "Github"
} |
var selftest = require('../tool-testing/selftest.js');
var Sandbox = selftest.Sandbox;
var MONGO_LISTENING =
{ stdout: " [initandlisten] waiting for connections on port" };
// Boot the app inside the sandbox and wait for the standard startup
// banner plus a (fake) MongoDB listening on its port.
function startRun(sandbox) {
  const run = sandbox.run();
  ["myapp", "proxy"].forEach((banner) => run.match(banner));
  run.tellMongo(MONGO_LISTENING);
  run.match("MongoDB");
  return run;
}
// Test that an app can properly read assets with unicode based filenames
selftest.define("assets - unicode asset names are allowed", () => {
  const s = new Sandbox({ fakeMongo: true });
  s.createApp('myapp', 'unicode-asset-app');
  s.cd('myapp');

  const run = startRun(s);

  // The fixture app logs each asset read three times; match both the
  // contents and the resolved absolute path of the unicode-named file.
  run.match('1 - getText: Hello world!');
  run.match('2 - getText: Hello world!');
  run.match('3 - getText: Hello world!');
  run.match(/1 - absoluteFilePath:(.*)ma_a_verde.txt/);
  run.match(/2 - absoluteFilePath:(.*)ma_a_verde.txt/);
  run.match(/3 - absoluteFilePath:(.*)ma_a_verde.txt/);
  run.stop();
});
// Verify path strings can be Unicode normalized through the
// tools/static-assets/server/mini-files.ts#unicodeNormalizePath helper
selftest.define(
  "assets - helper exists to unicode normalize path strings",
  () => {
    const files = require('../static-assets/server/mini-files.ts');

    // null input must pass through unchanged.
    selftest.expectEqual(null, files.unicodeNormalizePath(null));

    const unicodeNormalizedPath = '/path/maça verde.txt'.normalize('NFC');

    // Same filename in three encodings: source-literal, decomposed
    // (c + combining cedilla), and precomposed escape. All must
    // normalize to the single NFC form.
    const testPaths = [
      '/path/maça verde.txt',
      '/path/mac\u0327a verde.txt',
      '/path/ma\xE7a verde.txt',
    ];

    testPaths.forEach((path) => {
      selftest.expectEqual(
        unicodeNormalizedPath,
        files.unicodeNormalizePath(path)
      );
    });
  }
);
| {
"pile_set_name": "Github"
} |
/**
* H2GIS is a library that brings spatial support to the H2 Database Engine
* <http://www.h2database.com>. H2GIS is developed by CNRS
* <http://www.cnrs.fr/>.
*
* This code is part of the H2GIS project. H2GIS is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* Lesser General Public License as published by the Free Software Foundation;
* version 3.0 of the License.
*
* H2GIS is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details <http://www.gnu.org/licenses/>.
*
*
* For more information, please consult: <http://www.h2gis.org/>
* or contact directly: info_at_h2gis.org
*/
package org.h2gis.functions.spatial.topology;
import org.h2gis.api.DeterministicScalarFunction;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.operation.polygonize.Polygonizer;
import java.util.Collection;
/**
 * ST_Polygonize builds polygons from the constituent linework of a geometry.
 *
 * @author Erwan Bocher
 */
public class ST_Polygonize extends DeterministicScalarFunction {

    public ST_Polygonize() {
        addProperty(PROP_REMARKS, "Polygonizes a set of Geometry which contain linework "
                + "that represents the edges of a planar graph");
    }

    @Override
    public String getJavaStaticMethod() {
        return "polygonize";
    }

    /**
     * Creates a MultiPolygon containing the possible polygons formed from
     * the constituent linework of a set of geometries.
     *
     * @param geometry geometry whose linework is polygonized; may be null
     * @return a MultiPolygon built with the input geometry's factory, or
     *         null when the input is null or no polygon can be formed
     */
    public static Geometry polygonize(Geometry geometry) {
        if (geometry == null) {
            return null;
        }
        Polygonizer polygonizer = new Polygonizer();
        polygonizer.add(geometry);
        // Wildcard: Polygonizer#getPolygons() is declared without generics
        // in older JTS releases; toPolygonArray accepts a raw Collection.
        Collection<?> pols = polygonizer.getPolygons();
        if (pols.isEmpty()) {
            return null;
        }
        return geometry.getFactory().createMultiPolygon(GeometryFactory.toPolygonArray(pols));
    }
}
| {
"pile_set_name": "Github"
} |
University of Kragujevac
| {
"pile_set_name": "Github"
} |
/*
* @Author: czy0729
* @Date: 2020-03-11 11:32:31
* @Last Modified by: czy0729
* @Last Modified time: 2020-07-09 15:45:56
*/
import React from 'react'
import { AppState, Clipboard } from 'react-native'
import { matchBgmUrl } from '@utils/match'
import { navigationReference, appNavigate } from '@utils/app'
import { confirm } from '@utils/ui'
let lastUrl = ''
/**
 * Invisible component that watches the clipboard: once shortly after
 * mount and every time the app returns to the foreground. When the
 * clipboard holds a new bangumi URL it offers to navigate there.
 */
class ListenClipboard extends React.Component {
  state = {
    appState: AppState.currentState
  }

  componentDidMount() {
    AppState.addEventListener('change', this.onAppStateChange)

    // Delay the first check so it runs after initial navigation settles.
    this.checkTimer = setTimeout(() => {
      this.checkTimer = null
      this.checkContent()
    }, 1200)
  }

  componentWillUnmount() {
    AppState.removeEventListener('change', this.onAppStateChange)

    // Fix: cancel the pending initial check; previously it could fire
    // after unmount and trigger a confirm/navigation on a dead component.
    if (this.checkTimer) {
      clearTimeout(this.checkTimer)
      this.checkTimer = null
    }
  }

  // Re-check the clipboard whenever the app comes back to the foreground.
  onAppStateChange = nextAppState => {
    const { appState } = this.state
    if (appState.match(/inactive|background/) && nextAppState === 'active') {
      this.checkContent()
    }
    this.setState({
      appState: nextAppState
    })
  }

  // Read the clipboard; on a new bangumi URL, prompt to navigate and
  // clear the clipboard so the same prompt is not shown twice.
  checkContent = async () => {
    const content = await Clipboard.getString()
    const url = matchBgmUrl(content)
    if (url && url !== lastUrl) {
      lastUrl = url
      confirm(`检测到链接${url}, 前往页面?`, () => {
        appNavigate(url, navigationReference())
      })
      Clipboard.setString('')
    }
  }

  render() {
    return null
  }
}

export default ListenClipboard
| {
"pile_set_name": "Github"
} |
'use strict';
const expect = require('chai').expect
, nupnp = require('./nupnp')
;
describe('N-UPnP', function () {
  it('should discover a bridge on the network', async function () {
    const results = await nupnp.nupnp();

    // Discovery yields an array of bridge records, each with an id and
    // an internal IP address.
    expect(results).to.be.instanceOf(Array);
    const [firstBridge] = results;
    expect(firstBridge).to.have.property('id');
    expect(firstBridge).to.have.property('internalipaddress');
  });
});
"pile_set_name": "Github"
} |
using GeometryTypes, GLVisualize, GLAbstraction, ImageMagick
using FileIO, ColorTypes, Reactive
if !isdefined(:runtests)
window = glscreen()
end
const description = """
You can move mario around with the arrow keys
"""
const record_interactive = true
# Mutable game state: position (x, y), velocity (vx, vy) and the facing
# direction (:left or :right).
mutable struct Mario{T}
    x         ::T
    y         ::T
    vx        ::T
    vy        ::T
    direction ::Symbol
end

# Apply downward acceleration while airborne; on the ground (y == 0)
# vertical speed is reset to zero.
function gravity(dt, mario)
    mario.vy = mario.y > 0.0 ? mario.vy - (dt / 4.0) : 0.0
    return mario
end

# Integrate position from velocity; the floor at y == 0 is never penetrated.
function physics(dt, mario)
    mario.x += dt * mario.vx
    mario.y = max(0.0, mario.y + dt * mario.vy)
    return mario
end

# keys[1] is the signed walking speed; a nonzero value also updates the
# facing direction, zero keeps the previous one.
function walk(keys, mario)
    mario.vx = keys[1]
    if keys[1] < 0.0
        mario.direction = :left
    elseif keys[1] > 0.0
        mario.direction = :right
    end
    return mario
end

# keys[2] > 0 starts a jump, but only when standing (vy == 0).
function jump(keys, mario)
    if keys[2] > 0.0 && mario.vy == 0.0
        mario.vy = 6.0
    end
    return mario
end

# One simulation tick: gravity, then jump, then walk, then integrate.
function update(dt, keys, mario)
    return physics(dt, walk(keys, jump(keys, gravity(dt, mario))))
end
# Model matrix placing mario at (x, y) in the scene, scaled 5x.
mario2model(mario) = translationmatrix(Vec3f0(mario.x, mario.y, 0f0))*scalematrix(Vec3f0(5f0))

# Cache of animation signals, keyed by "<verb><direction>", e.g. "walkleft".
const mario_images = Dict()

# Turn a vector of frames into a looping signal of frames.
function play(x::Vector)
    const_lift(getindex, x, loop(1:length(x)))
end

# Load every file of a directory (sorted) as an animation sequence, or a
# single file as a one-frame sequence.
function read_sequence(path)
    if isdir(path)
        return map(load, sort(map(x->joinpath(path, x), readdir(path))))
    else
        return fill(load(path), 1)
    end
end

# Preload jump/walk/stand sprites for both directions; "walk" is a
# directory of frames, the other verbs are single PNGs.
for verb in ["jump", "walk", "stand"], dir in ["left", "right"]
    pic = dir
    if verb != "walk" # not a sequence
        pic *= ".png"
    end
    path = assetpath("mario", verb, pic)
    sequence = read_sequence(path)
    gif = map(img->convert(Matrix{RGBA{N0f8}}, img), sequence)
    mario_images[verb*dir] = play(gif)
end

# Pick the current frame from mario's state: airborne -> jump, moving ->
# walk, otherwise stand.
# NOTE(review): the `images` parameter is accepted but ignored; the body
# reads the global `mario_images` directly - confirm whether that is
# intentional.
function mario2image(mario, images=mario_images)
    verb = mario.y > 0.0 ? "jump" : mario.vx != 0.0 ? "walk" : "stand"
    mario_images[verb*string(mario.direction)].value # is a signal of pictures itself (animation), so .value samples the current image
end
# Map an arrow-key symbol to a 2-D control vector: the first component
# is the signed walking speed, the second flags a jump request.
function arrows2vec(direction)
    if direction == :up
        return Vec2f0(0.0, 1.0)
    elseif direction == :down
        return Vec2f0(0.0, -1.0)
    elseif direction == :right
        return Vec2f0(3.0, 0.0)
    elseif direction == :left
        return Vec2f0(-3.0, 0.0)
    end
    return Vec2f0(0.0)
end
# Put everything together
# Sample the arrow-key input on the bounce(1:10) clock, map it to a
# control vector, and fold it through `update` to produce a Mario signal.
arrows = sampleon(bounce(1:10), window.inputs[:arrow_navigation])
keys = const_lift(arrows2vec, arrows)
# Fixed dt of 8.0 per tick - TODO confirm against the sampling rate.
mario_signal = const_lift(update, 8.0, keys, Mario(0.0, 0.0, 0.0, 0.0, :right))
image_stream = const_lift(mario2image, mario_signal)
modelmatrix = const_lift(mario2model, mario_signal)
# Render the animated sprite with a pixel-space camera.
mario = visualize(image_stream, model=modelmatrix)
_view(mario, window, camera=:fixed_pixel)
if !isdefined(:runtests)
    renderloop(window)
end
| {
"pile_set_name": "Github"
} |
/*
* Broadcom specific AMBA
* PCI Core
*
* Copyright 2005, 2011, Broadcom Corporation
* Copyright 2006, 2007, Michael Buesch <[email protected]>
* Copyright 2011, 2012, Hauke Mehrtens <[email protected]>
*
* Licensed under the GNU/GPL. See COPYING for details.
*/
#include "bcma_private.h"
#include <linux/export.h>
#include <linux/bcma/bcma.h>
/**************************************************
* R/W ops.
**************************************************/
/* Read a PCIe core register through the indirect address/data window.
 * The read-back of PCIEIND_ADDR forces the address write to complete
 * before the data register is accessed. */
u32 bcma_pcie_read(struct bcma_drv_pci *pc, u32 address)
{
	pcicore_write32(pc, BCMA_CORE_PCI_PCIEIND_ADDR, address);
	pcicore_read32(pc, BCMA_CORE_PCI_PCIEIND_ADDR);
	return pcicore_read32(pc, BCMA_CORE_PCI_PCIEIND_DATA);
}

/* Write a PCIe core register through the same indirect window. */
static void bcma_pcie_write(struct bcma_drv_pci *pc, u32 address, u32 data)
{
	pcicore_write32(pc, BCMA_CORE_PCI_PCIEIND_ADDR, address);
	pcicore_read32(pc, BCMA_CORE_PCI_PCIEIND_ADDR);
	pcicore_write32(pc, BCMA_CORE_PCI_PCIEIND_DATA, data);
}
/* Point subsequent MDIO accesses at the given serdes PHY. Used only on
 * core rev >= 10, where the PHY is selected through a separate
 * block-address write transaction. A completion timeout is silently
 * ignored. */
static void bcma_pcie_mdio_set_phy(struct bcma_drv_pci *pc, u16 phy)
{
	u32 v;
	int i;

	/* Compose a write transaction to the block-address register. */
	v = BCMA_CORE_PCI_MDIODATA_START;
	v |= BCMA_CORE_PCI_MDIODATA_WRITE;
	v |= (BCMA_CORE_PCI_MDIODATA_DEV_ADDR <<
	      BCMA_CORE_PCI_MDIODATA_DEVADDR_SHF);
	v |= (BCMA_CORE_PCI_MDIODATA_BLK_ADDR <<
	      BCMA_CORE_PCI_MDIODATA_REGADDR_SHF);
	v |= BCMA_CORE_PCI_MDIODATA_TA;
	v |= (phy << 4);
	pcicore_write32(pc, BCMA_CORE_PCI_MDIO_DATA, v);

	udelay(10);
	/* Poll for completion: up to 200 iterations with 1-2 ms sleeps. */
	for (i = 0; i < 200; i++) {
		v = pcicore_read32(pc, BCMA_CORE_PCI_MDIO_CONTROL);
		if (v & BCMA_CORE_PCI_MDIOCTL_ACCESS_DONE)
			break;
		usleep_range(1000, 2000);
	}
}
/* Read a 16-bit serdes register over MDIO.
 * Returns 0 when the transaction never signals completion, so callers
 * cannot distinguish a timeout from a register that reads as zero.
 *
 * Fix: the device/register address bits composed in the if/else below
 * were previously discarded by `v = BCMA_CORE_PCI_MDIODATA_START;`
 * (plain assignment instead of OR), so every read targeted address 0.
 * The upstream bcma driver ORs the START/READ/TA bits into v. */
static u16 bcma_pcie_mdio_read(struct bcma_drv_pci *pc, u16 device, u8 address)
{
	int max_retries = 10;
	u16 ret = 0;
	u32 v;
	int i;

	/* enable mdio access to SERDES */
	v = BCMA_CORE_PCI_MDIOCTL_PREAM_EN;
	v |= BCMA_CORE_PCI_MDIOCTL_DIVISOR_VAL;
	pcicore_write32(pc, BCMA_CORE_PCI_MDIO_CONTROL, v);

	if (pc->core->id.rev >= 10) {
		/* Newer cores: select the PHY first and use the new
		 * device/register field layout plus a longer timeout. */
		max_retries = 200;
		bcma_pcie_mdio_set_phy(pc, device);
		v = (BCMA_CORE_PCI_MDIODATA_DEV_ADDR <<
		     BCMA_CORE_PCI_MDIODATA_DEVADDR_SHF);
		v |= (address << BCMA_CORE_PCI_MDIODATA_REGADDR_SHF);
	} else {
		v = (device << BCMA_CORE_PCI_MDIODATA_DEVADDR_SHF_OLD);
		v |= (address << BCMA_CORE_PCI_MDIODATA_REGADDR_SHF_OLD);
	}

	v |= BCMA_CORE_PCI_MDIODATA_START;
	v |= BCMA_CORE_PCI_MDIODATA_READ;
	v |= BCMA_CORE_PCI_MDIODATA_TA;
	pcicore_write32(pc, BCMA_CORE_PCI_MDIO_DATA, v);

	/* Wait for the device to complete the transaction */
	udelay(10);
	for (i = 0; i < max_retries; i++) {
		v = pcicore_read32(pc, BCMA_CORE_PCI_MDIO_CONTROL);
		if (v & BCMA_CORE_PCI_MDIOCTL_ACCESS_DONE) {
			udelay(10);
			ret = pcicore_read32(pc, BCMA_CORE_PCI_MDIO_DATA);
			break;
		}
		usleep_range(1000, 2000);
	}
	pcicore_write32(pc, BCMA_CORE_PCI_MDIO_CONTROL, 0);
	return ret;
}
u8 address, u16 data)
{
int max_retries = 10;
u32 v;
int i;
/* enable mdio access to SERDES */
v = BCMA_CORE_PCI_MDIOCTL_PREAM_EN;
v |= BCMA_CORE_PCI_MDIOCTL_DIVISOR_VAL;
pcicore_write32(pc, BCMA_CORE_PCI_MDIO_CONTROL, v);
if (pc->core->id.rev >= 10) {
max_retries = 200;
bcma_pcie_mdio_set_phy(pc, device);
v = (BCMA_CORE_PCI_MDIODATA_DEV_ADDR <<
BCMA_CORE_PCI_MDIODATA_DEVADDR_SHF);
v |= (address << BCMA_CORE_PCI_MDIODATA_REGADDR_SHF);
} else {
v = (device << BCMA_CORE_PCI_MDIODATA_DEVADDR_SHF_OLD);
v |= (address << BCMA_CORE_PCI_MDIODATA_REGADDR_SHF_OLD);
}
v = BCMA_CORE_PCI_MDIODATA_START;
v |= BCMA_CORE_PCI_MDIODATA_WRITE;
v |= BCMA_CORE_PCI_MDIODATA_TA;
v |= data;
pcicore_write32(pc, BCMA_CORE_PCI_MDIO_DATA, v);
/* Wait for the device to complete the transaction */
udelay(10);
for (i = 0; i < max_retries; i++) {
v = pcicore_read32(pc, BCMA_CORE_PCI_MDIO_CONTROL);
if (v & BCMA_CORE_PCI_MDIOCTL_ACCESS_DONE)
break;
usleep_range(1000, 2000);
}
pcicore_write32(pc, BCMA_CORE_PCI_MDIO_CONTROL, 0);
}
/* Write an MDIO register, then read back and return its resulting value. */
static u16 bcma_pcie_mdio_writeread(struct bcma_drv_pci *pc, u16 device,
				    u8 address, u16 data)
{
	bcma_pcie_mdio_write(pc, device, address, data);
	return bcma_pcie_mdio_read(pc, device, address);
}
/**************************************************
* Early init.
**************************************************/
/* Make the PI field of the SPROM control word match this core's index;
 * writes only when the stored value differs. */
static void bcma_core_pci_fixcfg(struct bcma_drv_pci *pc)
{
	struct bcma_device *core = pc->core;
	u16 val16, core_index;
	uint regoff;

	regoff = BCMA_CORE_PCI_SPROM(BCMA_CORE_PCI_SPROM_PI_OFFSET);
	core_index = (u16)core->core_index;

	val16 = pcicore_read16(pc, regoff);
	if (((val16 & BCMA_CORE_PCI_SPROM_PI_MASK) >> BCMA_CORE_PCI_SPROM_PI_SHIFT)
	    != core_index) {
		/* Replace the PI bits, preserving the rest of the word. */
		val16 = (core_index << BCMA_CORE_PCI_SPROM_PI_SHIFT) |
			(val16 & ~BCMA_CORE_PCI_SPROM_PI_MASK);
		pcicore_write16(pc, regoff, val16);
	}
}

/*
 * Apply some early fixes required before accessing SPROM.
 * See also si_pci_fixcfg.
 */
void bcma_core_pci_early_init(struct bcma_drv_pci *pc)
{
	/* Idempotent: run at most once per core. */
	if (pc->early_setup_done)
		return;

	pc->hostmode = bcma_core_pci_is_in_hostmode(pc);
	/* The PI fix is applied only in clientmode. */
	if (pc->hostmode)
		goto out;

	bcma_core_pci_fixcfg(pc);

out:
	pc->early_setup_done = true;
}
/**************************************************
* Workarounds.
**************************************************/
/* Compute the serdes RX control value: always force, and additionally
 * request polarity inversion when the PLP status register reports the
 * link polarity as inverted. */
static u8 bcma_pcicore_polarity_workaround(struct bcma_drv_pci *pc)
{
	u32 tmp;

	tmp = bcma_pcie_read(pc, BCMA_CORE_PCI_PLP_STATUSREG);
	if (tmp & BCMA_CORE_PCI_PLP_POLARITYINV_STAT)
		return BCMA_CORE_PCI_SERDES_RX_CTRL_FORCE |
		       BCMA_CORE_PCI_SERDES_RX_CTRL_POLARITY;
	else
		return BCMA_CORE_PCI_SERDES_RX_CTRL_FORCE;
}

/* Serdes workaround: program the RX polarity fix and, if the PLL
 * frequency detector is currently enabled, switch it off. */
static void bcma_pcicore_serdes_workaround(struct bcma_drv_pci *pc)
{
	u16 tmp;

	bcma_pcie_mdio_write(pc, BCMA_CORE_PCI_MDIODATA_DEV_RX,
			     BCMA_CORE_PCI_SERDES_RX_CTRL,
			     bcma_pcicore_polarity_workaround(pc));
	tmp = bcma_pcie_mdio_read(pc, BCMA_CORE_PCI_MDIODATA_DEV_PLL,
				  BCMA_CORE_PCI_SERDES_PLL_CTRL);
	if (tmp & BCMA_CORE_PCI_PLL_CTRL_FREQDET_EN)
		bcma_pcie_mdio_write(pc, BCMA_CORE_PCI_MDIODATA_DEV_PLL,
				     BCMA_CORE_PCI_SERDES_PLL_CTRL,
				     tmp & ~BCMA_CORE_PCI_PLL_CTRL_FREQDET_EN);
}

/* Fix MISC config to allow coming out of L2/L3-Ready state w/o PRST */
/* Needs to happen when coming out of 'standby'/'hibernate' */
static void bcma_core_pci_config_fixup(struct bcma_drv_pci *pc)
{
	u16 val16;
	uint regoff;

	regoff = BCMA_CORE_PCI_SPROM(BCMA_CORE_PCI_SPROM_MISC_CONFIG);

	val16 = pcicore_read16(pc, regoff);

	/* Set-once: write only when the bit is still clear. */
	if (!(val16 & BCMA_CORE_PCI_SPROM_L23READY_EXIT_NOPERST)) {
		val16 |= BCMA_CORE_PCI_SPROM_L23READY_EXIT_NOPERST;
		pcicore_write16(pc, regoff, val16);
	}
}
/**************************************************
* Init.
**************************************************/
/* Clientmode setup: serdes workaround plus the L2/L3-Ready exit fix. */
static void bcma_core_pci_clientmode_init(struct bcma_drv_pci *pc)
{
	bcma_pcicore_serdes_workaround(pc);
	bcma_core_pci_config_fixup(pc);
}

/* Full core init: runs early init if needed, then the host- or
 * client-mode path. Idempotence is tracked via pc->setup_done - note
 * the flag is checked but never set here; confirm it is set elsewhere. */
void bcma_core_pci_init(struct bcma_drv_pci *pc)
{
	if (pc->setup_done)
		return;

	bcma_core_pci_early_init(pc);

	if (pc->hostmode)
		bcma_core_pci_hostmode_init(pc);
	else
		bcma_core_pci_clientmode_init(pc);
}
/* Program serdes power-save values over MDIO when the bus goes up or
 * down. Only PCI-hosted buses with core revisions 15..22 are touched;
 * the magic register values come from vendor reference code and are
 * not documented here. */
void bcma_core_pci_power_save(struct bcma_bus *bus, bool up)
{
	struct bcma_drv_pci *pc;
	u16 data;

	if (bus->hosttype != BCMA_HOSTTYPE_PCI)
		return;

	pc = &bus->drv_pci[0];

	if (pc->core->id.rev >= 15 && pc->core->id.rev <= 20) {
		data = up ? 0x74 : 0x7C;
		bcma_pcie_mdio_writeread(pc, BCMA_CORE_PCI_MDIO_BLK1,
					 BCMA_CORE_PCI_MDIO_BLK1_MGMT1, 0x7F64);
		bcma_pcie_mdio_writeread(pc, BCMA_CORE_PCI_MDIO_BLK1,
					 BCMA_CORE_PCI_MDIO_BLK1_MGMT3, data);
	} else if (pc->core->id.rev >= 21 && pc->core->id.rev <= 22) {
		data = up ? 0x75 : 0x7D;
		bcma_pcie_mdio_writeread(pc, BCMA_CORE_PCI_MDIO_BLK1,
					 BCMA_CORE_PCI_MDIO_BLK1_MGMT1, 0x7E65);
		bcma_pcie_mdio_writeread(pc, BCMA_CORE_PCI_MDIO_BLK1,
					 BCMA_CORE_PCI_MDIO_BLK1_MGMT3, data);
	}
}
EXPORT_SYMBOL_GPL(bcma_core_pci_power_save);
/* Set or clear the ASPM L1 timer extension bit. The trailing read is
 * presumably a posted-write flush - confirm against the PCIe core docs. */
static void bcma_core_pci_extend_L1timer(struct bcma_drv_pci *pc, bool extend)
{
	u32 w;

	w = bcma_pcie_read(pc, BCMA_CORE_PCI_DLLP_PMTHRESHREG);
	if (extend)
		w |= BCMA_CORE_PCI_ASPMTIMER_EXTEND;
	else
		w &= ~BCMA_CORE_PCI_ASPMTIMER_EXTEND;
	bcma_pcie_write(pc, BCMA_CORE_PCI_DLLP_PMTHRESHREG, w);
	bcma_pcie_read(pc, BCMA_CORE_PCI_DLLP_PMTHRESHREG);
}

/* Bring-up hook: extend the L1 timer. */
void bcma_core_pci_up(struct bcma_drv_pci *pc)
{
	bcma_core_pci_extend_L1timer(pc, true);
}

/* Teardown hook: restore the default L1 timer. */
void bcma_core_pci_down(struct bcma_drv_pci *pc)
{
	bcma_core_pci_extend_L1timer(pc, false);
}
| {
"pile_set_name": "Github"
} |
'name': 'test'
'scopeName': 'source.test'
'injectionSelector': 'comment'
'patterns': [{'include': 'source.sql'}]
| {
"pile_set_name": "Github"
} |
// Facade module: re-export ./some unchanged as this module's public surface.
module.exports = require('./some');
| {
"pile_set_name": "Github"
} |
var cache = {};
/**
* All that we require for search space is the following form:
* { keyWords: 'Blah Blah hi',
* name: 'Joe Dude',
* *** anything else you want ***
* }
*/
exports.makeSearcher = function(searchSpace) {
return function(text, cb) {
var res = [], searchEntry, i, useAsKeywords;
if (cache[text]) {
cb({matchingEntities: cache[text], text: text});
return;
}
for (i=0; i < searchSpace.length; i=i+1) {
searchEntry = searchSpace[i];
useAsKeywords = searchEntry.keyWords || searchEntry.name;
if (!text || (text.length &&
useAsKeywords.toLowerCase().indexOf(text.toLowerCase()) !== -1)) {
res.push(searchEntry.entity);
}
}
window.setTimeout(function() {
cache[text] = res;
cb({matchingEntities: res, text: text});
}, 100);
};
};
| {
"pile_set_name": "Github"
} |
@import './../../../scss/variables';

// Logo container: left-aligned content with double-gutter padding all round.
.c-logo {
  text-align: left;
  padding: $gutter-double;
}
"pile_set_name": "Github"
} |
(module I2C_5-3V (layer F.Cu) (tedit 54CF85C1)
(fp_text reference I2C_5-3V (at 0 1.27) (layer F.SilkS)
(effects (font (size 0.5 0.5) (thickness 0.125)))
)
(fp_text value VAL** (at 0 -1.27) (layer F.SilkS)
(effects (font (size 0.5 0.5) (thickness 0.125)))
)
(fp_line (start -7.62 -5.08) (end 7.62 -5.08) (layer F.SilkS) (width 0.15))
(fp_line (start 7.62 -5.08) (end 7.62 5.08) (layer F.SilkS) (width 0.15))
(fp_line (start 7.62 5.08) (end -7.62 5.08) (layer F.SilkS) (width 0.15))
(fp_line (start -7.62 5.08) (end -7.62 -5.08) (layer F.SilkS) (width 0.15))
(pad 3 thru_hole circle (at -6.35 1.27) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 2 thru_hole circle (at -6.35 -1.27) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 1 thru_hole circle (at -6.35 -3.81) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 4 thru_hole circle (at -6.35 3.81) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 5 thru_hole circle (at 6.35 -3.81) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 6 thru_hole circle (at 6.35 -1.27) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 7 thru_hole circle (at 6.35 1.27) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
(pad 8 thru_hole circle (at 6.35 3.81) (size 1.524 1.524) (drill 0.762) (layers *.Cu *.Mask F.SilkS))
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 1999, 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.omg.CORBA;
/**
* The Helper for <tt>FloatSeq</tt>. For more information on
* Helper files, see <a href="doc-files/generatedfiles.html#helper">
* "Generated Files: Helper Files"</a>.<P>
* org/omg/CORBA/FloatSeqHelper.java
* Generated by the IDL-to-Java compiler (portable), version "3.0"
* from streams.idl
* 13 May 1999 22:41:37 o'clock GMT+00:00
*
* The class definition has been modified to conform to the following
* OMG specifications :
* <ul>
* <li> ORB core as defined by CORBA 2.3.1
* (<a href="http://cgi.omg.org/cgi-bin/doc?formal/99-10-07">formal/99-10-07</a>)
* </li>
*
* <li> IDL/Java Language Mapping as defined in
* <a href="http://cgi.omg.org/cgi-bin/doc?ptc/00-01-08">ptc/00-01-08</a>
* </li>
* </ul>
*/
// NOTE(review): IDL-compiler generated code (see header); prefer
// regenerating from streams.idl over hand-editing.
public abstract class FloatSeqHelper
{
  private static String _id = "IDL:omg.org/CORBA/FloatSeq:1.0";

  /** Inserts a float[] into the Any, stamping it with the FloatSeq TypeCode. */
  public static void insert (org.omg.CORBA.Any a, float[] that)
  {
    org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
    a.type (type ());
    write (out, that);
    a.read_value (out.create_input_stream (), type ());
  }

  /** Extracts a float[] from the Any's value stream. */
  public static float[] extract (org.omg.CORBA.Any a)
  {
    return read (a.create_input_stream ());
  }

  // Lazily-built TypeCode: alias("FloatSeq") of an unbounded sequence of float.
  private static org.omg.CORBA.TypeCode __typeCode = null;
  synchronized public static org.omg.CORBA.TypeCode type ()
  {
    if (__typeCode == null)
      {
        __typeCode = org.omg.CORBA.ORB.init ().get_primitive_tc (org.omg.CORBA.TCKind.tk_float);
        __typeCode = org.omg.CORBA.ORB.init ().create_sequence_tc (0, __typeCode);
        __typeCode = org.omg.CORBA.ORB.init ().create_alias_tc (org.omg.CORBA.FloatSeqHelper.id (), "FloatSeq", __typeCode);
      }
    return __typeCode;
  }

  /** Returns the repository id for FloatSeq. */
  public static String id ()
  {
    return _id;
  }

  /** Reads a length-prefixed float array from the stream. */
  public static float[] read (org.omg.CORBA.portable.InputStream istream)
  {
    float value[] = null;
    int _len0 = istream.read_long ();
    value = new float[_len0];
    istream.read_float_array (value, 0, _len0);
    return value;
  }

  /** Writes the array as a length prefix followed by its elements. */
  public static void write (org.omg.CORBA.portable.OutputStream ostream, float[] value)
  {
    ostream.write_long (value.length);
    ostream.write_float_array (value, 0, value.length);
  }
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2010-2020 Mark Allen, Norbert Bartels.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.restfb.types.send.buybutton;
import com.restfb.Facebook;
import com.restfb.types.AbstractFacebookType;
import lombok.Getter;
public class PriceListItem extends AbstractFacebookType {

  /** Human-readable label for this price line item. */
  @Getter
  @Facebook
  private String label;

  /** Amount for this line item, kept as a string (formatting/currency
   * handling is left to the caller - confirm expected format upstream). */
  @Getter
  @Facebook
  private String amount;

  /**
   * Creates a price list item.
   *
   * @param label the item label
   * @param amount the item amount
   */
  public PriceListItem(String label, String amount) {
    this.label = label;
    this.amount = amount;
  }
}
| {
"pile_set_name": "Github"
} |
# Deployment-manager template wiring for software_status.py.
imports:
- path: software_status.py

properties:
  # Maximum wait before the waiter gives up (units defined by
  # software_status.py - confirm).
  timeout:
    type: integer
  # Resources the waiter must depend on before it starts checking.
  waiterDependsOn:
    type: array
  # Number of success signals required for the waiter to pass.
  successNumber:
    type: integer
  # Number of failure signals that make the waiter fail.
  failureNumber:
    type: integer

outputs:
  # URL of the runtime-config resource the instances report into.
  config-url:
    type: string
  # Variable path within that config - confirm exact semantics in
  # software_status.py.
  variable-path:
    type: string
"pile_set_name": "Github"
} |
#ifndef crypto_hash_sha512_H
#define crypto_hash_sha512_H
/*
* WARNING: Unless you absolutely need to use SHA512 for interoperatibility,
* purposes, you might want to consider crypto_generichash() instead.
* Unlike SHA512, crypto_generichash() is not vulnerable to length
* extension attacks.
*/
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include "export.h"
#ifdef __cplusplus
# if __GNUC__
# pragma GCC diagnostic ignored "-Wlong-long"
# endif
extern "C" {
#endif
/* Streaming SHA-512 state: eight 64-bit chaining values, a two-word
 * counter, and a 128-byte input block buffer (per the SHA-512 spec -
 * field semantics not shown in this header). */
typedef struct crypto_hash_sha512_state {
    uint64_t state[8];
    uint64_t count[2];
    unsigned char buf[128];
} crypto_hash_sha512_state;

/* Size of crypto_hash_sha512_state in bytes. */
SODIUM_EXPORT
size_t crypto_hash_sha512_statebytes(void);

#define crypto_hash_sha512_BYTES 64U

/* Digest length in bytes (crypto_hash_sha512_BYTES == 64). */
SODIUM_EXPORT
size_t crypto_hash_sha512_bytes(void);

/* One-shot hash of `inlen` bytes at `in`; the digest is written to `out`. */
SODIUM_EXPORT
int crypto_hash_sha512(unsigned char *out, const unsigned char *in,
                       unsigned long long inlen);

/* Incremental interface: init once, update any number of times, final
 * writes the digest to `out`. */
SODIUM_EXPORT
int crypto_hash_sha512_init(crypto_hash_sha512_state *state);

SODIUM_EXPORT
int crypto_hash_sha512_update(crypto_hash_sha512_state *state,
                              const unsigned char *in,
                              unsigned long long inlen);

SODIUM_EXPORT
int crypto_hash_sha512_final(crypto_hash_sha512_state *state,
                             unsigned char *out);
#ifdef __cplusplus
}
#endif
#endif
| {
"pile_set_name": "Github"
} |
Feature: Compile Time Information
In order to allow a developer to have access to context
information at compile time
As a Joxa Developer
I want to be able to call the functions (module) (function) (line) and have
them evaluate to the correct result
Scenario: Write a function that evaluates to the module name
Given a module that has a function that calls module
When joxa is called on this module
Then a beam binary is produced
And the described function returns the name of the module
| {
"pile_set_name": "Github"
} |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !gccgo

#include "textflag.h"

//
// System call support for 386, NetBSD
//
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
//
// In each TEXT directive, $0-N means no local frame and N bytes of
// arguments+results; on 386 each word is 4 bytes (e.g. $0-28 covers
// seven words: trap, three args, two results, and the error).

TEXT	·Syscall(SB),NOSPLIT,$0-28
	JMP	syscall·Syscall(SB)

TEXT	·Syscall6(SB),NOSPLIT,$0-40
	JMP	syscall·Syscall6(SB)

TEXT	·Syscall9(SB),NOSPLIT,$0-52
	JMP	syscall·Syscall9(SB)

TEXT	·RawSyscall(SB),NOSPLIT,$0-28
	JMP	syscall·RawSyscall(SB)

TEXT	·RawSyscall6(SB),NOSPLIT,$0-40
	JMP	syscall·RawSyscall6(SB)
| {
"pile_set_name": "Github"
} |
/* Copyright (c) 2016-2018, Linaro Limited
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/**
 * @file
 *
 * ODP packet IO stats
 */

#ifndef ODP_API_PACKET_IO_STATS_H_
#define ODP_API_PACKET_IO_STATS_H_

#ifdef __cplusplus
extern "C" {
#endif

/* Thin platform wrapper: the actual API lives in the spec header below;
 * it is included inside the extern "C" block so C++ consumers get
 * C linkage. */
#include <odp/api/spec/packet_io_stats.h>

#ifdef __cplusplus
}
#endif

#endif
| {
"pile_set_name": "Github"
} |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build amd64,openbsd
package unix
// setTimespec packs a seconds/nanoseconds pair into a Timespec.
func setTimespec(sec, nsec int64) Timespec {
	var ts Timespec
	ts.Sec = sec
	ts.Nsec = nsec
	return ts
}
// setTimeval packs a seconds/microseconds pair into a Timeval.
func setTimeval(sec, usec int64) Timeval {
	var tv Timeval
	tv.Sec = sec
	tv.Usec = usec
	return tv
}
// SetKevent fills in a Kevent_t from plain ints, applying the narrowing
// conversions the openbsd/amd64 kevent ABI field types require.
func SetKevent(k *Kevent_t, fd, mode, flags int) {
	k.Flags = uint16(flags)
	k.Filter = int16(mode)
	k.Ident = uint64(fd)
}
// SetLen sets the iovec length, converting to the ABI's uint64 field type.
func (iov *Iovec) SetLen(length int) {
	iov.Len = uint64(length)
}
// SetControllen sets the length of the msghdr control buffer, converting
// to the ABI's uint32 field type.
func (msghdr *Msghdr) SetControllen(length int) {
	msghdr.Controllen = uint32(length)
}
// SetLen sets the control-message header length, converting to the ABI's
// uint32 field type.
func (cmsg *Cmsghdr) SetLen(length int) {
	cmsg.Len = uint32(length)
}
// SYS___SYSCTL is used by syscall_bsd.go for all BSDs, but in modern versions
// of openbsd/amd64 the syscall is called sysctl instead of __sysctl.
const SYS___SYSCTL = SYS_SYSCTL
| {
"pile_set_name": "Github"
} |
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
from pip.index import PackageFinder
from pip.log import logger
from pip.req import InstallRequirement
from pip.util import get_installed_distributions, dist_is_editable
from pip.cmdoptions import make_option_group, index_group
class ListCommand(Command):
    """List installed packages, including editables.

    Implements ``pip list`` with four mutually exclusive modes selected
    in ``run()``: --outdated, --uptodate, --editable, or the plain
    listing (default).
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    # distributions to skip (python itself is reported by pkg_resources.working_set)
    skip = ['python']

    def __init__(self, *args, **kw):
        # Register this command's own flags, then splice in the shared
        # index options (--index-url, --no-index, ...) used when querying
        # package indexes for the outdated/uptodate modes.
        super(ListCommand, self).__init__(*args, **kw)
        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages (excluding editables)')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages (excluding editables)')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not list globally-installed packages.')
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help="Include pre-release and development versions. By default, pip only finds stable versions.")

        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(find_links=options.find_links,
                             index_urls=index_urls,
                             allow_external=options.allow_external,
                             allow_unverified=options.allow_unverified,
                             allow_all_external=options.allow_all_external,
                             allow_all_prereleases=options.pre,
                             process_dependency_links=
                             options.process_dependency_links,
                             session=session,
                             )

    def run(self, options, args):
        # Dispatch on the mode flags; only the first matching flag wins,
        # so e.g. --outdated silently takes precedence over --uptodate.
        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        elif options.editable:
            self.run_editables(options)
        else:
            self.run_listing(options)

    def run_outdated(self, options):
        # Print each installed dist whose index version parses newer than
        # the installed one.
        for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
            if remote_version_parsed > dist.parsed_version:
                logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name,
                              dist.version, remote_version_raw))

    def find_packages_latests_versions(self, options):
        """Yield (dist, raw_version, parsed_version) for each installed,
        non-editable distribution, where the versions come from the
        configured package indexes."""
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
            index_urls = []

        if options.use_mirrors:
            logger.deprecated("1.7",
                              "--use-mirrors has been deprecated and will be removed"
                              " in the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")

        if options.mirrors:
            logger.deprecated("1.7",
                              "--mirrors has been deprecated and will be removed in "
                              " the future. Explicit uses of --index-url and/or "
                              "--extra-index-url is suggested.")
            index_urls += options.mirrors

        # Collect dependency links advertised by installed dists so the
        # finder can consider those URLs too.
        dependency_links = []
        for dist in get_installed_distributions(local_only=options.local, skip=self.skip):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        session = self._build_session(options)

        finder = self._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip)
        for dist in installed_packages:
            req = InstallRequirement.from_line(dist.key, None)
            try:
                link = finder.find_requirement(req, True)

                # If link is None, means installed version is most up-to-date
                if link is None:
                    continue
            except DistributionNotFound:
                # Not on any configured index at all; nothing to report.
                continue
            except BestVersionAlreadyInstalled:
                # NOTE(review): req.installed_version appears to be a plain
                # version string, yet below it is indexed like the
                # (parsed, link, raw) tuple returned by
                # _link_package_versions — verify this branch against the
                # pip version in use.
                remote_version = req.installed_version
            else:
                # It might be a good idea that link or finder had a public method
                # that returned version
                remote_version = finder._link_package_versions(link, req.name)[0]

            remote_version_raw = remote_version[2]
            remote_version_parsed = remote_version[0]
            yield dist, remote_version_raw, remote_version_parsed

    def run_listing(self, options):
        # Default mode: everything installed (editables included).
        installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip)
        self.output_package_listing(installed_packages)

    def run_editables(self, options):
        # --editable mode: only projects installed with -e.
        installed_packages = get_installed_distributions(local_only=options.local, editables_only=True)
        self.output_package_listing(installed_packages)

    def output_package_listing(self, installed_packages):
        # Sort case-insensitively by project name for stable output.
        installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower())
        for dist in installed_packages:
            if dist_is_editable(dist):
                # Editables also show their source location.
                line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location)
            else:
                line = '%s (%s)' % (dist.project_name, dist.version)
            logger.notify(line)

    def run_uptodate(self, options):
        # --uptodate mode: dists whose parsed version equals the latest
        # version found on the indexes.
        uptodate = []
        for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
            if dist.parsed_version == remote_version_parsed:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
| {
"pile_set_name": "Github"
} |
*,
*::after,
*::before {
box-sizing: border-box;
}
html, body {
width: 100%;
overflow-x: hidden;
}
body {
font-family: 'Inconsolata', monospace;
color: #141417;
background: #d0cfc5;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
h2, .menu__link {
font-family: 'Poppins', sans-serif;
}
a {
text-decoration: none;
color: #000;
outline: none;
}
a:hover,
a:focus {
color: #2d2d30;
}
.hidden {
position: absolute;
overflow: hidden;
width: 0;
height: 0;
pointer-events: none;
}
/* Icons */
.icon {
display: block;
width: 1.5em;
height: 1.5em;
margin: 0 auto;
fill: currentColor;
}
.icon--inline {
display: inline-block;
width: 1em;
height: 100%;
margin: 0 0.25em 0 0;
}
.form__close {
margin: 1.25em 0 0 0;
position: relative;
display: inline-block;
height: 0.65em;
float: left;
font-weight: bold;
cursor: pointer;
}
/* Page Loader
   While the root element carries both the `js` and `loading` classes:
   ::before paints a fixed full-screen overlay, and ::after draws a
   50x50px white square centered via negative margins, bouncing with
   the loaderAnim keyframes below. */
.js .loading::before {
	content: '';
	position: fixed;
	z-index: 100;
	top: 0;
	left: 0;
	display: flex;
	width: 100%;
	height: 100%;
	text-align: center;
	background: #7f40f1;
}

.js .loading::after {
	content: '';
	position: fixed;
	z-index: 10000;
	top: 50%;
	left: 50%;
	width: 50px;
	height: 50px;
	margin: -25px 0 0 -25px;
	pointer-events: none;
	background: #fff;
	/* `alternate` makes the square travel up and back down;
	   `forwards` holds the last frame between iterations. */
	animation: loaderAnim 0.8s ease-out infinite alternate forwards;
}

@keyframes loaderAnim {
	to {
		transform: translate3d(0,-100px,0);
	}
}
.content.intro {
height: 93vh;
}
/* Header */
.codrops-header {
position: relative;
display: flex;
flex-direction: row;
flex-wrap: wrap;
align-items: center;
width: 100%;
padding: 5vw;
pointer-events: none;
}
.codrops-header__title {
font-size: 1.5em;
margin: 0;
padding: 0 1em;
}
/* Top Navigation Style */
.codrops-links {
position: relative;
display: flex;
justify-content: center;
text-align: center;
white-space: nowrap;
font-size: 0.85em;
border: 2px solid;
}
.codrops-links::after {
content: '';
height: 100%;
width: 2px;
background: currentColor;
position: absolute;
left: 50%;
margin-left: -1px;
top: 0;
}
.codrops-icon {
pointer-events: visible;
display: inline-block;
padding: 0.6em;
}
/* Demo links */
.demos {
flex: none;
width: 100%;
padding: 0.75em 0 0 0;
}
.demo {
font-weight: bold;
margin: 0 1em 0.5em 0;
pointer-events: visible;
}
.github {
pointer-events: visible;
}
.demo--current {
text-decoration: underline;
}
/* Content */
.content {
position: relative;
min-height: 300px;
overflow-x: hidden;
}
.content--full {
height: 100vh;
min-height: 600px;
}
.flexy {
display: flex;
flex-wrap: wrap;
flex-direction: column;
align-items: center;
}
.flexy--center {
justify-content: center;
}
.flexy--row {
flex-direction: row;
}
.content__heading {
flex: none;
width: 100%;
padding: 0 5vw;
}
.content__heading--center {
text-align: center;
}
.content__title {
font-size: 8vw;
line-height: 1.2;
padding: 0 5vw;
}
.content__title--half {
width: 50vw;
z-index: 2;
}
.content__title--enclosed {
padding: 0;
margin: 0;
}
.content__title--center {
margin: auto;
}
.content__title--right {
text-align: right;
padding: 0 0 0 2em;
}
.content__title--left {
margin-left: -18vw;
}
.content__title--medium {
font-size: 6vw;
}
.content__title--small {
font-size: 2vw;
}
.content__title__inner {
flex: none;
display: inline-block;
white-space: nowrap;
position: relative;
}
.content__title__inner--offset-1 {
top: -0.25em;
left: 13.6vw;
}
.content__title__inner--offset-2 {
top: -0.25em;
left: 1.75vw;
}
.content__title__inner--offset-3 {
left: -10vw;
top: -0.25em;
}
.content__image-wrap {
flex: none;
width: 25vw;
}
.content__image-wrap--half {
width: 50vw;
}
.content__image {
width: 100%;
display: block;
}
.triggers {
border: 2px solid;
padding: 3em;
margin: 0 5vw;
width: 40vw;
}
/* Related demos */
.content--related {
padding: 8em 5vw;
font-weight: bold;
text-align: center;
}
.content--related h2 {
font-family: 'Inconsolata', monospace;
font-size: 1.25em;
}
.content--related .demos {
padding-bottom: 3em;
}
.media-item {
display: inline-block;
padding: 1em;
vertical-align: top;
transition: color 0.3s;
}
.media-item__img {
max-width: 100%;
opacity: 0.8;
transition: opacity 0.3s;
}
.media-item:hover .media-item__img,
.media-item:focus .media-item__img {
opacity: 1;
}
.media-item__title {
font-size: 1em;
margin: 0;
padding: 0.5em;
}
/* Demo specific styles */
/* Buttons */
.btn {
border: 0;
padding: 0;
margin: 0;
background: none;
color: inherit;
}
.btn:focus {
outline: none;
}
.btn--trigger {
margin: 1em;
display: block;
white-space: nowrap;
}
.btn--default,
.btn--trigger::before {
font-weight: bold;
border: 2px solid;
text-align: center;
text-indent: 0;
display: inline-block;
line-height: 1.7;
}
.btn--trigger::before {
content: 'Press';
width: 4em;
height: 2em;
margin: 0 0.75em;
}
.btn--trigger:active::before {
content: 'Ouch!';
}
.btn--default {
padding: 0 1em;
margin: 2vh 0 0;
}
.btn--tool {
display: block;
margin: 1em 0;
font-size: 1.3em;
color: #8a7c78;
}
.btn--menu {
position: absolute;
top: 3vw;
right: 3vw;
color: #939391;
pointer-events: visible;
}
.btn--close {
position: absolute;
top: 3vw;
right: 3vw;
font-size: 0.75em;
z-index: 100;
}
/* Menu */
.menu {
position: fixed;
top: 0;
right: 0;
height: 100vh;
width: 320px;
z-index: 100;
}
.js .menu {
pointer-events: none;
}
.js .menu--open {
pointer-events: visible;
}
.menu .block-revealer__content {
height: 100%;
position: relative;
}
.menu__inner {
width: 100%;
height: 100%;
list-style-type: none;
padding: 20vh 3em;
margin: 0;
background: #fcf652;
overflow: hidden;
}
.menu__item {
padding: 0.15em;
}
.js .menu__item {
transform: translate3d(-13em,0,0);
transition: transform 0s 0.4s;
}
.btn--close {
opacity: 0;
}
.menu--open .menu__item,
.menu--open .btn--close {
opacity: 1;
transform: translate3d(0,0,0);
transition: opacity 0.4s 0.135s, transform 0.4s 0.135s cubic-bezier(0.645, 0.045, 0.355, 1);
}
.menu--open .menu__item:nth-child(2) {
transition-delay: 0.165s;
}
.menu--open .menu__item:nth-child(3) {
transition-delay: 0.195s;
}
.menu--open .menu__item:nth-child(4) {
transition-delay: 0.225s;
}
.menu .menu__link {
font-size: 3.5vw;
color: #4f4f54;
font-weight: bold;
}
.menu .menu__link:hover,
.menu .menu__link:focus {
outline: none;
color: #000;
text-decoration: line-through;
}
.box {
max-width: 600px;
width: 100%;
font-size: 1.25em;
margin: 16vh 0 0 0;
}
.box--small {
max-width: 500px;
}
.box--border {
padding: 2em 3em;
border: 2px solid;
}
.form {
width: 100%;
height: 100vh;
position: fixed;
top: 0;
left: 0;
z-index: 3;
}
.js .form {
pointer-events: none;
}
.js .form--open {
pointer-events: visible;
}
.form__inner {
position: absolute;
width: 100%;
height: 100%;
min-height: 400px;
top: 0;
left: 0;
padding: 5vw;
background: #fcf652;
}
.form__section {
font-size: 1.25em;
text-align: left;
padding: 0.5em 0;
width: 80%;
max-width: 380px;
margin: 0 auto;
}
.form__section--title {
font-size: 2.35vw;
padding: 0 0 1em;
line-height: 1.1;
max-width: 380px;
margin: 0 auto;
}
.js .form__section {
transform: translate3d(0,100px,0);
opacity: 0;
transition: transform 0s 0.6s, opacity 0s 0.6s;
}
.form--open .form__section {
opacity: 1;
transform: translate3d(0,0,0);
transition: transform 1s 0.3s, opacity 1s 0.3s;
transition-timing-function: cubic-bezier(0.2,1,0.2,1);
}
.form--open .form__section:nth-child(2) {
transition-delay: 0.35s;
}
.form--open .form__section:nth-child(3) {
transition-delay: 0.40s;
}
.form--open .form__section:nth-child(4) {
transition-delay: 0.45s;
}
.form--open .form__section:nth-child(5) {
transition-delay: 0.50s;
}
.form--open .form__section:nth-child(6) {
transition-delay: 0.55s;
}
.form__label {
display: block;
flex: none;
width: 100%;
padding: 0.25em 0;
}
.form__input {
padding: 1vh;
font-size: 1.5em;
}
.form__input,
.form__select {
width: 100%;
border: 2px solid;
font-weight: bold;
background: transparent;
}
.form__input:focus,
.form__select:focus {
outline: none;
}
.form__section--right {
text-align: right;
}
.modal {
position: fixed;
max-width: 500px;
width: 90%;
z-index: 100;
font-size: 1.1em;
pointer-events: none;
top: 50%;
left: 50%;
transform: translate3d(-50%,-50%,0);
}
.modal--open {
pointer-events: visible;
}
.modal__inner {
padding: 2.5em;
color: #fff;
background: #aaa;
}
.modal__title {
font-size: 1.5em;
margin: 0 0 1em 0;
}
.overlay {
position: fixed;
width: 100%;
height: 100vh;
top: 0;
left: 0;
background: rgba(0,0,0,0.5);
opacity: 0;
pointer-events: none;
transition: opacity 0.4s;
}
.modal--open + .overlay {
pointer-events: visible;
opacity: 1;
}
.dual {
width: 90vw;
margin: 8em auto 25vh;
position: relative;
}
.dual__inner {
position: absolute;
width: 100%;
height: 100%;
}
.dual__half {
position: absolute;
height: 100%;
width: 51%;
right: 0;
background: url(../img/4.jpg) no-repeat 50% 0%;
background-size: cover;
}
.dual__content {
position: relative;
width: 50%;
display: flex;
flex-direction: column;
justify-content: center;
min-height: 500px;
padding: 1.5em 2.5em;
font-size: 1.75em;
line-height: 1.4;
color: #8a7c78;
}
.js .dual__content {
opacity: 0;
pointer-events: none;
transform: translate3d(60px,0,0);
transition: opacity 0.55s 0.4s, transform 0.55s 0.4s;
}
.dual__content.dual__content--show {
opacity: 1;
pointer-events: visible;
transform: translate3d(0,0,0);
}
.author {
margin: 1em 0 0 0;
display: block;
font-size: 0.65em;
}
.media {
width: 90%;
max-width: 800px;
position: relative;
}
.media__inner {
display: block;
position: relative;
}
.media__image {
display: block;
max-width: 100%;
}
.media__toolbar {
position: absolute;
top: 0;
right: 0;
height: 100%;
padding: 2.15em;
display: flex;
overflow: hidden;
flex-direction: column;
justify-content: flex-end;
}
.js .btn--tool {
opacity: 0;
pointer-events: none;
transform: translate3d(200%,0,0);
transition: opacity 0.6s 0.28s, transform 0.6s 0.28s;
transition-timing-function: ease, cubic-bezier(0.785, 0.135, 0.15, 0.86);
}
.js .btn--tool:nth-child(2) {
transition-delay: 0.32s;
}
.js .btn--tool:nth-child(3) {
transition-delay: 0.34s;
}
.media__toolbar--show .btn--tool{
opacity: 1;
pointer-events: visible;
transform: translate3d(0,0,0);
}
/* Demo themes */
.demo-menu { background: #4f4f54; color: #828282; }
.demo-menu a { color: #c5c149; }
.demo-menu a:hover, .demo-menu a:focus { color: #fcf652; }
.demo-split { background: #ff784a; color: #fff; }
.js .demo-split.loading::before { background: #fff; }
.js .demo-split.loading::after { background: #ff784a; }
.demo-form { background: #767df9; color: #252527; }
.demo-form a:not(.pater) { color: #fff; }
.demo-form a:hover, .demo-form a:focus { color: #252527; }
.demo-modal { background: #6f6f6f; color: #bfbfbf; }
@media screen and (max-width: 56.250em) {
.btn--tool {
font-size: 3vw;
}
.media__toolbar {
padding: 3.5vw;
}
}
@media screen and (max-width: 50em) {
.codrops-header__title,
.codrops-header__tagline {
width: 100%;
padding: 1em 0 0;
}
.demo {
font-size: 1em;
margin: 0.5em 1em 0.25em 0;
}
.content--full {
min-height: 0;
}
.content__title {
font-size: 2em;
}
.content__title--medium {
font-size: 1.75em;
}
.content__title--small {
font-size: 1.15em;
}
.content__title--half {
width: 100%;
}
.content__title--right {
padding: 0 5vw;
}
.content__title--left {
margin-left: 0;
}
.flexy--row {
flex-direction: column;
}
.form__title,
.form__section {
font-size: 0.85em;
}
.triggers {
width: 100%;
border: 0;
padding: 0;
font-size: 0.85em;
}
.modal {
font-size: 0.85em;
}
.menu .menu__link {
font-size: 2em;
}
.box {
padding: 5vw;
font-size: 0.85em;
}
.box--border {
border: none;
}
.dual__content {
min-height: 200px;
font-size: 0.85em;
padding: 1em;
}
.dual {
margin-top: 0;
}
}
| {
"pile_set_name": "Github"
} |
#ifndef CAFFE_CROP_LAYER_HPP_
#define CAFFE_CROP_LAYER_HPP_
#include <utility>
#include <vector>
#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"
namespace caffe {
/**
* @brief Takes a Blob and crop it, to the shape specified by the second input
* Blob, across all dimensions after the specified axis.
*
* TODO(dox): thorough documentation for Forward, Backward, and proto params.
*/
template <typename Dtype>
class CropLayer : public Layer<Dtype> {
 public:
  explicit CropLayer(const LayerParameter& param)
      : Layer<Dtype>(param) {}
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);

  virtual inline const char* type() const { return "Crop"; }
  // Exactly two bottoms: the blob to crop and the blob providing the
  // target shape.  Exactly one top: the cropped result.
  virtual inline int ExactNumBottomBlobs() const { return 2; }
  virtual inline int ExactNumTopBlobs() const { return 1; }

 protected:
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);

  // Per-axis crop offsets, computed in Reshape.
  // NOTE(review): member lacks the trailing-underscore convention used by
  // src_strides_/dest_strides_ below; renaming would also require touching
  // the .cpp/.cu implementation files, so it is only flagged here.
  Blob<int> offsets;
  Blob<int> src_strides_;
  Blob<int> dest_strides_;

 private:
  // Recursive copy function.  `is_forward` selects bottom->top copy
  // (forward pass) vs. top->bottom (backward pass).
  void crop_copy(const vector<Blob<Dtype>*>& bottom,
               const vector<Blob<Dtype>*>& top,
               const int* offsets,
               vector<int> indices,
               int cur_dim,
               const Dtype* src_data,
               Dtype* dest_data,
               bool is_forward);

  // Recursive copy function: this is similar to crop_copy() but loops over all
  // but the last two dimensions to allow for ND cropping while still relying on
  // a CUDA kernel for the innermost two dimensions for performance reasons. An
  // alternative implementation could rely on the kernel more by passing
  // offsets, but this is problematic because of its variable length.
  // Since in the standard (N,C,W,H) case N,C are usually not cropped a speedup
  // could be achieved by not looping the application of the copy_kernel around
  // these dimensions.
  void crop_copy_gpu(const vector<Blob<Dtype>*>& bottom,
                const vector<Blob<Dtype>*>& top,
                const vector<int>& offsets,
                vector<int> indices,
                int cur_dim,
                const Dtype* src_data,
                Dtype* dest_data,
                bool is_forward);
};
} // namespace caffe
#endif // CAFFE_CROP_LAYER_HPP_
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.resource.address.wsdraft;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static java.util.Arrays.asList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.kaazing.gateway.resource.address.ResourceAddress.NEXT_PROTOCOL;
import static org.kaazing.gateway.resource.address.ResourceAddress.QUALIFIER;
import static org.kaazing.gateway.resource.address.ResourceAddress.TRANSPORT;
import static org.kaazing.gateway.resource.address.ResourceAddress.TRANSPORT_URI;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.CODEC_REQUIRED;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.INACTIVITY_TIMEOUT;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.LIGHTWEIGHT;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.MAX_MESSAGE_SIZE;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.REQUIRED_PROTOCOLS;
import static org.kaazing.gateway.resource.address.ws.WsResourceAddress.SUPPORTED_PROTOCOLS;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.kaazing.gateway.resource.address.ResourceAddress;
/**
 * Unit tests for {@link WsDraftSslResourceAddressFactorySpi}: scheme name,
 * URI validation, default vs. explicit option handling, and the implicit
 * https transport derived from the ws-draft+ssl address.
 */
public class WsDraftSslResourceAddressFactorySpiTest {

    private WsDraftSslResourceAddressFactorySpi addressFactorySpi;
    private String addressURI;
    private Map<String, Object> options;

    @Before
    public void before() {
        // Fresh factory plus a full set of "ws.*" options; individual tests
        // create addresses with or without this option map.
        addressFactorySpi = new WsDraftSslResourceAddressFactorySpi();
        addressURI = "ws-draft+ssl://localhost:2020/";
        options = new HashMap<>();
        options.put("ws.nextProtocol", "custom");
        options.put("ws.qualifier", "random");
        options.put("ws.codecRequired", FALSE);
        options.put("ws.lightweight", TRUE);
        options.put("ws.extensions", asList("x-kaazing-alpha", "x-kaazing-beta"));
        options.put("ws.maxMessageSize", 1024);
        options.put("ws.inactivityTimeout", SECONDS.toMillis(5));
        options.put("ws.supportedProtocols", new String[] { "amqp/0.91", "amqp/1.0" });
        options.put("ws.requiredProtocols", new String[] { "amqp/0.91", "amqp/1.0" });
        options.put("ws.transport", "https://localhost:2121/");
    }

    @Test
    public void shouldHaveWsDraft75SslSchemeName() throws Exception {
        assertEquals("ws-draft+ssl", addressFactorySpi.getSchemeName());
    }

    @Test (expected = IllegalArgumentException.class)
    public void shouldRequireWsDraft75SchemeName() throws Exception {
        // Any other scheme must be rejected.
        addressFactorySpi.newResourceAddress("test://opaque");
    }

    @Test (expected = IllegalArgumentException.class)
    public void shouldRequireExplicitPath() throws Exception {
        // A trailing path component is mandatory for ws-draft+ssl addresses.
        addressFactorySpi.newResourceAddress("ws-draft+ssl://localhost:443");
    }

    @Test
    public void shouldNotRequireExplicitPort() throws Exception {
        // Omitted port defaults to 443 (the wss/https default).
        ResourceAddress address = addressFactorySpi.newResourceAddress("ws-draft+ssl://localhost/");
        URI location = address.getResource();
        // NOTE(review): JUnit's assertEquals takes (expected, actual); the
        // arguments here are reversed, which only affects failure messages.
        assertEquals(location.getPort(), 443);
    }

    @Test
    public void shouldCreateAddressWithDefaultOptions() throws Exception {
        // No option map: every ws.* option falls back to its documented default.
        ResourceAddress address = addressFactorySpi.newResourceAddress(addressURI);
        assertNull(address.getOption(NEXT_PROTOCOL));
        assertNull(address.getOption(QUALIFIER));
        assertNull(address.getOption(TRANSPORT));
        assertTrue(address.getOption(CODEC_REQUIRED));
        assertFalse(address.getOption(LIGHTWEIGHT));
        assertEquals(0, address.getOption(MAX_MESSAGE_SIZE).intValue());
        assertEquals(0L, address.getOption(INACTIVITY_TIMEOUT).longValue());
        assertEmpty(address.getOption(SUPPORTED_PROTOCOLS));
        assertEmpty(address.getOption(REQUIRED_PROTOCOLS));
    }

    @Test
    public void shouldCreateAddressWithOptions() {
        // Full option map: each ws.* entry overrides its default.
        ResourceAddress address = addressFactorySpi.newResourceAddress(addressURI, options);
        assertEquals("custom", address.getOption(NEXT_PROTOCOL));
        assertEquals("random", address.getOption(QUALIFIER));
        assertNull(address.getOption(TRANSPORT));
        assertFalse(address.getOption(CODEC_REQUIRED));
        assertTrue(address.getOption(LIGHTWEIGHT));
        assertEquals(1024, address.getOption(MAX_MESSAGE_SIZE).intValue());
        assertEquals(SECONDS.toMillis(5), address.getOption(INACTIVITY_TIMEOUT).longValue());
        assertArrayEquals(new String[] { "amqp/0.91", "amqp/1.0" }, address.getOption(SUPPORTED_PROTOCOLS));
        assertArrayEquals(new String[] { "amqp/0.91", "amqp/1.0" }, address.getOption(REQUIRED_PROTOCOLS));
    }

    @Test
    public void shouldCreateAddressWithDefaultTransport() throws Exception {
        // Without ws.transport, the https transport mirrors the address's
        // host and port.
        ResourceAddress address = addressFactorySpi.newResourceAddress(addressURI);
        assertNotNull(address.getOption(TRANSPORT_URI));
        assertEquals("https://localhost:2020/", address.getOption(TRANSPORT_URI));
    }

    @Test
    public void shouldCreateAddressWithTransport() throws Exception {
        // An explicit ws.transport option wins over the derived default.
        ResourceAddress address = addressFactorySpi.newResourceAddress(addressURI, options);
        assertNotNull(address.getOption(TRANSPORT_URI));
        assertEquals("https://localhost:2121/", address.getOption(TRANSPORT_URI));
    }

    // Passes when the protocol array is null or zero-length.
    private void assertEmpty(String[] objects) {
        if (objects != null) {
            assertEquals(0, objects.length);
        }
    }
}
| {
"pile_set_name": "Github"
} |
{
"id" : "3cb8ad8e-bf64-3779-b249-1c1ca33c783a",
"request" : {
"url" : "/rest/api/1.0/projects/TESTP/repos/empty-repo-test/branches/default",
"method" : "GET"
},
"response" : {
"status" : 404,
"bodyFileName" : "body-branches-default-C98AD.json",
"headers" : {
"X-AREQUESTID" : "@190KU8Bx1085x620x0",
"X-ASEN" : "SEN-L9817337",
"X-AUSERID" : "1",
"X-AUSERNAME" : "vivek",
"Cache-Control" : "no-cache, no-transform",
"Vary" : "X-AUSERNAME,Accept-Encoding",
"Transfer-Encoding" : "chunked",
"X-Content-Type-Options" : "nosniff",
"Content-Type" : "application/json;charset=UTF-8",
"Date" : "Wed, 21 Jun 2017 18:05:02 GMT"
}
},
"uuid" : "3cb8ad8e-bf64-3779-b249-1c1ca33c783a"
} | {
"pile_set_name": "Github"
} |
{
"created_at": "2015-02-27T22:28:24.689435",
"description": "Asset pipeline for the Harp Web Server.",
"fork": false,
"full_name": "sintaxi/terraform",
"language": "JavaScript",
"updated_at": "2015-02-27T23:42:38.208749"
} | {
"pile_set_name": "Github"
} |
{
"info" : {
"version" : 1,
"author" : "xcode"
},
"colors" : [
{
"idiom" : "universal",
"color" : {
"color-space" : "srgb",
"components" : {
"red" : "255",
"alpha" : "1.000",
"blue" : "255",
"green" : "255"
}
}
},
{
"idiom" : "universal",
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"color" : {
"color-space" : "srgb",
"components" : {
"red" : "40",
"alpha" : "1.000",
"blue" : "74",
"green" : "54"
}
}
}
]
} | {
"pile_set_name": "Github"
} |
/**
@defgroup boards_nucleo-f303re STM32 Nucleo-F303RE
@ingroup boards_common_nucleo64
@brief Support for the STM32 Nucleo-F303RE
## Overview
The Nucleo-F303RE is a board from ST's Nucleo family supporting an ARM Cortex-M4
STM32F303RE
microcontroller with 64KB of RAM and 512KB of ROM.
## Hardware


### MCU
| MCU | STM32F303RE |
|:---------- |:----------------- |
| Family | ARM Cortex-M4 |
| Vendor | ST Microelectronics |
| RAM       | 64KB |
| Flash     | 512KB |
| Frequency | up to 72MHz |
| FPU | yes |
| Timers | 13 (9x 16-bit, 1x 32-bit [TIM2], 1x Systick, 2x watchdog) |
| ADCs | 4x 12-bit (22 channels) |
| UARTs | 5 |
| SPIs | 4 |
| I2Cs | 3 |
| RTC | 1 |
| CAN | 1 |
| USB | 1 |
| Vcc | 2.0V - 3.6V |
| Datasheet | [Datasheet](https://www.st.com/resource/en/datasheet/stm32f303re.pdf) |
| Reference Manual | [Reference Manual](http://www.st.com/web/en/resource/technical/document/reference_manual/DM00043574.pdf) |
| Programming Manual | [Programming Manual](http://www.st.com/web/en/resource/technical/document/programming_manual/DM00046982.pdf) |
| Board Manual | [Board Manual](http://www.st.com/st-web-ui/static/active/en/resource/technical/document/user_manual/DM00105823.pdf) |
## Implementation Status
| Device | ID | Supported | Comments |
|:------------- |:------------- |:------------- |:------------- |
| MCU | STM32F303RE | partly | Energy saving modes not fully utilized |
| Low-level driver | GPIO | yes | |
| | PWM | one PWM | |
| | UART | three UART | |
| | I2C | two I2C | |
| | SPI | two SPI | |
| | USB | no | |
| | Timer | one 32 timer | |
## Flashing the device
The ST Nucleo-F303RE board includes an on-board ST-LINK V2 programmer.
The easiest way to program the board is to use OpenOCD. Once you have
installed OpenOCD (look [here](https://github.com/RIOT-OS/RIOT/wiki/OpenOCD)
for installation instructions), you can flash the board simply by typing.
```
BOARD=nucleo-f303re make flash
```
and debug via GDB by simply typing
```
BOARD=nucleo-f303re make debug
```
## Supported Toolchains
For using the ST Nucleo-F303RE board we strongly recommend the usage of the
[GNU Tools for ARM Embedded Processors](https://launchpad.net/gcc-arm-embedded)
toolchain.
*/
| {
"pile_set_name": "Github"
} |
var jsp = require("./parse-js"),
pro = require("./process"),
slice = jsp.slice,
member = jsp.member,
PRECEDENCE = jsp.PRECEDENCE,
OPERATORS = jsp.OPERATORS;
// Extra (more aggressive) squeeze pass.  Currently its only rewrite is
// replacing argument-less foo.toString() calls with the shorter,
// behaviorally-equivalent `foo+""`.
function ast_squeeze_more(ast) {
        var w = pro.ast_walker(), walk = w.walk;
        return w.with_walkers({
                "call": function(expr, args) {
                        // Match exactly <something>.toString() with zero args.
                        if (expr[0] == "dot" && expr[2] == "toString" && args.length == 0) {
                                // foo.toString() ==> foo+""
                                return [ "binary", "+", expr[1], [ "string", "" ]];
                        }
                        // No return value for other calls — presumably the
                        // walker then processes the node normally (see
                        // ast_walker in process.js); confirm before relying
                        // on this.
                }
        }, function() {
                return walk(ast);
        });
};
exports.ast_squeeze_more = ast_squeeze_more;
| {
"pile_set_name": "Github"
} |
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../../../shared/process/exec', __FILE__)
# Process.exec shares its contract with the :process_exec shared examples;
# all assertions live in shared/process/exec.
describe "Process.exec" do
  it_behaves_like :process_exec, :exec, Process
end
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
#include "tensorflow/contrib/tensor_forest/kernels/v4/params.h"
#include "tensorflow/contrib/tensor_forest/proto/tensor_forest_params.pb.h"
#include "tensorflow/core/platform/test.h"
namespace {
using tensorflow::tensorforest::DepthDependentParam;
using tensorflow::tensorforest::ResolveParam;
// A constant-valued param must resolve to the same value at every depth.
TEST(ParamsTest, TestConstant) {
  DepthDependentParam param;
  param.set_constant_value(10.0);
  ASSERT_EQ(ResolveParam(param, 0), 10.0);
  ASSERT_EQ(ResolveParam(param, 100), 10.0);
}
// A linear param resolves to y_intercept + slope * depth, clamped to the
// [min_val, max_val] range (as the expected values below demonstrate).
TEST(ParamsTest, TestLinear) {
  DepthDependentParam param;
  auto* linear = param.mutable_linear();
  linear->set_y_intercept(100.0);
  linear->set_slope(-10.0);
  linear->set_min_val(23.0);
  linear->set_max_val(90.0);
  ASSERT_EQ(ResolveParam(param, 0), 90);  // 100 - 0, clamped down to max_val
  ASSERT_EQ(ResolveParam(param, 1), 90);  // 100 - 10, exactly max_val
  ASSERT_EQ(ResolveParam(param, 2), 80);  // 100 - 20, inside the range
  ASSERT_EQ(ResolveParam(param, 30), 23);  // 100 - 300, clamped up to min_val
}
// An exponential param resolves to
//   bias + multiplier * base^(depth_multiplier * depth)
// -- here 100 - 10^depth, as inferred from the expected values below.
TEST(ParamsTest, TestExponential) {
  DepthDependentParam param;
  auto* expo = param.mutable_exponential();
  expo->set_bias(100.0);
  expo->set_base(10.0);
  expo->set_multiplier(-1.0);
  expo->set_depth_multiplier(1.0);
  ASSERT_EQ(ResolveParam(param, 0), 99);  // 100 - 10^0
  ASSERT_EQ(ResolveParam(param, 1), 90);  // 100 - 10^1
  ASSERT_EQ(ResolveParam(param, 2), 0);  // 100 - 10^2
}
// A threshold param resolves to off_value for depths below the threshold
// and on_value at or above it (depth 5 == threshold yields on_value, so
// the boundary is inclusive).
TEST(ParamsTest, TestThreshold) {
  DepthDependentParam param;
  auto* threshold = param.mutable_threshold();
  threshold->set_on_value(100.0);
  threshold->set_off_value(10.0);
  threshold->set_threshold(5.0);
  ASSERT_EQ(ResolveParam(param, 0), 10);
  ASSERT_EQ(ResolveParam(param, 4), 10);
  ASSERT_EQ(ResolveParam(param, 5), 100);
  ASSERT_EQ(ResolveParam(param, 6), 100);
}
} // namespace
| {
"pile_set_name": "Github"
} |
/*
* This file is part of the SDWebImage package.
* (c) Olivier Poitrey <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
#import "SDWebImageCompat.h"
#import "SDWebImageManager.h"
/**
* Integrates SDWebImage async downloading and caching of remote images with UIImageView.
*
* Usage with a UITableViewCell sub-class:
*
* @code
#import <SDWebImage/UIImageView+WebCache.h>
...
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath
{
static NSString *MyIdentifier = @"MyIdentifier";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:MyIdentifier];
if (cell == nil) {
cell = [[[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:MyIdentifier]
autorelease];
}
// Here we use the provided sd_setImageWithURL: method to load the web image
// Ensure you use a placeholder image otherwise cells will be initialized with no image
[cell.imageView sd_setImageWithURL:[NSURL URLWithString:@"http://example.com/image.jpg"]
placeholderImage:[UIImage imageNamed:@"placeholder"]];
cell.textLabel.text = @"My Text";
return cell;
}
* @endcode
*/
@interface UIImageView (WebCache)
/**
 * Get the current image URL.
 *
 * Note that because of the limitations of categories this property can get out of sync
 * if you use sd_setImage: directly.
 */
- (NSURL *)sd_imageURL;
/**
 * Set the imageView `image` with an `url`.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 */
- (void)sd_setImageWithURL:(NSURL *)url;
/**
 * Set the imageView `image` with an `url` and a placeholder.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @see sd_setImageWithURL:placeholderImage:options:
 */
- (void)sd_setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder;
/**
 * Set the imageView `image` with an `url`, placeholder and custom options.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 */
- (void)sd_setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options;
/**
 * Set the imageView `image` with an `url`.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 * and takes the requested UIImage as first parameter. In case of error the image parameter
 * is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 * indicating if the image was retrieved from the local cache or from the network.
 * The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(NSURL *)url completed:(SDWebImageCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url` and a placeholder.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 * and takes the requested UIImage as first parameter. In case of error the image parameter
 * is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 * indicating if the image was retrieved from the local cache or from the network.
 * The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder completed:(SDWebImageCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url`, placeholder and custom options.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 * @param completedBlock A block called when operation has been completed. This block has no return value
 * and takes the requested UIImage as first parameter. In case of error the image parameter
 * is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 * indicating if the image was retrieved from the local cache or from the network.
 * The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options completed:(SDWebImageCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url`, placeholder, custom options and
 * progress/completion callbacks.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 * @param progressBlock A block called while image is downloading
 * @param completedBlock A block called when operation has been completed. This block has no return value
 * and takes the requested UIImage as first parameter. In case of error the image parameter
 * is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 * indicating if the image was retrieved from the local cache or from the network.
 * The fourth parameter is the original image url.
 */
- (void)sd_setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options progress:(SDWebImageDownloaderProgressBlock)progressBlock completed:(SDWebImageCompletionBlock)completedBlock;
/**
 * Set the imageView `image` with an `url` and an optional placeholder image.
 *
 * The download is asynchronous and cached.
 *
 * @param url The url for the image.
 * @param placeholder The image to be set initially, until the image request finishes.
 * @param options The options to use when downloading the image. @see SDWebImageOptions for the possible values.
 * @param progressBlock A block called while image is downloading
 * @param completedBlock A block called when operation has been completed. This block has no return value
 * and takes the requested UIImage as first parameter. In case of error the image parameter
 * is nil and the second parameter may contain an NSError. The third parameter is a Boolean
 * indicating if the image was retrieved from the local cache or from the network.
 * The fourth parameter is the original image url.
 */
- (void)sd_setImageWithPreviousCachedImageWithURL:(NSURL *)url andPlaceholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options progress:(SDWebImageDownloaderProgressBlock)progressBlock completed:(SDWebImageCompletionBlock)completedBlock;
/**
 * Download an array of images and starts them in an animation loop
 *
 * @param arrayOfURLs An array of NSURL
 */
- (void)sd_setAnimationImagesWithURLs:(NSArray *)arrayOfURLs;
/**
 * Cancel the current download
 */
- (void)sd_cancelCurrentImageLoad;
- (void)sd_cancelCurrentAnimationImagesLoad;
@end
/**
 * Deprecated method names, kept only for backward compatibility.
 * Each deprecation message names the `sd_`-prefixed replacement selector.
 */
@interface UIImageView (WebCacheDeprecated)
- (NSURL *)imageURL __deprecated_msg("Use `sd_imageURL`");
- (void)setImageWithURL:(NSURL *)url __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:`");
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:placeholderImage:`");
// Fixed: message previously named the selector without its trailing colon
// ("...options"), inconsistent with every other message in this category.
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:placeholderImage:options:`");
- (void)setImageWithURL:(NSURL *)url completed:(SDWebImageCompletedBlock)completedBlock __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:completed:`");
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder completed:(SDWebImageCompletedBlock)completedBlock __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:placeholderImage:completed:`");
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options completed:(SDWebImageCompletedBlock)completedBlock __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:placeholderImage:options:completed:`");
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder options:(SDWebImageOptions)options progress:(SDWebImageDownloaderProgressBlock)progressBlock completed:(SDWebImageCompletedBlock)completedBlock __deprecated_msg("Method deprecated. Use `sd_setImageWithURL:placeholderImage:options:progress:completed:`");
- (void)setAnimationImagesWithURLs:(NSArray *)arrayOfURLs __deprecated_msg("Use `sd_setAnimationImagesWithURLs:`");
- (void)cancelCurrentArrayLoad __deprecated_msg("Use `sd_cancelCurrentAnimationImagesLoad`");
- (void)cancelCurrentImageLoad __deprecated_msg("Use `sd_cancelCurrentImageLoad`");
@end
| {
"pile_set_name": "Github"
} |
<?php namespace Xethron\MigrationsGenerator\Syntax;
/**
* Class AddToTable
* @package Xethron\MigrationsGenerator\Syntax
*/
class AddToTable extends Table {

	/**
	 * Build the migration statement that adds a single column.
	 *
	 * @param array $field Column description: 'field', 'type', and
	 *                     optionally 'args' and 'decorators'.
	 * @return string A `$table->type(...)...;` statement.
	 */
	protected function getItem(array $field)
	{
		$column = $field['field'];

		// Composite (multi-column) fields become a PHP array literal,
		// a non-empty name becomes a quoted string, and an empty name
		// is passed through as null (rendered as an empty argument).
		if (is_array($column)) {
			$column = "['". implode("','", $column) ."']";
		} elseif ($column) {
			$column = "'$column'";
		} else {
			$column = null;
		}

		// Extra arguments (e.g. a length) are appended after the name.
		$arguments = isset($field['args']) ? ', ' . $field['args'] : '';

		$output = sprintf("\$table->%s(%s%s)", $field['type'], $column, $arguments);

		// Chain any decorators (->nullable(), ->default(), ...) onto the call.
		if (isset($field['decorators'])) {
			$output .= $this->addDecorators( $field['decorators'] );
		}

		return $output . ';';
	}
}
| {
"pile_set_name": "Github"
} |
#include "mbed.h"
#include "test_env.h"
void ticker_callback_1(void);
void ticker_callback_2(void);
DigitalOut led0(LED1);
DigitalOut led1(LED2);
Ticker ticker;
// Print a single character (default '*') and flush immediately so the
// host-side test harness sees each tick as soon as it happens.
void print_char(char c = '*')
{
    printf("%c", c);
    fflush(stdout);
}
// Second ticker callback: detaches itself, re-arms the ticker to fire
// ticker_callback_1 one second later, toggles LED2 and reports a tick.
// The two callbacks thus alternate, each firing once per second.
void ticker_callback_2(void)
{
    ticker.detach();
    ticker.attach(ticker_callback_1, 1.0);
    led1 = !led1;
    print_char();
}
// First ticker callback: detaches itself, re-arms the ticker to fire
// ticker_callback_2 one second later, toggles LED1 and reports a tick.
// Counterpart of ticker_callback_2; together they alternate every second.
void ticker_callback_1(void)
{
    ticker.detach();
    ticker.attach(ticker_callback_2, 1.0);
    led0 = !led0;
    print_char();
}
// Entry point: declares the host-test metadata (15 s timeout, wait_us_auto
// validator -- see MBED_HOSTTEST macros), arms the first callback, then
// spins forever; all further work happens in the ticker callbacks.
int main(void)
{
    MBED_HOSTTEST_TIMEOUT(15);
    MBED_HOSTTEST_SELECT(wait_us_auto);
    MBED_HOSTTEST_DESCRIPTION(Ticker Two callbacks);
    MBED_HOSTTEST_START("MBED_34");
    ticker.attach(ticker_callback_1, 1.0);
    while(1);
}
| {
"pile_set_name": "Github"
} |
//////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// File: d3dx9shader.h
// Content: D3DX Shader APIs
//
//////////////////////////////////////////////////////////////////////////////
#include "d3dx9.h"
#ifndef __D3DX9SHADER_H__
#define __D3DX9SHADER_H__
//---------------------------------------------------------------------------
// D3DXTX_VERSION:
// --------------
// Version token used to create a procedural texture filler in effects
// Used by D3DXFill[]TX functions
//---------------------------------------------------------------------------
#define D3DXTX_VERSION(_Major,_Minor) (('T' << 24) | ('X' << 16) | ((_Major) << 8) | (_Minor))
//----------------------------------------------------------------------------
// D3DXSHADER flags:
// -----------------
// D3DXSHADER_DEBUG
// Insert debug file/line/type/symbol information.
//
// D3DXSHADER_SKIPVALIDATION
// Do not validate the generated code against known capabilities and
// constraints. This option is only recommended when compiling shaders
// you KNOW will work. (ie. have compiled before without this option.)
// Shaders are always validated by D3D before they are set to the device.
//
// D3DXSHADER_SKIPOPTIMIZATION (valid for D3DXCompileShader calls only)
// Instructs the compiler to skip optimization steps during code generation.
// Unless you are trying to isolate a problem in your code, and suspect the
// compiler, using this option is not recommended.
//
// D3DXSHADER_PACKMATRIX_ROWMAJOR
// Unless explicitly specified, matrices will be packed in row-major order
// on input and output from the shader.
//
// D3DXSHADER_PACKMATRIX_COLUMNMAJOR
// Unless explicitly specified, matrices will be packed in column-major
// order on input and output from the shader. This is generally more
// efficient, since it allows vector-matrix multiplication to be performed
// using a series of dot-products.
//----------------------------------------------------------------------------
#define D3DXSHADER_DEBUG (1 << 0)
#define D3DXSHADER_SKIPVALIDATION (1 << 2)
#define D3DXSHADER_SKIPOPTIMIZATION (1 << 3)
#define D3DXSHADER_PACKMATRIX_ROWMAJOR (1 << 4)
#define D3DXSHADER_PACKMATRIX_COLUMNMAJOR (1 << 5)
//----------------------------------------------------------------------------
// D3DXHANDLE:
// -----------
// Handle values used to efficiently reference shader and effect parameters.
// Strings can be used as handles. However, handles are not always strings.
//----------------------------------------------------------------------------
typedef LPCSTR D3DXHANDLE;
typedef D3DXHANDLE *LPD3DXHANDLE;
//----------------------------------------------------------------------------
// D3DXMACRO:
// ----------
// Preprocessor macro definition. The application passes in a NULL-terminated
// array of this structure to various D3DX APIs. This enables the application
// to #define tokens at runtime, before the file is parsed.
//----------------------------------------------------------------------------
typedef struct _D3DXMACRO
{
LPCSTR Name;
LPCSTR Definition;
} D3DXMACRO, *LPD3DXMACRO;
//----------------------------------------------------------------------------
// D3DXSEMANTIC:
//----------------------------------------------------------------------------
// One shader input/output element, described as a usage/index pair; filled
// in by D3DXGetShaderInputSemantics / D3DXGetShaderOutputSemantics below.
typedef struct _D3DXSEMANTIC
{
    UINT Usage;      // Element usage (presumably a D3DDECLUSAGE value -- confirm against d3d9types.h)
    UINT UsageIndex; // Index distinguishing multiple elements that share the same usage
} D3DXSEMANTIC, *LPD3DXSEMANTIC;
//----------------------------------------------------------------------------
// D3DXFRAGMENT_DESC:
//----------------------------------------------------------------------------
// Description of a shader fragment, as returned by
// ID3DXFragmentLinker::GetFragmentDesc below.
typedef struct _D3DXFRAGMENT_DESC
{
    LPCSTR Name;  // Fragment name
    DWORD Target; // Target token (presumably the shader model the fragment was built for -- TODO confirm)
} D3DXFRAGMENT_DESC, *LPD3DXFRAGMENT_DESC;
//----------------------------------------------------------------------------
// D3DXREGISTER_SET:
//----------------------------------------------------------------------------
// Register set a shader constant lives in; see
// D3DXCONSTANT_DESC::RegisterSet below.
typedef enum _D3DXREGISTER_SET
{
    D3DXRS_BOOL,    // Boolean registers
    D3DXRS_INT4,    // Four-component integer registers
    D3DXRS_FLOAT4,  // Four-component float registers
    D3DXRS_SAMPLER, // Sampler registers
    // force 32-bit size enum
    D3DXRS_FORCE_DWORD = 0x7fffffff
} D3DXREGISTER_SET, *LPD3DXREGISTER_SET;
//----------------------------------------------------------------------------
// D3DXPARAMETER_CLASS:
//----------------------------------------------------------------------------
typedef enum _D3DXPARAMETER_CLASS
{
D3DXPC_SCALAR,
D3DXPC_VECTOR,
D3DXPC_MATRIX_ROWS,
D3DXPC_MATRIX_COLUMNS,
D3DXPC_OBJECT,
D3DXPC_STRUCT,
// force 32-bit size enum
D3DXPC_FORCE_DWORD = 0x7fffffff
} D3DXPARAMETER_CLASS, *LPD3DXPARAMETER_CLASS;
//----------------------------------------------------------------------------
// D3DXPARAMETER_TYPE:
//----------------------------------------------------------------------------
typedef enum _D3DXPARAMETER_TYPE
{
D3DXPT_VOID,
D3DXPT_BOOL,
D3DXPT_INT,
D3DXPT_FLOAT,
D3DXPT_STRING,
D3DXPT_TEXTURE,
D3DXPT_TEXTURE1D,
D3DXPT_TEXTURE2D,
D3DXPT_TEXTURE3D,
D3DXPT_TEXTURECUBE,
D3DXPT_SAMPLER,
D3DXPT_SAMPLER1D,
D3DXPT_SAMPLER2D,
D3DXPT_SAMPLER3D,
D3DXPT_SAMPLERCUBE,
D3DXPT_PIXELSHADER,
D3DXPT_VERTEXSHADER,
D3DXPT_PIXELFRAGMENT,
D3DXPT_VERTEXFRAGMENT,
// force 32-bit size enum
D3DXPT_FORCE_DWORD = 0x7fffffff
} D3DXPARAMETER_TYPE, *LPD3DXPARAMETER_TYPE;
//----------------------------------------------------------------------------
// D3DXCONSTANTTABLE_DESC:
//----------------------------------------------------------------------------
typedef struct _D3DXCONSTANTTABLE_DESC
{
LPCSTR Creator; // Creator string
DWORD Version; // Shader version
UINT Constants; // Number of constants
} D3DXCONSTANTTABLE_DESC, *LPD3DXCONSTANTTABLE_DESC;
//----------------------------------------------------------------------------
// D3DXCONSTANT_DESC:
//----------------------------------------------------------------------------
typedef struct _D3DXCONSTANT_DESC
{
LPCSTR Name; // Constant name
D3DXREGISTER_SET RegisterSet; // Register set
UINT RegisterIndex; // Register index
UINT RegisterCount; // Number of registers occupied
D3DXPARAMETER_CLASS Class; // Class
D3DXPARAMETER_TYPE Type; // Component type
UINT Rows; // Number of rows
UINT Columns; // Number of columns
UINT Elements; // Number of array elements
UINT StructMembers; // Number of structure member sub-parameters
UINT Bytes; // Data size, in bytes
LPCVOID DefaultValue; // Pointer to default value
} D3DXCONSTANT_DESC, *LPD3DXCONSTANT_DESC;
//----------------------------------------------------------------------------
// ID3DXConstantTable:
//----------------------------------------------------------------------------
typedef interface ID3DXConstantTable ID3DXConstantTable;
typedef interface ID3DXConstantTable *LPD3DXCONSTANTTABLE;
// {9DCA3190-38B9-4fc3-92E3-39C6DDFB358B}
DEFINE_GUID( IID_ID3DXConstantTable,
0x9dca3190, 0x38b9, 0x4fc3, 0x92, 0xe3, 0x39, 0xc6, 0xdd, 0xfb, 0x35, 0x8b);
#undef INTERFACE
#define INTERFACE ID3DXConstantTable
DECLARE_INTERFACE_(ID3DXConstantTable, ID3DXBuffer)
{
// IUnknown
STDMETHOD(QueryInterface)(THIS_ REFIID iid, LPVOID *ppv) PURE;
STDMETHOD_(ULONG, AddRef)(THIS) PURE;
STDMETHOD_(ULONG, Release)(THIS) PURE;
// ID3DXBuffer
STDMETHOD_(LPVOID, GetBufferPointer)(THIS) PURE;
STDMETHOD_(DWORD, GetBufferSize)(THIS) PURE;
// Descs
STDMETHOD(GetDesc)(THIS_ D3DXCONSTANTTABLE_DESC *pDesc) PURE;
STDMETHOD(GetConstantDesc)(THIS_ D3DXHANDLE hConstant, D3DXCONSTANT_DESC *pConstantDesc, UINT *pCount) PURE;
// Handle operations
STDMETHOD_(D3DXHANDLE, GetConstant)(THIS_ D3DXHANDLE hConstant, UINT Index) PURE;
STDMETHOD_(D3DXHANDLE, GetConstantByName)(THIS_ D3DXHANDLE hConstant, LPCSTR pName) PURE;
STDMETHOD_(D3DXHANDLE, GetConstantElement)(THIS_ D3DXHANDLE hConstant, UINT Index) PURE;
// Set Constants
STDMETHOD(SetDefaults)(THIS_ LPDIRECT3DDEVICE9 pDevice) PURE;
STDMETHOD(SetValue)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, LPCVOID pData, UINT Bytes) PURE;
STDMETHOD(SetBool)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, BOOL b) PURE;
STDMETHOD(SetBoolArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST BOOL* pb, UINT Count) PURE;
STDMETHOD(SetInt)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, INT n) PURE;
STDMETHOD(SetIntArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST INT* pn, UINT Count) PURE;
STDMETHOD(SetFloat)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, FLOAT f) PURE;
STDMETHOD(SetFloatArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST FLOAT* pf, UINT Count) PURE;
STDMETHOD(SetVector)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXVECTOR4* pVector) PURE;
STDMETHOD(SetVectorArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXVECTOR4* pVector, UINT Count) PURE;
STDMETHOD(SetMatrix)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX* pMatrix) PURE;
STDMETHOD(SetMatrixArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX* pMatrix, UINT Count) PURE;
STDMETHOD(SetMatrixPointerArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX** ppMatrix, UINT Count) PURE;
STDMETHOD(SetMatrixTranspose)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX* pMatrix) PURE;
STDMETHOD(SetMatrixTransposeArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX* pMatrix, UINT Count) PURE;
STDMETHOD(SetMatrixTransposePointerArray)(THIS_ LPDIRECT3DDEVICE9 pDevice, D3DXHANDLE hConstant, CONST D3DXMATRIX** ppMatrix, UINT Count) PURE;
};
//----------------------------------------------------------------------------
// ID3DXFragmentLinker
//----------------------------------------------------------------------------
#undef INTERFACE
#define INTERFACE ID3DXFragmentLinker
// {D59D3777-C973-4a3c-B4B0-2A62CD3D8B40}
DEFINE_GUID(IID_ID3DXFragmentLinker,
0xd59d3777, 0xc973, 0x4a3c, 0xb4, 0xb0, 0x2a, 0x62, 0xcd, 0x3d, 0x8b, 0x40);
DECLARE_INTERFACE_(ID3DXFragmentLinker, IUnknown)
{
// IUnknown
STDMETHOD(QueryInterface)(THIS_ REFIID iid, LPVOID *ppv) PURE;
STDMETHOD_(ULONG, AddRef)(THIS) PURE;
STDMETHOD_(ULONG, Release)(THIS) PURE;
// ID3DXFragmentLinker
// fragment access and information retrieval functions
STDMETHOD(GetDevice)(THIS_ LPDIRECT3DDEVICE9* ppDevice) PURE;
STDMETHOD_(UINT, GetNumberOfFragments)(THIS) PURE;
STDMETHOD_(D3DXHANDLE, GetFragmentHandleByIndex)(THIS_ UINT Index) PURE;
STDMETHOD_(D3DXHANDLE, GetFragmentHandleByName)(THIS_ LPCSTR Name) PURE;
STDMETHOD(GetFragmentDesc)(THIS_ D3DXHANDLE Name, LPD3DXFRAGMENT_DESC FragDesc) PURE;
// add the fragments in the buffer to the linker
STDMETHOD(AddFragments)(THIS_ CONST DWORD *Fragments) PURE;
// Create a buffer containing the fragments. Suitable for saving to disk
STDMETHOD(GetAllFragments)(THIS_ LPD3DXBUFFER *ppBuffer) PURE;
STDMETHOD(GetFragment)(THIS_ D3DXHANDLE Name, LPD3DXBUFFER *ppBuffer) PURE;
STDMETHOD(LinkShader)(THIS_ LPCSTR pTarget, DWORD Flags, LPD3DXHANDLE rgFragmentHandles, UINT cFragments, LPD3DXBUFFER *ppBuffer, LPD3DXBUFFER *ppErrorMsgs) PURE;
STDMETHOD(LinkVertexShader)(THIS_ LPCSTR pTarget, DWORD Flags, LPD3DXHANDLE rgFragmentHandles, UINT cFragments, LPDIRECT3DVERTEXSHADER9 *pVShader, LPD3DXBUFFER *ppErrorMsgs) PURE;
STDMETHOD(ClearCache)(THIS) PURE;
};
//----------------------------------------------------------------------------
// D3DXINCLUDE_TYPE:
//----------------------------------------------------------------------------
typedef enum _D3DXINCLUDE_TYPE
{
D3DXINC_LOCAL,
D3DXINC_SYSTEM,
// force 32-bit size enum
D3DXINC_FORCE_DWORD = 0x7fffffff
} D3DXINCLUDE_TYPE, *LPD3DXINCLUDE_TYPE;
//----------------------------------------------------------------------------
// ID3DXInclude:
// -------------
// This interface is intended to be implemented by the application, and can
// be used by various D3DX APIs. This enables application-specific handling
// of #include directives in source files.
//
// Open()
// Opens an include file. If successful, it should fill in ppData and
// pBytes. The data pointer returned must remain valid until Close is
// subsequently called.
// Close()
// Closes an include file. If Open was successful, Close is guaranteed
// to be called before the API using this interface returns.
//----------------------------------------------------------------------------
typedef interface ID3DXInclude ID3DXInclude;
typedef interface ID3DXInclude *LPD3DXINCLUDE;
#undef INTERFACE
#define INTERFACE ID3DXInclude
DECLARE_INTERFACE(ID3DXInclude)
{
STDMETHOD(Open)(D3DXINCLUDE_TYPE IncludeType, LPCSTR pFileName, LPCVOID pParentData, LPCVOID *ppData, UINT *pBytes) PURE;
STDMETHOD(Close)(LPCVOID pData) PURE;
};
//////////////////////////////////////////////////////////////////////////////
// APIs //////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
#ifdef __cplusplus
extern "C" {
#endif //__cplusplus
//----------------------------------------------------------------------------
// D3DXAssembleShader:
// -------------------
// Assembles a shader.
//
// Parameters:
// pSrcFile
// Source file name
// hSrcModule
// Module handle. if NULL, current module will be used
// pSrcResource
// Resource name in module
// pSrcData
// Pointer to source code
// SrcDataLen
// Size of source code, in bytes
// pDefines
// Optional NULL-terminated array of preprocessor macro definitions.
// pInclude
// Optional interface pointer to use for handling #include directives.
// If this parameter is NULL, #includes will be honored when assembling
// from file, and will error when assembling from resource or memory.
// Flags
// See D3DXSHADER_xxx flags
// ppShader
// Returns a buffer containing the created shader. This buffer contains
// the assembled shader code, as well as any embedded debug info.
// (See D3DXGetShaderDebugInfo)
// ppErrorMsgs
// Returns a buffer containing a listing of errors and warnings that were
// encountered during assembly. If you are running in a debugger,
// these are the same messages you will see in your debug output.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXAssembleShaderFromFileA(
LPCSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
HRESULT WINAPI
D3DXAssembleShaderFromFileW(
LPCWSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
#ifdef UNICODE
#define D3DXAssembleShaderFromFile D3DXAssembleShaderFromFileW
#else
#define D3DXAssembleShaderFromFile D3DXAssembleShaderFromFileA
#endif
HRESULT WINAPI
D3DXAssembleShaderFromResourceA(
HMODULE hSrcModule,
LPCSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
HRESULT WINAPI
D3DXAssembleShaderFromResourceW(
HMODULE hSrcModule,
LPCWSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
#ifdef UNICODE
#define D3DXAssembleShaderFromResource D3DXAssembleShaderFromResourceW
#else
#define D3DXAssembleShaderFromResource D3DXAssembleShaderFromResourceA
#endif
HRESULT WINAPI
D3DXAssembleShader(
LPCSTR pSrcData,
UINT SrcDataLen,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
//----------------------------------------------------------------------------
// D3DXCompileShader:
// ------------------
// Compiles a shader.
//
// Parameters:
// pSrcFile
// Source file name.
// hSrcModule
// Module handle. if NULL, current module will be used.
// pSrcResource
// Resource name in module.
// pSrcData
// Pointer to source code.
// SrcDataLen
// Size of source code, in bytes.
// pDefines
// Optional NULL-terminated array of preprocessor macro definitions.
// pInclude
// Optional interface pointer to use for handling #include directives.
// If this parameter is NULL, #includes will be honored when compiling
// from file, and will error when compiling from resource or memory.
// pFunctionName
// Name of the entrypoint function where execution should begin.
// pTarget
// Instruction set to be used when generating code. Currently supported
// targets are "vs_1_1", "vs_2_0", "vs_2_sw", "ps_1_1", "ps_1_2", "ps_1_3",
// "ps_1_4", "ps_2_0", "ps_2_sw", "tx_1_0"
// Flags
// See D3DXSHADER_xxx flags.
// ppShader
// Returns a buffer containing the created shader. This buffer contains
// the compiled shader code, as well as any embedded debug and symbol
// table info. (See D3DXGetShaderDebugInfo, D3DXGetShaderConstantTable)
// ppErrorMsgs
// Returns a buffer containing a listing of errors and warnings that were
// encountered during the compile. If you are running in a debugger,
// these are the same messages you will see in your debug output.
// ppConstantTable
// Returns a ID3DXConstantTable object which can be used to set
// shader constants to the device. Alternatively, an application can
// parse the D3DXSHADER_CONSTANTTABLE block embedded as a comment within
// the shader.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXCompileShaderFromFileA(
LPCSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
LPCSTR pFunctionName,
LPCSTR pTarget,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs,
LPD3DXCONSTANTTABLE* ppConstantTable);
HRESULT WINAPI
D3DXCompileShaderFromFileW(
LPCWSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
LPCSTR pFunctionName,
LPCSTR pTarget,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs,
LPD3DXCONSTANTTABLE* ppConstantTable);
#ifdef UNICODE
#define D3DXCompileShaderFromFile D3DXCompileShaderFromFileW
#else
#define D3DXCompileShaderFromFile D3DXCompileShaderFromFileA
#endif
HRESULT WINAPI
D3DXCompileShaderFromResourceA(
HMODULE hSrcModule,
LPCSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
LPCSTR pFunctionName,
LPCSTR pTarget,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs,
LPD3DXCONSTANTTABLE* ppConstantTable);
HRESULT WINAPI
D3DXCompileShaderFromResourceW(
HMODULE hSrcModule,
LPCWSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
LPCSTR pFunctionName,
LPCSTR pTarget,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs,
LPD3DXCONSTANTTABLE* ppConstantTable);
#ifdef UNICODE
#define D3DXCompileShaderFromResource D3DXCompileShaderFromResourceW
#else
#define D3DXCompileShaderFromResource D3DXCompileShaderFromResourceA
#endif
HRESULT WINAPI
D3DXCompileShader(
LPCSTR pSrcData,
UINT SrcDataLen,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
LPCSTR pFunctionName,
LPCSTR pTarget,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs,
LPD3DXCONSTANTTABLE* ppConstantTable);
//----------------------------------------------------------------------------
// D3DXFindShaderComment:
// ----------------------
// Searches through a shader for a particular comment, denoted by a FourCC in
// the first DWORD of the comment. If the comment is not found, and no other
// error has occurred, S_FALSE is returned.
//
// Parameters:
// pFunction
// Pointer to the function DWORD stream
// FourCC
// FourCC used to identify the desired comment block.
// ppData
// Returns a pointer to the comment data (not including comment token
// and FourCC). Can be NULL.
// pSizeInBytes
// Returns the size of the comment data in bytes. Can be NULL.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXFindShaderComment(
CONST DWORD* pFunction,
DWORD FourCC,
LPCVOID* ppData,
UINT* pSizeInBytes);
//----------------------------------------------------------------------------
// D3DXGetShaderSemantics:
// -----------------------
// Gets semantics for all input elements referenced inside a given shader.
//
// Parameters:
// pFunction
// Pointer to the function DWORD stream
// pSemantics
// Pointer to an array of D3DXSEMANTIC structures. The function will
// fill this array with the semantics for each input element referenced
// inside the shader. This array is assumed to contain at least
// MAXD3DDECLLENGTH elements.
// pCount
// Returns the number of elements referenced by the shader
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXGetShaderInputSemantics(
CONST DWORD* pFunction,
D3DXSEMANTIC* pSemantics,
UINT* pCount);
HRESULT WINAPI
D3DXGetShaderOutputSemantics(
CONST DWORD* pFunction,
D3DXSEMANTIC* pSemantics,
UINT* pCount);
//----------------------------------------------------------------------------
// D3DXGetShaderSamplers:
// ----------------------
// Gets the names of all samplers referenced inside a given shader.
//
// pFunction
// Pointer to the function DWORD stream
// pSamplers
// Pointer to an array of LPCSTRs. The function will fill this array
// with pointers to the sampler names contained within pFunction, for
// each sampler referenced inside the shader. This array is assumed to
// contain at least 16 elements.
// pCount
// Returns the number of samplers referenced by the shader
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXGetShaderSamplers(
CONST DWORD* pFunction,
LPCSTR* pSamplers,
UINT* pCount);
//----------------------------------------------------------------------------
// D3DXGetShaderConstantTable:
// ---------------------------
// Gets shader constant table embedded inside shader. A constant table is
// generated by D3DXAssembleShader and D3DXCompileShader, and is embedded in
// the body of the shader.
//
// Parameters:
// pFunction
// Pointer to the function DWORD stream
// ppConstantTable
// Returns a ID3DXConstantTable object which can be used to set
// shader constants to the device. Alternatively, an application can
// parse the D3DXSHADER_CONSTANTTABLE block embedded as a comment within
// the shader.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXGetShaderConstantTable(
CONST DWORD* pFunction,
LPD3DXCONSTANTTABLE* ppConstantTable);
//----------------------------------------------------------------------------
// D3DXGetShaderDebugInfo:
// -----------------------
// Gets shader debug info. Debug info is generated by D3DXAssembleShader and
// D3DXCompileShader, and is embedded in the body of the shader.
//
// Parameters:
// pFunction
// Pointer to the function DWORD stream
// ppDebugInfo
// Buffer used to return debug info. For information about the layout
// of this buffer, see definition of D3DXSHADER_DEBUGINFO above.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXGetShaderDebugInfo(
CONST DWORD* pFunction,
LPD3DXBUFFER* ppDebugInfo);
//----------------------------------------------------------------------------
// D3DXGatherFragments:
// -------------------
// Assembles shader fragments into a buffer to be passed to a fragment linker.
// Fragments are generated for all fragments present in the file.
//
// Parameters:
// pSrcFile
// Source file name
// hSrcModule
// Module handle. if NULL, current module will be used
// pSrcResource
// Resource name in module
// pSrcData
// Pointer to source code
// SrcDataLen
// Size of source code, in bytes
// pDefines
// Optional NULL-terminated array of preprocessor macro definitions.
// pInclude
// Optional interface pointer to use for handling #include directives.
// If this parameter is NULL, #includes will be honored when assembling
// from file, and will error when assembling from resource or memory.
// Flags
// See D3DXSHADER_xxx flags
// ppShader
// Returns a buffer containing the created shader fragments. This buffer contains
// the assembled shader code, as well as any embedded debug info.
// ppErrorMsgs
// Returns a buffer containing a listing of errors and warnings that were
// encountered during assembly. If you are running in a debugger,
// these are the same messages you will see in your debug output.
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXGatherFragmentsFromFileA(
LPCSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
HRESULT WINAPI
D3DXGatherFragmentsFromFileW(
LPCWSTR pSrcFile,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
#ifdef UNICODE
#define D3DXGatherFragmentsFromFile D3DXGatherFragmentsFromFileW
#else
#define D3DXGatherFragmentsFromFile D3DXGatherFragmentsFromFileA
#endif
HRESULT WINAPI
D3DXGatherFragmentsFromResourceA(
HMODULE hSrcModule,
LPCSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
HRESULT WINAPI
D3DXGatherFragmentsFromResourceW(
HMODULE hSrcModule,
LPCWSTR pSrcResource,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
#ifdef UNICODE
#define D3DXGatherFragmentsFromResource D3DXGatherFragmentsFromResourceW
#else
#define D3DXGatherFragmentsFromResource D3DXGatherFragmentsFromResourceA
#endif
HRESULT WINAPI
D3DXGatherFragments(
LPCSTR pSrcData,
UINT SrcDataLen,
CONST D3DXMACRO* pDefines,
LPD3DXINCLUDE pInclude,
DWORD Flags,
LPD3DXBUFFER* ppShader,
LPD3DXBUFFER* ppErrorMsgs);
typedef ID3DXFragmentLinker *LPD3DXFRAGMENTLINKER;
//----------------------------------------------------------------------------
// D3DXCreateFragmentLinker:
// -------------------------
// Creates a fragment linker with a given cache size. The interface returned
// can be used to link together shader fragments. (both HLSL & ASM fragments)
//
// Parameters:
// pDevice
// Pointer of the device on which to create the effect
// ShaderCacheSize
// Size of the shader cache
// ppFragmentLinker
// pointer to a memory location to put the created interface pointer
//
//----------------------------------------------------------------------------
HRESULT WINAPI
D3DXCreateFragmentLinker(
LPDIRECT3DDEVICE9 pDevice,
UINT ShaderCacheSize,
LPD3DXFRAGMENTLINKER* ppFragmentLinker);
#ifdef __cplusplus
}
#endif //__cplusplus
//////////////////////////////////////////////////////////////////////////////
// Shader comment block layouts //////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////
//----------------------------------------------------------------------------
// D3DXSHADER_CONSTANTTABLE:
// -------------------------
// Shader constant information; included as an CTAB comment block inside
// shaders. All offsets are BYTE offsets from start of CONSTANTTABLE struct.
// Entries in the table are sorted by Name in ascending order.
//----------------------------------------------------------------------------
// Header of the embedded constant table. All DWORD "offset" fields in the
// structures below are BYTE offsets from the start of this struct.
typedef struct _D3DXSHADER_CONSTANTTABLE
{
    DWORD Size;             // sizeof(D3DXSHADER_CONSTANTTABLE)
    DWORD Creator;          // LPCSTR offset
    DWORD Version;          // shader version
    DWORD Constants;        // number of constants
    DWORD ConstantInfo;     // D3DXSHADER_CONSTANTINFO[Constants] offset
} D3DXSHADER_CONSTANTTABLE, *LPD3DXSHADER_CONSTANTTABLE;

// Description of a single shader constant in the table.
typedef struct _D3DXSHADER_CONSTANTINFO
{
    DWORD Name;             // LPCSTR offset
    WORD  RegisterSet;      // D3DXREGISTER_SET
    WORD  RegisterIndex;    // register number
    WORD  RegisterCount;    // number of registers
    WORD  Reserved;         // reserved
    DWORD TypeInfo;         // D3DXSHADER_TYPEINFO offset
    DWORD DefaultValue;     // offset of default value
} D3DXSHADER_CONSTANTINFO, *LPD3DXSHADER_CONSTANTINFO;

// Type description of a constant; recursive for struct-typed constants via
// StructMemberInfo, which points at an array of D3DXSHADER_STRUCTMEMBERINFO.
typedef struct _D3DXSHADER_TYPEINFO
{
    WORD  Class;            // D3DXPARAMETER_CLASS
    WORD  Type;             // D3DXPARAMETER_TYPE
    WORD  Rows;             // number of rows (matrices)
    WORD  Columns;          // number of columns (vectors and matrices)
    WORD  Elements;         // array dimension
    WORD  StructMembers;    // number of struct members
    DWORD StructMemberInfo; // D3DXSHADER_STRUCTMEMBERINFO[Members] offset
} D3DXSHADER_TYPEINFO, *LPD3DXSHADER_TYPEINFO;

// One member of a struct-typed constant.
typedef struct _D3DXSHADER_STRUCTMEMBERINFO
{
    DWORD Name;             // LPCSTR offset
    DWORD TypeInfo;         // D3DXSHADER_TYPEINFO offset
} D3DXSHADER_STRUCTMEMBERINFO, *LPD3DXSHADER_STRUCTMEMBERINFO;
#endif //__D3DX9SHADER_H__
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.base.test.util;
import android.content.Context;
import junit.framework.Assert;
import org.chromium.base.BaseChromiumApplication;
import org.chromium.base.CommandLine;
import org.chromium.base.test.BaseTestResult.PreTestHook;
import org.chromium.base.test.util.parameter.BaseParameter;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Provides annotations related to command-line flag handling.
*
* Uses of these annotations on a derived class will take precedence over uses on its base classes,
* so a derived class can add a command-line flag that a base class has removed (or vice versa).
* Similarly, uses of these annotations on a test method will take precedence over uses on the
* containing class.
*
* Note that this class should never be instantiated.
*/
public final class CommandLineFlags {
    /**
     * Adds command-line flags to the {@link org.chromium.base.CommandLine} for this test.
     */
    @Inherited
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.METHOD, ElementType.TYPE})
    public @interface Add {
        String[] value();
    }

    /**
     * Removes command-line flags from the {@link org.chromium.base.CommandLine} from this test.
     *
     * Note that this can only remove flags added via {@link Add} above.
     */
    @Inherited
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.METHOD, ElementType.TYPE})
    public @interface Remove {
        String[] value();
    }

    /**
     * Sets up the CommandLine with the appropriate flags.
     *
     * This will add the difference of the sets of flags specified by {@link CommandLineFlags.Add}
     * and {@link CommandLineFlags.Remove} to the {@link org.chromium.base.CommandLine}. Note that
     * trying to remove a flag set externally, i.e. by the command-line flags file, will not work.
     *
     * @param targetContext the instrumentation target context; must be non-null.
     * @param element the test method or class whose annotations are consulted.
     */
    public static void setUp(Context targetContext, AnnotatedElement element) {
        Assert.assertNotNull("Unable to get a non-null target context.", targetContext);
        CommandLine.reset();
        BaseChromiumApplication.initCommandLine(targetContext);
        for (String flag : getFlags(element)) {
            CommandLine.getInstance().appendSwitch(flag);
        }
    }

    /**
     * Computes the effective flag set for {@code element} by recursively collecting flags from
     * its ancestors (declaring class for a method, superclass for a class), then applying this
     * element's {@link Add} and {@link Remove} annotations on top, so more specific annotations
     * take precedence.
     */
    private static Set<String> getFlags(AnnotatedElement element) {
        // A method inherits from its declaring class; a class inherits from its superclass.
        AnnotatedElement parent = (element instanceof Method)
                ? ((Method) element).getDeclaringClass()
                : ((Class<?>) element).getSuperclass();
        Set<String> flags = (parent == null) ? new HashSet<String>() : getFlags(parent);
        if (element.isAnnotationPresent(CommandLineFlags.Add.class)) {
            flags.addAll(
                    Arrays.asList(element.getAnnotation(CommandLineFlags.Add.class).value()));
        }
        if (element.isAnnotationPresent(CommandLineFlags.Remove.class)) {
            List<String> flagsToRemove =
                    Arrays.asList(element.getAnnotation(CommandLineFlags.Remove.class).value());
            for (String flagToRemove : flagsToRemove) {
                // If your test fails here, you have tried to remove a command-line flag via
                // CommandLineFlags.Remove that was loaded into CommandLine via something other
                // than CommandLineFlags.Add (probably the command-line flag file).
                Assert.assertFalse("Unable to remove command-line flag \"" + flagToRemove + "\".",
                        CommandLine.getInstance().hasSwitch(flagToRemove));
            }
            flags.removeAll(flagsToRemove);
        }
        return flags;
    }

    // Never instantiated: this class only hosts annotations and static helpers.
    private CommandLineFlags() {}

    /** Returns a hook that applies this class's flag handling before each test method runs. */
    public static PreTestHook getRegistrationHook() {
        return new PreTestHook() {
            @Override
            public void run(Context targetContext, Method testMethod) {
                CommandLineFlags.setUp(targetContext, testMethod);
            }
        };
    }

    /**
     * Instructs the test runner to execute the test with modified command-line flags.
     * Flags to add are specified using 'stringArray' of argument named 'add',
     * and flags to remove -- in the argument named 'remove'. A parameter without arguments
     * instructs to run the test with default command-line flags.
     *
     * Example:
     * @ParameterizedTest.Set(tests = {
     *     @ParameterizedTest(parameters = {
     *         @Parameter(
     *                 tag = CommandLineFlags.Parameter.PARAMETER_TAG)}),
     *     @ParameterizedTest(parameters = {
     *         @Parameter(
     *                 tag = CommandLineFlags.Parameter.PARAMETER_TAG,
     *                 arguments = {
     *                     @Parameter.Argument(
     *                             name = CommandLineFlags.Parameter.ADD_ARG,
     *                             stringArray = {'arg1', 'arg2'})
     *                 })})})
     *
     * Note that because the entire instrumentation test process needs to be restarted to apply
     * modified command-line arguments, this annotation is handled by test_runner.py, not by
     * BaseTestResult class.
     */
    public static class Parameter extends BaseParameter {
        public static final String PARAMETER_TAG = "cmdlinearg-parameter";
        public static final String ADD_ARG = "add";
        public static final String REMOVE_ARG = "remove";

        public Parameter(org.chromium.base.test.util.parameter.Parameter.Reader parameterReader) {
            super(PARAMETER_TAG, parameterReader);
        }
    }
}
| {
"pile_set_name": "Github"
} |
name: com.valvesoftware.unity.openvr
displayName: OpenVR XR Plugin
description: OpenVR plugin for Unity's XR API
repoUrl: 'https://github.com/ValveSoftware/unity-xr-plugin'
parentRepoUrl: null
licenseSpdxId: BSD-3-Clause
licenseName: BSD 3-Clause "New" or "Revised" License
topics:
- ar-and-vr
hunter: JesseTG
gitTagPrefix: ''
gitTagIgnore: ''
minVersion: ''
image: null
readme: 'master:README.md'
createdAt: 1594058566020
| {
"pile_set_name": "Github"
} |
<br />
<div class="row">
<div class="col-md-1"></div>
<div class="col-md-5">
<label for="select_request_prefix">Custom Injection Prefix:</label>
<select class="form-control" id="select_request_prefix" name="request_prefix">
<option value="" selected="selected" onClick="divHideAndSeek('display_prefix_data_form', 1)">Disabled</option>
<option value="enabled" onClick="divHideAndSeek('display_prefix_data_form', 0)">Enabled</option>
</select>
<div id="display_prefix_data_form" align="central" style="display: none">
<br />
<label for="request_prefix_str">Custom Injection Prefix String to Use:</label>
<input type="text" class="form-control" id="request_prefix_str" name="prefix" placeholder="i.e. ') ">
<br />
</div><br />
<label for="select_request_suffix">Custom Injection Suffix:</label>
<select class="form-control" id="select_request_suffix" name="request_suffix">
<option value="" selected="selected" onClick="divHideAndSeek('display_suffix_data_form', 1)">Disabled</option>
<option value="enabled" onClick="divHideAndSeek('display_suffix_data_form', 0)">Enabled</option>
</select>
<div id="display_suffix_data_form" align="central" style="display: none">
<br />
<label for="request_suffix_str">Custom Injection Suffix String to Use:</label>
<input type="text" class="form-control" id="request_suffix_str" name="suffix" placeholder="i.e. AND ('abc'='abc ">
<br />
</div><br />
<label for="select_request_invalidator">Select Method Used to Invalidate Query:</label>
<select class="form-control" id="select_request_invalidator" name="request_invalidator">
<option value="default" selected="selected">Negate Value</option>
<option value="invalidBignum">Large Integer</option>
<option value="invalidLogical">Logical Operator</option>
<option value="invalidString">Random String</option>
</select><br />
<label for="select_request_casting">Payload Casting Mechanism:</label>
<select class="form-control" id="select_request_casting" name="noCast">
<option value="" selected="selected">Enabled</option>
<option value="true">Disabled</option>
</select><br />
<label for="select_request_hex">Use of DBMS hex function(s) for data retrieval:</label>
<select class="form-control" id="select_request_hex" name="hexConvert">
<option value="true">Enabled</option>
<option value="" selected="selected">Disabled</option>
</select><br />
<label for="select_request_hpp">Use of HTTP Parameter Pollution Method:</label>
<select class="form-control" id="select_request_hpp" name="hpp">
<option value="true">Enabled</option>
<option value="" selected="selected">Disabled</option>
</select><br />
<div id="display_time_based_data_form" align="central" style="display: none">
<label for="select_timeSec">Seconds to Delay the DBMS Response for Time Based Attack:</label>
<select class="form-control" id="select_timeSec" name="timeSec">
<option value="3"> 3 </option>
<option value="5" selected="selected"> 5 </option>
<option value="8"> 8 </option>
<option value="10"> 10 </option>
<option value="12"> 12 </option>
<option value="15"> 15 </option>
<option value="18"> 18 </option>
<option value="20"> 20 </option>
<option value="25"> 25 </option>
</select><br />
</div>
<div id="display_union_data_form" align="central" style="display: none">
<label for="select_union_col_range">Define Union Column Range:</label>
<select class="form-control" id="select_union_col_range" name="union_col_range">
<option value="" selected="selected" onClick="divHideAndSeek('display_union_col_range_data_form', 1)">Disabled</option>
<option value="enabled" onClick="divHideAndSeek('display_union_col_range_data_form', 0)">Enabled</option>
</select>
<div id="display_union_col_range_data_form" align="central" style="display: none">
<br />
<label for="union_col_min">Min Columns:</label>
<select class="form-control" id="union_col_min" name="union_col_min">
<option value="1" selected="selected"> 1 </option>
<?php
foreach(range(2, 999) as $number) {
echo " <option value=\"$number\"> $number </option>";
}
?>
</select><br />
<label for="union_col_max">Max Columns:</label>
<select class="form-control" id="union_col_max" name="union_col_max">
<option value="2" selected="selected"> 2 </option>
<?php
foreach(range(3, 1000) as $number) {
echo " <option value=\"$number\"> $number </option>";
}
?>
</select><br />
</div><br />
<label for="select_union_char_filter">Define Custom Char for Union Column Brute:</label>
<select class="form-control" id="select_union_char_filter" name="union_char_filter">
<option value="" selected="selected" onClick="divHideAndSeek('display_union_char_filter_data_form', 1)">Disabled</option>
<option value="enabled" onClick="divHideAndSeek('display_union_char_filter_data_form', 0)">Enabled</option>
</select>
<div id="display_union_char_filter_data_form" align="central" style="display: none">
<br />
<label for="union_char">Custom Char to Use:</label>
<input type="text" class="form-control" id="union_char" name="uChar" placeholder="i.e. 123 ">
<br />
</div><br />
<label for="select_union_from_filter">Define Table for FROM part of UNION:</label>
<select class="form-control" id="select_union_from_filter" name="union_from_filter">
<option value="" selected="selected" onClick="divHideAndSeek('display_union_from_filter_data_form', 1)">Disabled</option>
<option value="enabled" onClick="divHideAndSeek('display_union_from_filter_data_form', 0)">Enabled</option>
</select>
<div id="display_union_from_filter_data_form" align="central" style="display: none">
<br />
<label for="union_from">FROM Table to Use:</label>
<input type="text" class="form-control" id="union_from" name="uFrom" placeholder="i.e. users ">
<br />
</div><br />
</div><br />
</div>
<div class="col-md-1"></div>
<div class="col-md-4">
<label for="select_technique">Select SQLi Method(s) to Test:</label>
<select class="form-control" id="technique" name="tech[]" size="7" onchange="techCheck()" multiple>
<option value="A">Test ALL Methods!</option>
<option value="B" selected="selected">Boolean Based Blind</option>
<option value="E">Error Based</option>
<option value="Q">Inline Queries</option>
<option value="S">Stacked Queries</option>
<option value="T">Time Based Blind</option>
<option value="U">Union Based</option>
</select><br />
<div class="col-md-4">
<label for="select_scan_level">Scan Level:</label>
<select class="form-control" id="select_scan_level" name="level">
<option value="1"> 1 </option>
<option value="2"> 2 </option>
<option value="3" selected="selected"> 3 </option>
<option value="4"> 4 </option>
<option value="5"> 5 </option>
</select><br />
</div>
<div class="col-md-4">
<label for="select_scan_risk">Scan Risk:</label>
<select class="form-control" id="select_risk" name="risk">
<option value="0"> None </option>
<option value="1"> Low </option>
<option value="2" selected="selected"> Med </option>
<option value="3"> Hi </option>
</select><br />
</div>
<div class="col-md-3">
<label for="select_thread_count">Threads:</label>
<select class="form-control" id="select_thread_count" name="threads">
<option value="1" selected="selected"> 1 </option>
<?php
foreach(range(2, 10) as $number) {
echo " <option value=\"$number\"> $number </option>";
}
?>
</select><br />
</div>
<label for="select_dbms">Select Backend Database Type:</label>
<select class="form-control" id="select_dbms" name="dbms">
<option value="" selected="selected">Unknown</option>
<option value="DB2">DB2</option>
<option value="Firebird">Firebird</option>
<option value="Microsoft Access">MS-Access</option>
<option value="Microsoft SQL Server">MS-SQL</option>
<option value="MySQL">MySQL</option>
<option value="Oracle">Oracle</option>
<option value="PostgreSQL">PostgreSQL</option>
<option value="SAP MaxDB">SAP MaxDB</option>
<option value="SQLite">SQLite</option>
<option value="Sybase">Sybase</option>
</select><br />
<label for="select_os">Select Backend OS Type:</label>
<select class="form-control" id="select_os" name="os">
<option value="" selected="selected">Unknown</option>
<option value="Linux">Linux</option>
<option value="Windows">Windows</option>
</select><br />
<label for="select_tamper">Select Tamper Scripts to Use:</label>
<select class="form-control" id="select_tamper" name="tamper[]" size="7" multiple>
<option value="" selected="selected">Do NOT Apply Any Tamper Scripts!</option>
<?php
// Enumerate the available sqlmap tamper scripts and emit one <option> per
// script. The "."/".." entries previously filtered here can never appear in
// glob() output for "*.py", so they are dropped; glob() may also return
// false on error, so fall back to an empty list.
include("./inc/config.php");
$tamperScripts = glob(SQLMAP_BIN_PATH . "tamper/*.py") ?: array();
foreach ($tamperScripts as $tscript) {
    $ts = basename($tscript);
    if ($ts === "__init__.py") {
        continue; // package marker, not a tamper script
    }
    // Escape in case a script name ever contains HTML metacharacters.
    echo '<option value="tamper/' . htmlspecialchars($ts) . '">' . htmlspecialchars($ts) . '</option>';
}
?>
</select><br />
</div>
<div class="col-md-1"></div>
</div>
| {
"pile_set_name": "Github"
} |
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2012 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** This module implements various tactics used to simplify the goals produced by Program,
which are also generally useful. *)
(** Debugging tactics to show the goal during evaluation. *)
Ltac show_goal := match goal with [ |- ?T ] => idtac T end.
Ltac show_hyp id :=
match goal with
| [ H := ?b : ?T |- _ ] =>
match H with
| id => idtac id ":=" b ":" T
end
| [ H : ?T |- _ ] =>
match H with
| id => idtac id ":" T
end
end.
Ltac show_hyps :=
try match reverse goal with
| [ H : ?T |- _ ] => show_hyp H ; fail
end.
(** The [do] tactic but using a Coq-side nat. *)
Ltac do_nat n tac :=
match n with
| 0 => idtac
| S ?n' => tac ; do_nat n' tac
end.
(** Do something on the last hypothesis, or fail *)
Ltac on_last_hyp tac :=
match goal with [ H : _ |- _ ] => first [ tac H | fail 1 ] end.
(** Destructs one pair, without care regarding naming. *)
Ltac destruct_one_pair :=
match goal with
| [H : (_ /\ _) |- _] => destruct H
| [H : prod _ _ |- _] => destruct H
end.
(** Repeatedly destruct pairs. *)
Ltac destruct_pairs := repeat (destruct_one_pair).
(** Destruct one existential package, keeping the name of the hypothesis for the first component. *)
Ltac destruct_one_ex :=
let tac H := let ph := fresh "H" in (destruct H as [H ph]) in
let tac2 H := let ph := fresh "H" in let ph' := fresh "H" in
(destruct H as [H ph ph'])
in
let tacT H := let ph := fresh "X" in (destruct H as [H ph]) in
let tacT2 H := let ph := fresh "X" in let ph' := fresh "X" in
(destruct H as [H ph ph'])
in
match goal with
| [H : (ex _) |- _] => tac H
| [H : (sig ?P) |- _ ] => tac H
| [H : (sigT ?P) |- _ ] => tacT H
| [H : (ex2 _ _) |- _] => tac2 H
| [H : (sig2 ?P _) |- _ ] => tac2 H
| [H : (sigT2 ?P _) |- _ ] => tacT2 H
end.
(** Repeatedly destruct existentials. *)
Ltac destruct_exists := repeat (destruct_one_ex).
(** Repeatedly destruct conjunctions and existentials. *)
Ltac destruct_conjs := repeat (destruct_one_pair || destruct_one_ex).
(** Destruct an existential hypothesis [t] keeping its name for the first component
and using [Ht] for the second *)
Tactic Notation "destruct" "exist" ident(t) ident(Ht) := destruct t as [t Ht].
(** Destruct a disjunction keeping its name in both subgoals. *)
Tactic Notation "destruct" "or" ident(H) := destruct H as [H|H].
(** Discriminate that also work on a [x <> x] hypothesis. *)
Ltac discriminates :=
match goal with
| [ H : ?x <> ?x |- _ ] => elim H ; reflexivity
| _ => discriminate
end.
(** Revert the last hypothesis. *)
Ltac revert_last :=
match goal with
[ H : _ |- _ ] => revert H
end.
(** Repeatedly reverse the last hypothesis, putting everything in the goal. *)
Ltac reverse := repeat revert_last.
(** Reverse everything up to hypothesis id (not included). *)
Ltac revert_until id :=
on_last_hyp ltac:(fun id' =>
match id' with
| id => idtac
| _ => revert id' ; revert_until id
end).
(** Clear duplicated hypotheses *)
Ltac clear_dup :=
match goal with
| [ H : ?X |- _ ] =>
match goal with
| [ H' : ?Y |- _ ] =>
match H with
| H' => fail 2
| _ => unify X Y ; (clear H' || clear H)
end
end
end.
Ltac clear_dups := repeat clear_dup.
(** Try to clear everything except some hyp *)
Ltac clear_except hyp :=
repeat match goal with [ H : _ |- _ ] =>
match H with
| hyp => fail 1
| _ => clear H
end
end.
(** A non-failing subst that substitutes as much as possible. *)
Ltac subst_no_fail :=
repeat (match goal with
[ H : ?X = ?Y |- _ ] => subst X || subst Y
end).
Tactic Notation "subst" "*" := subst_no_fail.
(** [on_application f tac T] finds an application of [f] inside the term [T]
    and runs [tac] on that application. Arities from 9 arguments down to 1
    are tried in decreasing order, so the longest visible application of [f]
    is selected first. *)
Ltac on_application f tac T :=
  match T with
    | context [f ?x ?y ?z ?w ?v ?u ?a ?b ?c] => tac (f x y z w v u a b c)
    | context [f ?x ?y ?z ?w ?v ?u ?a ?b] => tac (f x y z w v u a b)
    | context [f ?x ?y ?z ?w ?v ?u ?a] => tac (f x y z w v u a)
    | context [f ?x ?y ?z ?w ?v ?u] => tac (f x y z w v u)
    | context [f ?x ?y ?z ?w ?v] => tac (f x y z w v)
    | context [f ?x ?y ?z ?w] => tac (f x y z w)
    | context [f ?x ?y ?z] => tac (f x y z)
    | context [f ?x ?y] => tac (f x y)
    | context [f ?x] => tac (f x)
  end.
(** A variant of [apply] using [refine], doing as much conversion as necessary. *)
Ltac rapply p :=
refine (p _ _ _ _ _ _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _ _) ||
refine (p _ _ _ _ _ _) ||
refine (p _ _ _ _ _) ||
refine (p _ _ _ _) ||
refine (p _ _ _) ||
refine (p _ _) ||
refine (p _) ||
refine p.
(** Tactical [on_call f tac] applies [tac] on any application of [f] in the hypothesis or goal. *)
Ltac on_call f tac :=
match goal with
| |- ?T => on_application f tac T
| H : ?T |- _ => on_application f tac T
end.
(* Destructs calls to f in hypothesis or conclusion, useful if f creates a subset object. *)
(* [destruct_call f] destructs the first application of [f] found in the
   goal or in a hypothesis. *)
Ltac destruct_call f :=
  let tac t := (destruct t) in on_call f tac.

(* Repeat [destruct_call f] for as long as it succeeds. *)
Ltac destruct_calls f := repeat destruct_call f.

(* Variant restricted to applications of [f] occurring in hypothesis [H]. *)
Ltac destruct_call_in f H :=
  let tac t := (destruct t) in
  let T := type of H in
  on_application f tac T.

(* Variant destructing with the introduction pattern [l]. *)
Ltac destruct_call_as f l :=
  let tac t := (destruct t as l) in on_call f tac.

(* Variant combining the hypothesis restriction and an intro pattern. *)
Ltac destruct_call_as_in f l H :=
  let tac t := (destruct t as l) in
  let T := type of H in
  on_application f tac T.
Tactic Notation "destruct_call" constr(f) := destruct_call f.
(** Permit to name the results of destructing the call to [f]. *)
Tactic Notation "destruct_call" constr(f) "as" simple_intropattern(l) :=
destruct_call_as f l.
(** Specify the hypothesis in which the call occurs as well. *)
Tactic Notation "destruct_call" constr(f) "in" hyp(id) :=
destruct_call_in f id.
Tactic Notation "destruct_call" constr(f) "as" simple_intropattern(l) "in" hyp(id) :=
destruct_call_as_in f l id.
(** A marker for prototypes to destruct. *)
Definition fix_proto {A : Type} (a : A) := a.
Ltac destruct_rec_calls :=
match goal with
| [ H : fix_proto _ |- _ ] => destruct_calls H ; clear H
end.
Ltac destruct_all_rec_calls :=
repeat destruct_rec_calls ; unfold fix_proto in *.
(** Try to inject any potential constructor equality hypothesis. *)
Ltac autoinjection tac :=
match goal with
| [ H : ?f ?a = ?f' ?a' |- _ ] => tac H
end.
(* [inject H] inverts the constructor equality [H], substitutes the derived
   equations, clears duplicated hypotheses, and finally clears [H] itself.
   [progress] makes the tactic fail (rather than succeed vacuously) when
   the inversion changes nothing. *)
Ltac inject H := progress (inversion H ; subst*; clear_dups) ; clear H.

(* Saturate the context: repeatedly find and invert constructor equalities. *)
Ltac autoinjections := repeat (clear_dups ; autoinjection ltac:inject).
(** Destruct an hypothesis by first copying it to avoid dependencies. *)
Ltac destruct_nondep H := let H0 := fresh "H" in assert(H0 := H); destruct H0.
(** If bang appears in the goal, it means that we have a proof of False and the goal is solved. *)
Ltac bang :=
match goal with
| |- ?x =>
match x with
| appcontext [False_rect _ ?p] => elim p
end
end.
(** A tactic to show contradiction by first asserting an automatically provable hypothesis. *)
Tactic Notation "contradiction" "by" constr(t) :=
let H := fresh in assert t as H by auto with * ; contradiction.
(** A tactic that adds [H:=p:typeof(p)] to the context if no hypothesis of the same type appears in the goal.
Useful to do saturation using tactics. *)
Ltac add_hypothesis H' p :=
match type of p with
?X =>
match goal with
| [ H : X |- _ ] => fail 1
| _ => set (H':=p) ; try (change p with H') ; clearbody H'
end
end.
(** A tactic to replace an hypothesis by another term. *)
Ltac replace_hyp H c :=
let H' := fresh "H" in
assert(H' := c) ; clear H ; rename H' into H.
(** A tactic to refine an hypothesis by supplying some of its arguments. *)
Ltac refine_hyp c :=
let tac H := replace_hyp H c in
match c with
| ?H _ => tac H
| ?H _ _ => tac H
| ?H _ _ _ => tac H
| ?H _ _ _ _ => tac H
| ?H _ _ _ _ _ => tac H
| ?H _ _ _ _ _ _ => tac H
| ?H _ _ _ _ _ _ _ => tac H
| ?H _ _ _ _ _ _ _ _ => tac H
end.
(** The default simplification tactic used by Program is defined by [program_simpl], sometimes [auto]
is not enough, better rebind using [Obligation Tactic := tac] in this case,
possibly using [program_simplify] to use standard goal-cleaning tactics. *)
Ltac program_simplify :=
simpl; intros ; destruct_all_rec_calls ; repeat (destruct_conjs; simpl proj1_sig in * );
subst*; autoinjections ; try discriminates ;
try (solve [ red ; intros ; destruct_conjs ; autoinjections ; discriminates ]).
(** Restrict automation to propositional obligations. *)
Ltac program_solve_wf :=
match goal with
| |- well_founded _ => auto with *
| |- ?T => match type of T with Prop => auto end
end.
Create HintDb program discriminated.
Ltac program_simpl := program_simplify ; try typeclasses eauto with program ; try program_solve_wf.
Obligation Tactic := program_simpl.
Definition obligation (A : Type) {a : A} := a. | {
"pile_set_name": "Github"
} |
# License: BSD 3 clause
import warnings
from abc import ABC, abstractmethod
import numpy as np
from tick.base import Base
__author__ = 'Stephane Gaiffas'
LOSS = "loss"
GRAD = "grad"
LOSS_AND_GRAD = "loss_and_grad"
HESSIAN_NORM = "hessian_norm"
N_CALLS_LOSS = "n_calls_loss"
N_CALLS_GRAD = "n_calls_grad"
N_CALLS_LOSS_AND_GRAD = "n_calls_loss_and_grad"
N_CALLS_HESSIAN_NORM = "n_calls_hessian_norm"
PASS_OVER_DATA = "n_passes_over_data"
class Model(ABC, Base):
    """Abstract class for a model. It describes a zero-order model,
    namely only with the ability to compute a loss (goodness-of-fit
    criterion).

    Attributes
    ----------
    n_coeffs : `int` (read-only)
        Total number of coefficients of the model

    n_calls_loss : `int` (read-only)
        Number of times ``loss`` has been called so far

    n_passes_over_data : `int` (read-only)
        Number of effective passes through the data

    dtype : `{'float64', 'float32'}`
        Type of the data arrays used.

    Notes
    -----
    This class should be not used by end-users, it is intended for
    development only.
    """
    # A dict which specifies for each operation how many times we
    # pass through data
    pass_per_operation = {LOSS: 1}

    # Attribute-access control consumed by the tick ``Base`` machinery:
    # the attributes below may only be mutated via ``_set`` / ``_inc_attr``.
    _attrinfos = {
        "_fitted": {
            "writable": False
        },
        N_CALLS_LOSS: {
            "writable": False
        },
        PASS_OVER_DATA: {
            "writable": False
        },
        "n_coeffs": {
            "writable": False
        },
        "_model": {
            "writable": False
        }
    }

    # The name of the attribute that might contain the C++ model object
    _cpp_obj_name = "_model"

    def __init__(self):
        Base.__init__(self)
        self._fitted = False
        self._model = None
        setattr(self, N_CALLS_LOSS, 0)
        setattr(self, PASS_OVER_DATA, 0)
        # ``dtype`` is only known once data is given through ``fit``
        self.dtype = None

    def fit(self, *args):
        """Give data to the model and reset the call counters.

        Returns
        -------
        output : `Model`
            The current instance, to allow chaining.
        """
        self._set_data(*args)
        self._set("_fitted", True)
        self._set(N_CALLS_LOSS, 0)
        self._set(PASS_OVER_DATA, 0)
        return self

    @abstractmethod
    def _get_n_coeffs(self) -> int:
        """An abstract method that forces childs to be able to give
        the number of parameters
        """
        pass

    @property
    def n_coeffs(self):
        if not self._fitted:
            raise ValueError(("call ``fit`` before using " "``n_coeffs``"))
        return self._get_n_coeffs()

    @abstractmethod
    def _set_data(self, *args):
        """Must be overloaded in child class. This method is called to
        fit data onto the gradient.
        Useful when pre-processing is necessary, etc...
        It should also set the dtype
        """
        pass

    def loss(self, coeffs: np.ndarray) -> float:
        """Computes the value of the goodness-of-fit at ``coeffs``

        Parameters
        ----------
        coeffs : `numpy.ndarray`
            The loss is computed at this point

        Returns
        -------
        output : `float`
            The value of the loss

        Notes
        -----
        The ``fit`` method must be called to give data to the model,
        before using ``loss``. An error is raised otherwise.
        """
        # Check fitting first: before ``fit`` is called ``self.dtype`` is
        # still None, so running the dtype comparison below first would emit
        # a misleading "cast to None" warning (and perform a spurious cast)
        # before the real error is raised.
        if not self._fitted:
            raise ValueError("call ``fit`` before using ``loss``")
        # This is a bit of a hack as I don't see how to control the dtype of
        # coeffs returning from scipy through lambdas
        if coeffs.dtype != self.dtype:
            warnings.warn(
                'coeffs vector of type {} has been cast to {}'.format(
                    coeffs.dtype, self.dtype))
            coeffs = coeffs.astype(self.dtype)
        if coeffs.shape[0] != self.n_coeffs:
            raise ValueError(
                ("``coeffs`` has size %i while the model" +
                 " expects %i coefficients") % (coeffs.shape[0], self.n_coeffs))
        # Book-keeping counters exposed read-only through _attrinfos
        self._inc_attr(N_CALLS_LOSS)
        self._inc_attr(PASS_OVER_DATA, step=self.pass_per_operation[LOSS])
        return self._loss(coeffs)

    @abstractmethod
    def _loss(self, coeffs: np.ndarray) -> float:
        """Must be overloaded in child class
        """
        pass

    def _get_typed_class(self, dtype_or_object_with_dtype, dtype_map):
        """Deduce dtype and return true if C++ _model should be set
        """
        import tick.base.dtype_to_cpp_type
        return tick.base.dtype_to_cpp_type.get_typed_class(
            self, dtype_or_object_with_dtype, dtype_map)

    def astype(self, dtype_or_object_with_dtype):
        """Return a copy of this model converted to the given dtype.

        The C++ backend object is rebuilt for the new dtype rather than
        deep-copied, hence ``_model`` is excluded from the copy below.
        """
        import tick.base.dtype_to_cpp_type
        new_model = tick.base.dtype_to_cpp_type.copy_with(
            self,
            ["_model"]  # ignore _model on deepcopy
        )
        new_model._set('_model',
                       new_model._build_cpp_model(dtype_or_object_with_dtype))
        return new_model

    def _build_cpp_model(self, dtype: str):
        # Subclasses backed by a C++ implementation must override this.
        raise ValueError(
            "This function is expected to be overridden in a subclass")
| {
"pile_set_name": "Github"
} |
{
"source": "http://www.geonames.org/PL/administrative-division-poland.html",
"country": "Poland",
"subdivisions": {
"DS": "Dolnoslaskie",
"KP": "Kujawsko-Pomorskie",
"LB": "Lubuskie",
"LD": "Lodzkie",
"LU": "Lubelskie",
"MA": "Malopolskie",
"MZ": "Mazowieckie",
"OP": "Opolskie",
"PD": "Podlaskie",
"PK": "Podkarpackie",
"PM": "Pomorskie",
"SK": "Swietokrzyskie",
"SL": "Slaskie",
"WN": "Warminsko-Mazurskie",
"WP": "Wielkopolskie",
"ZP": "Zachodniopomorskie"
}
}
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2011, Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
********************************************************************************
* Content : Eigen bindings to Intel(R) MKL
* MKL VML support for coefficient-wise unary Eigen expressions like a=b.sin()
********************************************************************************
*/
#ifndef EIGEN_ASSIGN_VML_H
#define EIGEN_ASSIGN_VML_H
namespace Eigen {
namespace internal {
// Primary template: by default an Eigen functor has no VML counterpart.
// The EIGEN_MKL_VML_DECLARE_* macros below specialize this with
// IsSupported = 1 and a run() forwarding to the matching MKL routine.
template<typename Op> struct vml_call
{ enum { IsSupported = 0 }; };

// Compile-time selection of the traversal strategy that may route a
// coefficient-wise unary assignment through Intel MKL VML.  VML requires
// directly-addressable, unit-inner-stride storage with agreeing storage
// orders, and is only worthwhile above EIGEN_MKL_VML_THRESHOLD elements.
template<typename Dst, typename Src, typename UnaryOp>
class vml_assign_traits
{
  private:
    enum {
      DstHasDirectAccess = Dst::Flags & DirectAccessBit,
      SrcHasDirectAccess = Src::Flags & DirectAccessBit,
      StorageOrdersAgree = (int(Dst::IsRowMajor) == int(Src::IsRowMajor)),
      // Length of one contiguous run (whole vector, or one row/column).
      InnerSize = int(Dst::IsVectorAtCompileTime) ? int(Dst::SizeAtCompileTime)
                : int(Dst::Flags)&RowMajorBit ? int(Dst::ColsAtCompileTime)
                : int(Dst::RowsAtCompileTime),
      InnerMaxSize = int(Dst::IsVectorAtCompileTime) ? int(Dst::MaxSizeAtCompileTime)
                   : int(Dst::Flags)&RowMajorBit ? int(Dst::MaxColsAtCompileTime)
                   : int(Dst::MaxRowsAtCompileTime),
      MaxSizeAtCompileTime = Dst::SizeAtCompileTime,
      MightEnableVml = vml_call<UnaryOp>::IsSupported && StorageOrdersAgree && DstHasDirectAccess && SrcHasDirectAccess
                       && Src::InnerStrideAtCompileTime==1 && Dst::InnerStrideAtCompileTime==1,
      // Linearization allows a single VML call over the whole expression.
      MightLinearize = MightEnableVml && (int(Dst::Flags) & int(Src::Flags) & LinearAccessBit),
      VmlSize = MightLinearize ? MaxSizeAtCompileTime : InnerMaxSize,
      LargeEnough = VmlSize==Dynamic || VmlSize>=EIGEN_MKL_VML_THRESHOLD,
      MayEnableVml = MightEnableVml && LargeEnough,
      MayLinearize = MayEnableVml && MightLinearize
    };
  public:
    enum {
      Traversal = MayLinearize ? LinearVectorizedTraversal
                : MayEnableVml ? InnerVectorizedTraversal
                : DefaultTraversal
    };
};
// Primary template: fall back to Eigen's built-in assignment when the
// selected traversal does not permit VML.
template<typename Derived1, typename Derived2, typename UnaryOp, int Traversal, int Unrolling,
         int VmlTraversal = vml_assign_traits<Derived1, Derived2, UnaryOp>::Traversal >
struct vml_assign_impl
  : assign_impl<Derived1, Eigen::CwiseUnaryOp<UnaryOp, Derived2>,Traversal,Unrolling,BuiltIn>
{
};

// Inner-vectorized case: one VML call per row (row-major) or column
// (column-major), since the expression is contiguous only per inner run.
template<typename Derived1, typename Derived2, typename UnaryOp, int Traversal, int Unrolling>
struct vml_assign_impl<Derived1, Derived2, UnaryOp, Traversal, Unrolling, InnerVectorizedTraversal>
{
  typedef typename Derived1::Scalar Scalar;
  typedef typename Derived1::Index Index;
  static inline void run(Derived1& dst, const CwiseUnaryOp<UnaryOp, Derived2>& src)
  {
    // in case we want to (or have to) skip VML at runtime we can call:
    // assign_impl<Derived1,Eigen::CwiseUnaryOp<UnaryOp, Derived2>,Traversal,Unrolling,BuiltIn>::run(dst,src);
    const Index innerSize = dst.innerSize();
    const Index outerSize = dst.outerSize();
    for(Index outer = 0; outer < outerSize; ++outer) {
      const Scalar *src_ptr = src.IsRowMajor ? &(src.nestedExpression().coeffRef(outer,0)) :
                              &(src.nestedExpression().coeffRef(0, outer));
      Scalar *dst_ptr = dst.IsRowMajor ? &(dst.coeffRef(outer,0)) : &(dst.coeffRef(0, outer));
      vml_call<UnaryOp>::run(src.functor(), innerSize, src_ptr, dst_ptr );
    }
  }
};

// Fully linearizable case: the whole expression is one contiguous buffer,
// so a single VML call covers it.
template<typename Derived1, typename Derived2, typename UnaryOp, int Traversal, int Unrolling>
struct vml_assign_impl<Derived1, Derived2, UnaryOp, Traversal, Unrolling, LinearVectorizedTraversal>
{
  static inline void run(Derived1& dst, const CwiseUnaryOp<UnaryOp, Derived2>& src)
  {
    // in case we want to (or have to) skip VML at runtime we can call:
    // assign_impl<Derived1,Eigen::CwiseUnaryOp<UnaryOp, Derived2>,Traversal,Unrolling,BuiltIn>::run(dst,src);
    vml_call<UnaryOp>::run(src.functor(), dst.size(), src.nestedExpression().data(), dst.data() );
  }
};
// Macros
// Hook vml_assign_impl into Eigen's assign_impl dispatch for one
// (traversal, unrolling) combination.  Every combination Eigen may pick
// must be listed below, otherwise VML would silently be bypassed for it.
#define EIGEN_MKL_VML_SPECIALIZE_ASSIGN(TRAVERSAL,UNROLLING) \
  template<typename Derived1, typename Derived2, typename UnaryOp> \
  struct assign_impl<Derived1, Eigen::CwiseUnaryOp<UnaryOp, Derived2>, TRAVERSAL, UNROLLING, Specialized> { \
    static inline void run(Derived1 &dst, const Eigen::CwiseUnaryOp<UnaryOp, Derived2> &src) { \
      vml_assign_impl<Derived1,Derived2,UnaryOp,TRAVERSAL,UNROLLING>::run(dst, src); \
    } \
  };

// Instantiate the hook for all traversal/unrolling combinations.
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(DefaultTraversal,NoUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(DefaultTraversal,CompleteUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(DefaultTraversal,InnerUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(LinearTraversal,NoUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(LinearTraversal,CompleteUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(InnerVectorizedTraversal,NoUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(InnerVectorizedTraversal,CompleteUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(InnerVectorizedTraversal,InnerUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(LinearVectorizedTraversal,CompleteUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(LinearVectorizedTraversal,NoUnrolling)
EIGEN_MKL_VML_SPECIALIZE_ASSIGN(SliceVectorizedTraversal,NoUnrolling)
// VML accuracy mode: high accuracy (HA) by default, low accuracy (LA)
// when Eigen's fast-math mode is enabled.
#if !defined (EIGEN_FAST_MATH) || (EIGEN_FAST_MATH != 1)
#define EIGEN_MKL_VML_MODE VML_HA
#else
#define EIGEN_MKL_VML_MODE VML_LA
#endif

// Specialize vml_call for one Eigen functor, forwarding to a fixed-accuracy
// VML routine (no mode argument).
#define EIGEN_MKL_VML_DECLARE_UNARY_CALL(EIGENOP, VMLOP, EIGENTYPE, VMLTYPE) \
  template<> struct vml_call< scalar_##EIGENOP##_op<EIGENTYPE> > { \
    enum { IsSupported = 1 }; \
    static inline void run( const scalar_##EIGENOP##_op<EIGENTYPE>& /*func*/, \
                            int size, const EIGENTYPE* src, EIGENTYPE* dst) { \
      VMLOP(size, (const VMLTYPE*)src, (VMLTYPE*)dst); \
    } \
  };

// Same, but for the vm* routines that take an explicit accuracy mode.
#define EIGEN_MKL_VML_DECLARE_UNARY_CALL_LA(EIGENOP, VMLOP, EIGENTYPE, VMLTYPE) \
  template<> struct vml_call< scalar_##EIGENOP##_op<EIGENTYPE> > { \
    enum { IsSupported = 1 }; \
    static inline void run( const scalar_##EIGENOP##_op<EIGENTYPE>& /*func*/, \
                            int size, const EIGENTYPE* src, EIGENTYPE* dst) { \
      MKL_INT64 vmlMode = EIGEN_MKL_VML_MODE; \
      VMLOP(size, (const VMLTYPE*)src, (VMLTYPE*)dst, vmlMode); \
    } \
  };

// pow is special: the scalar exponent stored in the functor is forwarded
// to the (Fortran-style, pointer-argument) vm*powx_ routine.
#define EIGEN_MKL_VML_DECLARE_POW_CALL(EIGENOP, VMLOP, EIGENTYPE, VMLTYPE) \
  template<> struct vml_call< scalar_##EIGENOP##_op<EIGENTYPE> > { \
    enum { IsSupported = 1 }; \
    static inline void run( const scalar_##EIGENOP##_op<EIGENTYPE>& func, \
                            int size, const EIGENTYPE* src, EIGENTYPE* dst) { \
      EIGENTYPE exponent = func.m_exponent; \
      MKL_INT64 vmlMode = EIGEN_MKL_VML_MODE; \
      VMLOP(&size, (const VMLTYPE*)src, (const VMLTYPE*)&exponent, \
            (VMLTYPE*)dst, &vmlMode); \
    } \
  };

// Expand one declaration per supported scalar type (s/d = real,
// c/z = complex), with and without the accuracy-mode argument.
#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS_REAL(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL(EIGENOP, vs##VMLOP, float, float) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL(EIGENOP, vd##VMLOP, double, double)

#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS_COMPLEX(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL(EIGENOP, vc##VMLOP, scomplex, MKL_Complex8) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL(EIGENOP, vz##VMLOP, dcomplex, MKL_Complex16)

#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALLS_REAL(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALLS_COMPLEX(EIGENOP, VMLOP)

#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS_REAL_LA(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL_LA(EIGENOP, vms##VMLOP, float, float) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL_LA(EIGENOP, vmd##VMLOP, double, double)

#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS_COMPLEX_LA(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL_LA(EIGENOP, vmc##VMLOP, scomplex, MKL_Complex8) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALL_LA(EIGENOP, vmz##VMLOP, dcomplex, MKL_Complex16)

#define EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALLS_REAL_LA(EIGENOP, VMLOP) \
  EIGEN_MKL_VML_DECLARE_UNARY_CALLS_COMPLEX_LA(EIGENOP, VMLOP)

// Register the supported elementwise functions.
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(sin, Sin)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(asin, Asin)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(cos, Cos)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(acos, Acos)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(tan, Tan)
//EIGEN_MKL_VML_DECLARE_UNARY_CALLS(abs, Abs)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(exp, Exp)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(log, Ln)
EIGEN_MKL_VML_DECLARE_UNARY_CALLS_LA(sqrt, Sqrt)

EIGEN_MKL_VML_DECLARE_UNARY_CALLS_REAL(square, Sqr)

// The vm*powx functions are not available in the windows version of MKL.
#ifndef _WIN32
EIGEN_MKL_VML_DECLARE_POW_CALL(pow, vmspowx_, float, float)
EIGEN_MKL_VML_DECLARE_POW_CALL(pow, vmdpowx_, double, double)
EIGEN_MKL_VML_DECLARE_POW_CALL(pow, vmcpowx_, scomplex, MKL_Complex8)
EIGEN_MKL_VML_DECLARE_POW_CALL(pow, vmzpowx_, dcomplex, MKL_Complex16)
#endif
} // end namespace internal
} // end namespace Eigen
#endif // EIGEN_ASSIGN_VML_H
| {
"pile_set_name": "Github"
} |
// Copyright 2008 Google Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Author: [email protected] (Vlad Losev)
// This sample shows how to test code relying on some global flag variables.
// Combine() helps with generating all possible combinations of such flags,
// and each test is given one combination as a parameter.
// Use class definitions to test from this header.
#include "prime_tables.h"
#include "gtest/gtest.h"
#if GTEST_HAS_COMBINE
// Suppose we want to introduce a new, improved implementation of PrimeTable
// which combines speed of PrecalcPrimeTable and versatility of
// OnTheFlyPrimeTable (see prime_tables.h). Inside it instantiates both
// PrecalcPrimeTable and OnTheFlyPrimeTable and uses the one that is more
// appropriate under the circumstances. But in low memory conditions, it can be
// told to instantiate without PrecalcPrimeTable instance at all and use only
// OnTheFlyPrimeTable.
// Combines the bounded-but-fast precalculated table with the unbounded
// on-the-fly implementation; queries outside the precalculated range (or
// with precalculation disabled) fall back to the on-the-fly table.
class HybridPrimeTable : public PrimeTable {
 public:
  // force_on_the_fly disables the precalculated table entirely (low-memory
  // mode); max_precalculated bounds the range it covers otherwise.
  HybridPrimeTable(bool force_on_the_fly, int max_precalculated)
      : on_the_fly_impl_(new OnTheFlyPrimeTable),
        precalc_impl_(force_on_the_fly ? NULL :
                          new PreCalculatedPrimeTable(max_precalculated)),
        max_precalculated_(max_precalculated) {}
  virtual ~HybridPrimeTable() {
    delete on_the_fly_impl_;
    delete precalc_impl_;
  }

  virtual bool IsPrime(int n) const {
    if (precalc_impl_ != NULL && n < max_precalculated_)
      return precalc_impl_->IsPrime(n);
    else
      return on_the_fly_impl_->IsPrime(n);
  }

  virtual int GetNextPrime(int p) const {
    int next_prime = -1;
    if (precalc_impl_ != NULL && p < max_precalculated_)
      next_prime = precalc_impl_->GetNextPrime(p);

    // -1 means the next prime lies beyond the precalculated range; retry
    // with the unbounded implementation.
    return next_prime != -1 ? next_prime : on_the_fly_impl_->GetNextPrime(p);
  }

 private:
  OnTheFlyPrimeTable* on_the_fly_impl_;     // always present
  PreCalculatedPrimeTable* precalc_impl_;   // NULL when forced on-the-fly
  int max_precalculated_;                   // upper bound of precalc range
};
using ::testing::TestWithParam;
using ::testing::Bool;
using ::testing::Values;
using ::testing::Combine;
// To test all code paths for HybridPrimeTable we must test it with numbers
// both within and outside PreCalculatedPrimeTable's capacity and also with
// PreCalculatedPrimeTable disabled. We do this by defining fixture which will
// accept different combinations of parameters for instantiating a
// HybridPrimeTable instance.
// Value-parameterized fixture: each parameter tuple is
// (force_on_the_fly, max_precalculated), used to build the table under test.
class PrimeTableTest : public TestWithParam< ::std::tr1::tuple<bool, int> > {
 protected:
  // Runs before every test: build a fresh HybridPrimeTable from the
  // current parameter tuple.
  virtual void SetUp() {
    // This can be written as
    //
    // bool force_on_the_fly;
    // int max_precalculated;
    // tie(force_on_the_fly, max_precalculated) = GetParam();
    //
    // once the Google C++ Style Guide allows use of ::std::tr1::tie.
    //
    bool force_on_the_fly = ::std::tr1::get<0>(GetParam());
    int max_precalculated = ::std::tr1::get<1>(GetParam());
    table_ = new HybridPrimeTable(force_on_the_fly, max_precalculated);
  }
  // Runs after every test: release the table built in SetUp().
  virtual void TearDown() {
    delete table_;
    table_ = NULL;
  }
  HybridPrimeTable* table_;  // owned; recreated per test
};
// Non-primes (including negatives, 0 and 1) must be rejected.
TEST_P(PrimeTableTest, ReturnsFalseForNonPrimes) {
  // Inside the test body, you can refer to the test parameter by GetParam().
  // In this case, the test parameter is a PrimeTable interface pointer which
  // we can use directly.
  // Please note that you can also save it in the fixture's SetUp() method
  // or constructor and use saved copy in the tests.

  EXPECT_FALSE(table_->IsPrime(-5));
  EXPECT_FALSE(table_->IsPrime(0));
  EXPECT_FALSE(table_->IsPrime(1));
  EXPECT_FALSE(table_->IsPrime(4));
  EXPECT_FALSE(table_->IsPrime(6));
  EXPECT_FALSE(table_->IsPrime(100));
}

// Known primes must be accepted; 131 exercises the on-the-fly fallback
// when the precalculated range is small.
TEST_P(PrimeTableTest, ReturnsTrueForPrimes) {
  EXPECT_TRUE(table_->IsPrime(2));
  EXPECT_TRUE(table_->IsPrime(3));
  EXPECT_TRUE(table_->IsPrime(5));
  EXPECT_TRUE(table_->IsPrime(7));
  EXPECT_TRUE(table_->IsPrime(11));
  EXPECT_TRUE(table_->IsPrime(131));
}

// GetNextPrime must return the smallest prime strictly greater than the
// argument, across both implementations.
TEST_P(PrimeTableTest, CanGetNextPrime) {
  EXPECT_EQ(2, table_->GetNextPrime(0));
  EXPECT_EQ(3, table_->GetNextPrime(2));
  EXPECT_EQ(5, table_->GetNextPrime(3));
  EXPECT_EQ(7, table_->GetNextPrime(5));
  EXPECT_EQ(11, table_->GetNextPrime(7));
  EXPECT_EQ(131, table_->GetNextPrime(128));
}
// In order to run value-parameterized tests, you need to instantiate them,
// or bind them to a list of values which will be used as test parameters.
// You can instantiate them in a different translation module, or even
// instantiate them several times.
//
// Here, we instantiate our tests with a list of parameters. We must combine
// all variations of the boolean flag suppressing PrecalcPrimeTable and some
// meaningful values for tests. We choose a small value (1), and a value that
// will put some of the tested numbers beyond the capability of the
// PrecalcPrimeTable instance and some inside it (10). Combine will produce all
// possible combinations.
// Bool() covers both force_on_the_fly settings; 1 and 10 place the tested
// numbers inside and outside the precalculated range respectively.
INSTANTIATE_TEST_CASE_P(MeaningfulTestParameters,
                        PrimeTableTest,
                        Combine(Bool(), Values(1, 10)));

#else

// Google Test may not support Combine() with some compilers. If we
// use conditional compilation to compile out all code referring to
// the gtest_main library, MSVC linker will not link that library at
// all and consequently complain about missing entry point defined in
// that library (fatal error LNK1561: entry point must be
// defined). This dummy test keeps gtest_main linked in.
TEST(DummyTest, CombineIsNotSupportedOnThisPlatform) {}
#endif // GTEST_HAS_COMBINE
| {
"pile_set_name": "Github"
} |
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/register-configuration.h"
#include "src/globals.h"
#include "src/macro-assembler.h"
namespace v8 {
namespace internal {
namespace compiler {
namespace {
STATIC_ASSERT(RegisterConfiguration::kMaxGeneralRegisters >=
Register::kNumRegisters);
STATIC_ASSERT(RegisterConfiguration::kMaxDoubleRegisters >=
DoubleRegister::kMaxNumRegisters);
// Register configuration for the current target architecture, populated
// from the macro-assembler's Register/DoubleRegister descriptions.
class ArchDefaultRegisterConfiguration : public RegisterConfiguration {
 public:
  ArchDefaultRegisterConfiguration()
      : RegisterConfiguration(Register::kMaxNumAllocatableRegisters,
// x87 has a single allocatable FP register (stack-based FPU).
#if V8_TARGET_ARCH_X87
                              1,
                              1,
#else
                              DoubleRegister::kMaxNumAllocatableRegisters,
                              DoubleRegister::NumAllocatableAliasedRegisters(),
#endif
                              general_register_name_table_,
                              double_register_name_table_) {
    DCHECK_EQ(Register::kMaxNumAllocatableRegisters,
              Register::NumAllocatableRegisters());
    // Fill the name tables from the platform's allocation-index mapping.
    for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
      general_register_name_table_[i] = Register::AllocationIndexToString(i);
    }
    for (int i = 0; i < DoubleRegister::kMaxNumAllocatableRegisters; ++i) {
      double_register_name_table_[i] =
          DoubleRegister::AllocationIndexToString(i);
    }
  }

  const char*
      general_register_name_table_[Register::kMaxNumAllocatableRegisters];
  const char*
      double_register_name_table_[DoubleRegister::kMaxNumAllocatableRegisters];
};
// Lazily-constructed process-wide default configuration.
static base::LazyInstance<ArchDefaultRegisterConfiguration>::type
    kDefaultRegisterConfiguration = LAZY_INSTANCE_INITIALIZER;

}  // namespace

// Returns the shared, lazily-initialized configuration for this target.
const RegisterConfiguration* RegisterConfiguration::ArchDefault() {
  return &kDefaultRegisterConfiguration.Get();
}

// Plain data holder; the name tables must outlive this object (they are
// stored by pointer, not copied).
RegisterConfiguration::RegisterConfiguration(
    int num_general_registers, int num_double_registers,
    int num_aliased_double_registers, const char* const* general_register_names,
    const char* const* double_register_names)
    : num_general_registers_(num_general_registers),
      num_double_registers_(num_double_registers),
      num_aliased_double_registers_(num_aliased_double_registers),
      general_register_names_(general_register_names),
      double_register_names_(double_register_names) {}
} // namespace compiler
} // namespace internal
} // namespace v8
| {
"pile_set_name": "Github"
} |
/**
* Yona, 21st Century Project Hosting SW
* <p>
* Copyright Yona & Yobi Authors & NAVER Corp.
* https://yona.io
**/
package controllers.api;
import com.fasterxml.jackson.databind.node.ObjectNode;
import play.libs.Json;
import play.mvc.Controller;
import play.mvc.Result;
public class GlobalApi extends Controller {
    /**
     * Lightweight liveness probe.
     *
     * @return HTTP 200 with a small JSON payload confirming the server is up
     */
    public static Result hello() {
        ObjectNode body = Json.newObject();
        // ObjectNode preserves insertion order, so the serialized JSON keeps
        // the {"message": ..., "ok": ...} field layout.
        body.put("message", "I'm alive!");
        body.put("ok", true);
        return ok(body);
    }
}
| {
"pile_set_name": "Github"
} |
package llf.videomodel.utils;
import android.app.Activity;
import android.provider.Settings;
import android.util.Log;
import android.view.WindowManager;
/**
* Created by llf on 2017/3/16.
* 调节亮度
*/
public class LightUtil {
    /**
     * Writes {@code value} to the system brightness setting and applies the
     * equivalent window brightness to the given activity.
     *
     * @param activity activity whose window brightness is adjusted
     * @param value    brightness in the 0..255 system range
     */
    public static void SetLightness(Activity activity, int value) {
        try {
            Settings.System.putInt(activity.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS, value);
            WindowManager.LayoutParams attributes = activity.getWindow().getAttributes();
            // Window brightness is a 0..1 fraction; non-positive input is
            // clamped to 1 so the screen never goes fully dark.
            attributes.screenBrightness = Math.max(value, 1) / 255f;
            activity.getWindow().setAttributes(attributes);
        } catch (Exception e) {
            Log.d("视频播放","无法改变亮度");
        }
    }

    /**
     * Reads the current system brightness.
     *
     * @param activity source of the content resolver
     * @return brightness in the 0..255 range, or -1 if unavailable
     */
    public static int GetLightness(Activity activity) {
        return Settings.System.getInt(activity.getContentResolver(), Settings.System.SCREEN_BRIGHTNESS, -1);
    }
}
| {
"pile_set_name": "Github"
} |
// wreduce fixture: 16-bit results from zero-extended-then-signed 8-bit
// operands; z forces both x and y to stay live.
module wreduce_test0(input [7:0] a, b, output [15:0] x, y, z);
assign x = -$signed({1'b0, a});
assign y = $signed({1'b0, a}) + $signed({1'b0, b});
assign z = x ^ y;
endmodule
module wreduce_test1(input [31:0] a, b, output [7:0] x, y, z, w);
assign x = a - b, y = a * b, z = a >> b, w = a << b;
endmodule
| {
"pile_set_name": "Github"
} |
// Helper whose only entry point always reverts with reason string "fail".
contract A {
    function g() public { revert("fail"); }
}
// Calls into A so the test observes the revert data bubbling up through an
// external call (expected ABI-encoded Error("fail") — see directives below).
contract C {
    A a = new A();
    function f() public {
        a.g();
    }
}
// ====
// compileViaYul: also
// EVMVersion: >=byzantium
// revertStrings: debug
// ----
// f() -> FAILURE, hex"08c379a0", 0x20, 4, "fail"
| {
"pile_set_name": "Github"
} |
<?php
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Generated model for an IAM Policy resource: a set of role bindings plus a
 * concurrency-control etag and schema version.
 */
class Google_Service_Iam_Policy extends Google_Collection
{
  protected $collection_key = 'rules';
  protected $bindingsType = 'Google_Service_Iam_Binding';
  protected $bindingsDataType = 'array';
  public $etag;
  protected $rulesType = 'Google_Service_Iam_Rule';
  protected $rulesDataType = 'array';
  public $version;

  /** @param Google_Service_Iam_Binding[] $bindings */
  public function setBindings($bindings)
  {
    $this->bindings = $bindings;
  }
  /** @return Google_Service_Iam_Binding[] */
  public function getBindings()
  {
    return $this->bindings;
  }
  /** @param string $etag Concurrency-control token for read-modify-write. */
  public function setEtag($etag)
  {
    $this->etag = $etag;
  }
  /** @return string */
  public function getEtag()
  {
    return $this->etag;
  }
  /** @param Google_Service_Iam_Rule[] $rules */
  public function setRules($rules)
  {
    $this->rules = $rules;
  }
  /** @return Google_Service_Iam_Rule[] */
  public function getRules()
  {
    return $this->rules;
  }
  /** @param int $version Policy schema version. */
  public function setVersion($version)
  {
    $this->version = $version;
  }
  /** @return int */
  public function getVersion()
  {
    return $this->version;
  }
}
| {
"pile_set_name": "Github"
} |
// //CreateItem(AcceptSharingInvitation) - not available or traceable in my fork of the EWS Managed API repo.
// //GetSharingFolder - not available or traceable in my fork of the EWS Managed API repo.
// //GetSharingMetadata - not available or traceable in my fork of the EWS Managed API repo.
// //RefreshSharingFolder - not available or traceable in my fork of the EWS Managed API repo.
//}
| {
"pile_set_name": "Github"
} |
/*
* searchtools.js_t
* ~~~~~~~~~~~~~~~~
*
* Sphinx JavaScript utilities for the full-text search.
*
* :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
* :license: BSD, see LICENSE for details.
*
*/
/* Non-minified version JS is _stemmer.js if file is provided */
/**
* Porter Stemmer
*/
/**
 * Porter Stemmer
 *
 * Reduces English words to their stems with the classic five-step Porter
 * algorithm, implemented as an ordered pipeline of regex rewrites.  The
 * rule order is significant: each step assumes the earlier ones ran.
 */
var Stemmer = function() {

  // Step 2 suffix rewrites (applied when the stem's measure m > 0).
  var step2list = {
    ational: 'ate',
    tional: 'tion',
    enci: 'ence',
    anci: 'ance',
    izer: 'ize',
    bli: 'ble',
    alli: 'al',
    entli: 'ent',
    eli: 'e',
    ousli: 'ous',
    ization: 'ize',
    ation: 'ate',
    ator: 'ate',
    alism: 'al',
    iveness: 'ive',
    fulness: 'ful',
    ousness: 'ous',
    aliti: 'al',
    iviti: 'ive',
    biliti: 'ble',
    logi: 'log'
  };

  // Step 3 suffix rewrites.
  var step3list = {
    icate: 'ic',
    ative: '',
    alize: 'al',
    iciti: 'ic',
    ical: 'ic',
    ful: '',
    ness: ''
  };

  // Regex fragments for the CVC "measure" patterns of the Porter paper.
  var c = "[^aeiou]";          // consonant
  var v = "[aeiouy]";          // vowel
  var C = c + "[^aeiouy]*";    // consonant sequence
  var V = v + "[aeiou]*";      // vowel sequence

  var mgr0 = "^(" + C + ")?" + V + C;                      // [C]VC... is m>0
  var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$";    // [C]VC[V] is m=1
  var mgr1 = "^(" + C + ")?" + V + C + V + C;              // [C]VCVC... is m>1
  var s_v = "^(" + C + ")?" + v;                           // vowel in stem

  this.stemWord = function (w) {
    var stem;
    var suffix;
    var firstch;
    var origword = w;

    // Words shorter than 3 characters are returned unchanged.
    if (w.length < 3)
      return w;

    var re;
    var re2;
    var re3;
    var re4;

    // Temporarily uppercase a leading 'y' so it is treated as a consonant.
    firstch = w.substr(0,1);
    if (firstch == "y")
      w = firstch.toUpperCase() + w.substr(1);

    // Step 1a: plural endings (sses -> ss, ies -> i, strip trailing s).
    re = /^(.+?)(ss|i)es$/;
    re2 = /^(.+?)([^s])s$/;

    if (re.test(w))
      w = w.replace(re,"$1$2");
    else if (re2.test(w))
      w = w.replace(re2,"$1$2");

    // Step 1b: past/participle endings (eed, ed, ing) with restoration of a
    // final 'e' or undoubling of consonants where required.
    re = /^(.+?)eed$/;
    re2 = /^(.+?)(ed|ing)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      re = new RegExp(mgr0);
      if (re.test(fp[1])) {
        re = /.$/;
        w = w.replace(re,"");
      }
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1];
      re2 = new RegExp(s_v);
      if (re2.test(stem)) {
        w = stem;
        re2 = /(at|bl|iz)$/;
        re3 = new RegExp("([^aeiouylsz])\\1$");
        re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
        if (re2.test(w))
          w = w + "e";
        else if (re3.test(w)) {
          re = /.$/;
          w = w.replace(re,"");
        }
        else if (re4.test(w))
          w = w + "e";
      }
    }

    // Step 1c: terminal y -> i when the stem contains a vowel.
    re = /^(.+?)y$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(s_v);
      if (re.test(stem))
        w = stem + "i";
    }

    // Step 2: map double suffixes to single ones (see step2list).
    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step2list[suffix];
    }

    // Step 3: further suffix reduction (see step3list).
    re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      suffix = fp[2];
      re = new RegExp(mgr0);
      if (re.test(stem))
        w = stem + step3list[suffix];
    }

    // Step 4: drop residual suffixes when the measure m > 1.
    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
    re2 = /^(.+?)(s|t)(ion)$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      if (re.test(stem))
        w = stem;
    }
    else if (re2.test(w)) {
      var fp = re2.exec(w);
      stem = fp[1] + fp[2];
      re2 = new RegExp(mgr1);
      if (re2.test(stem))
        w = stem;
    }

    // Step 5: drop a final 'e' (m > 1, or m = 1 without a cvc ending) and
    // undouble a final 'll' when m > 1.
    re = /^(.+?)e$/;
    if (re.test(w)) {
      var fp = re.exec(w);
      stem = fp[1];
      re = new RegExp(mgr1);
      re2 = new RegExp(meq1);
      re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
      if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
        w = stem;
    }
    re = /ll$/;
    re2 = new RegExp(mgr1);
    if (re.test(w) && re2.test(w)) {
      re = /.$/;
      w = w.replace(re,"");
    }

    // and turn initial Y back to y
    if (firstch == "y")
      w = firstch.toLowerCase() + w.substr(1);
    return w;
  }
}
/**
* Simple result scoring code.
*/
/**
 * Simple result scoring code.
 *
 * Relative weights used by the search to rank hits; larger is better.
 */
var Scorer = {
  // Implement the following function to further tweak the score for each result
  // The function takes a result array [filename, title, anchor, descr, score]
  // and returns the new score.
  /*
  score: function(result) {
    return result[4];
  },
  */

  // query matches the full name of an object
  objNameMatch: 11,
  // or matches in the last dotted part of the object name
  objPartialMatch: 6,
  // Additive scores depending on the priority of the object
  objPrio: {0:  15,   // used to be importantResults
            1:  5,   // used to be objectResults
            2: -5},  // used to be unimportantResults
  //  Used when the priority is not in the mapping.
  objPrioDefault: 0,

  // query found in title
  title: 15,
  // query found in terms
  term: 5
};
var splitChars = (function() {
var result = {};
var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648,
1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702,
2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971,
2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345,
3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761,
3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823,
4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125,
8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695,
11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587,
43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141];
var i, j, start, end;
for (i = 0; i < singles.length; i++) {
result[singles[i]] = true;
}
var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709],
[722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161],
[1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568],
[1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807],
[1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047],
[2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383],
[2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450],
[2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547],
[2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673],
[2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820],
[2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946],
[2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023],
[3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173],
[3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332],
[3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481],
[3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718],
[3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791],
[3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095],
[4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205],
[4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687],
[4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968],
[4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869],
[5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102],
[6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271],
[6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592],
[6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822],
[6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167],
[7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959],
[7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143],
[8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318],
[8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483],
[8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101],
[10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567],
[11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292],
[12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444],
[12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783],
[12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311],
[19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511],
[42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774],
[42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071],
[43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263],
[43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519],
[43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647],
[43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967],
[44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295],
[57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274],
[64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007],
[65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381],
[65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]];
for (i = 0; i < ranges.length; i++) {
start = ranges[i][0];
end = ranges[i][1];
for (j = start; j <= end; j++) {
result[j] = true;
}
}
return result;
})();
/**
 * Split a query string into words on any character listed in splitChars.
 * Maximal runs of non-separator characters become words; separators are
 * discarded, so the result never contains empty strings.
 */
function splitQuery(query) {
  var words = [];
  var wordStart = -1;  // index where the current word began, or -1 if none
  var pos;
  for (pos = 0; pos < query.length; pos++) {
    var isSeparator = !!splitChars[query.charCodeAt(pos)];
    if (!isSeparator) {
      // Start a new word unless we are already inside one.
      if (wordStart === -1)
        wordStart = pos;
    } else if (wordStart !== -1) {
      // A separator terminates the word in progress.
      words.push(query.slice(wordStart, pos));
      wordStart = -1;
    }
  }
  // Flush a word that runs to the end of the string.
  if (wordStart !== -1)
    words.push(query.slice(wordStart));
  return words;
}
/**
 * Search Module
 */
var Search = {
// Loaded search index object (set via setIndex from searchindex.js), or null.
_index : null,
// Query deferred because the index had not loaded yet, or null.
_queued_query : null,
// Animation state for the "Searching..." dots; -1 means not running.
_pulse_status : -1,
// Read the ?q= URL parameter, mirror it into the search box, and run it.
init : function() {
var params = $.getQueryParameters();
if (params.q) {
var query = params.q[0];
$('input[name="q"]')[0].value = query;
this.performSearch(query);
}
},
// Load searchindex.js from `url`; the script calls Search.setIndex() when
// it executes.  On failure, fall back to loading it through the
// "searchindexloader" element.
loadIndex : function(url) {
$.ajax({type: "GET", url: url, data: null,
dataType: "script", cache: true,
complete: function(jqxhr, textstatus) {
if (textstatus != "success") {
document.getElementById("searchindexloader").src = url;
}
}});
},
// Called by searchindex.js once loaded; stores the index and runs any
// query that was deferred while waiting for it.
setIndex : function(index) {
var q;
this._index = index;
if ((q = this._queued_query) !== null) {
this._queued_query = null;
Search.query(q);
}
},
hasIndex : function() {
return this._index !== null;
},
// Remember a query to run as soon as the index arrives (see setIndex).
deferQuery : function(query) {
this._queued_query = query;
},
stopPulse : function() {
this._pulse_status = 0;
},
// Animate "Searching.", "Searching..", ... every 500 ms until stopPulse()
// resets _pulse_status.  A no-op if the animation is already running.
startPulse : function() {
if (this._pulse_status >= 0)
return;
function pulse() {
var i;
Search._pulse_status = (Search._pulse_status + 1) % 4;
var dotString = '';
for (i = 0; i < Search._pulse_status; i++)
dotString += '.';
Search.dots.text(dotString);
if (Search._pulse_status > -1)
window.setTimeout(pulse, 500);
}
pulse();
},
/**
 * perform a search for something (or wait until index is loaded)
 */
performSearch : function(query) {
// create the required interface elements
this.out = $('#search-results');
this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
this.dots = $('<span></span>').appendTo(this.title);
this.status = $('<p style="display: none"></p>').appendTo(this.out);
this.output = $('<ul class="search"/>').appendTo(this.out);
$('#search-progress').text(_('Preparing search...'));
this.startPulse();
// index already loaded, the browser was quick!
// Otherwise queue the query; setIndex() will run it when the index lands.
if (this.hasIndex())
this.query(query);
else
this.deferQuery(query);
},
/**
 * execute search (requires search index to be loaded)
 */
// Splits the query into object terms, included search terms and excluded
// ("-word") terms, scores matches from the object index and the full-text
// index, sorts them, and renders results incrementally into this.output.
query : function(query) {
var i;
var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];
// stem the searchterms and add them to the correct list
var stemmer = new Stemmer();
var searchterms = [];
var excluded = [];
var hlterms = [];
var tmp = splitQuery(query);
var objectterms = [];
for (i = 0; i < tmp.length; i++) {
if (tmp[i] !== "") {
objectterms.push(tmp[i].toLowerCase());
}
// Stopwords and pure numbers are kept as object terms (above) but are
// not used for full-text matching.
if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
tmp[i] === "") {
// skip this "word"
continue;
}
// stem the word
var word = stemmer.stemWord(tmp[i].toLowerCase());
// prevent stemmer from cutting word smaller than two chars
if(word.length < 3 && tmp[i].length >= 3) {
word = tmp[i];
}
var toAppend;
// select the correct list
// A leading "-" marks the term as excluded from results.
if (word[0] == '-') {
toAppend = excluded;
word = word.substr(1);
}
else {
toAppend = searchterms;
hlterms.push(tmp[i].toLowerCase());
}
// only add if not already in the list
if (!$u.contains(toAppend, word))
toAppend.push(word);
}
// Links to result pages carry the un-stemmed terms for highlighting.
var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
// console.debug('SEARCH: searching for:');
// console.info('required: ', searchterms);
// console.info('excluded: ', excluded);
// prepare search
var terms = this._index.terms;
var titleterms = this._index.titleterms;
// array of [filename, title, anchor, descr, score]
var results = [];
$('#search-progress').empty();
// lookup as object
for (i = 0; i < objectterms.length; i++) {
var others = [].concat(objectterms.slice(0, i),
objectterms.slice(i+1, objectterms.length));
results = results.concat(this.performObjectSearch(objectterms[i], others));
}
// lookup as search terms in fulltext
results = results.concat(this.performTermsSearch(searchterms, excluded, terms, titleterms));
// let the scorer override scores with a custom scoring function
if (Scorer.score) {
for (i = 0; i < results.length; i++)
results[i][4] = Scorer.score(results[i]);
}
// now sort the results by score (in opposite order of appearance, since the
// display function below uses pop() to retrieve items) and then
// alphabetically
results.sort(function(a, b) {
var left = a[4];
var right = b[4];
if (left > right) {
return 1;
} else if (left < right) {
return -1;
} else {
// same score: sort alphabetically
left = a[1].toLowerCase();
right = b[1].toLowerCase();
return (left > right) ? -1 : ((left < right) ? 1 : 0);
}
});
// for debugging
//Search.lastresults = results.slice(); // a copy
//console.info('search results:', Search.lastresults);
// print the results
var resultCount = results.length;
// Renders one result, then schedules itself again after the slide-down
// animation (and any summary fetch) completes, until results is empty.
function displayNextItem() {
// results left, load the summary and display it
if (results.length) {
var item = results.pop();
var listItem = $('<li style="display:none"></li>');
if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
// dirhtml builder
var dirname = item[0] + '/';
if (dirname.match(/\/index\/$/)) {
dirname = dirname.substring(0, dirname.length-6);
} else if (dirname == 'index/') {
dirname = '';
}
listItem.append($('<a/>').attr('href',
DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
highlightstring + item[2]).html(item[1]));
} else {
// normal html builders
listItem.append($('<a/>').attr('href',
item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
highlightstring + item[2]).html(item[1]));
}
if (item[3]) {
listItem.append($('<span> (' + item[3] + ')</span>'));
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
} else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
// No description: fetch the page source and build an excerpt from it.
var suffix = DOCUMENTATION_OPTIONS.SOURCELINK_SUFFIX;
if (suffix === undefined) {
suffix = '.txt';
}
$.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[5] + (item[5].slice(-suffix.length) === suffix ? '' : suffix),
dataType: "text",
complete: function(jqxhr, textstatus) {
var data = jqxhr.responseText;
if (data !== '' && data !== undefined) {
listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
}
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
}});
} else {
// no source available, just display title
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
}
}
// search finished, update title and status message
else {
Search.stopPulse();
Search.title.text(_('Search Results'));
if (!resultCount)
Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
else
Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
Search.status.fadeIn(500);
}
}
displayNextItem();
},
/**
 * search for object names
 */
// Returns an array of result tuples
// [docname, fullname, '#anchor', descr, score, filename] for every indexed
// object whose (possibly dotted) full name contains `object`; when
// `otherterms` is non-empty, each of those terms must also appear in the
// object's name, type name or page title.
performObjectSearch : function(object, otherterms) {
var filenames = this._index.filenames;
var docnames = this._index.docnames;
var objects = this._index.objects;
var objnames = this._index.objnames;
var titles = this._index.titles;
var i;
var results = [];
for (var prefix in objects) {
for (var name in objects[prefix]) {
var fullname = (prefix ? prefix + '.' : '') + name;
if (fullname.toLowerCase().indexOf(object) > -1) {
var score = 0;
var parts = fullname.split('.');
// check for different match types: exact matches of full name or
// "last name" (i.e. last dotted part)
if (fullname == object || parts[parts.length - 1] == object) {
score += Scorer.objNameMatch;
// matches in last name
} else if (parts[parts.length - 1].indexOf(object) > -1) {
score += Scorer.objPartialMatch;
}
// match is [docindex, objtype-index, priority, anchor] per the index.
var match = objects[prefix][name];
var objname = objnames[match[1]][2];
var title = titles[match[0]];
// If more than one term searched for, we require other words to be
// found in the name/title/description
if (otherterms.length > 0) {
var haystack = (prefix + ' ' + name + ' ' +
objname + ' ' + title).toLowerCase();
var allfound = true;
for (i = 0; i < otherterms.length; i++) {
if (haystack.indexOf(otherterms[i]) == -1) {
allfound = false;
break;
}
}
if (!allfound) {
continue;
}
}
var descr = objname + _(', in ') + title;
// '' means the anchor is the full name; '-' means type-prefixed.
var anchor = match[3];
if (anchor === '')
anchor = fullname;
else if (anchor == '-')
anchor = objnames[match[1]][1] + '-' + fullname;
// add custom score for some objects according to scorer
if (Scorer.objPrio.hasOwnProperty(match[2])) {
score += Scorer.objPrio[match[2]];
} else {
score += Scorer.objPrioDefault;
}
results.push([docnames[match[0]], fullname, '#'+anchor, descr, score, filenames[match[0]]]);
}
}
}
return results;
},
/**
 * search for full-text terms in the index
 */
// Returns result tuples [docname, title, '', null, score, filename] for
// files that contain every term in `searchterms` (in body or title) and
// none of the terms in `excluded`.  `terms`/`titleterms` map a stemmed
// word to a file index or an array of file indices.
performTermsSearch : function(searchterms, excluded, terms, titleterms) {
var docnames = this._index.docnames;
var filenames = this._index.filenames;
var titles = this._index.titles;
var i, j, file;
// fileMap: file index -> list of matched words;
// scoreMap: file index -> {word: score} for the best-scoring occurrence.
var fileMap = {};
var scoreMap = {};
var results = [];
// perform the search on the required terms
for (i = 0; i < searchterms.length; i++) {
var word = searchterms[i];
var files = [];
var _o = [
{files: terms[word], score: Scorer.term},
{files: titleterms[word], score: Scorer.title}
];
// no match but word was a required one
if ($u.every(_o, function(o){return o.files === undefined;})) {
break;
}
// found search word in contents
$u.each(_o, function(o) {
var _files = o.files;
if (_files === undefined)
return
// The index stores a bare number when only one file matches.
if (_files.length === undefined)
_files = [_files];
files = files.concat(_files);
// set score for the word in each file to Scorer.term
for (j = 0; j < _files.length; j++) {
file = _files[j];
if (!(file in scoreMap))
scoreMap[file] = {}
scoreMap[file][word] = o.score;
}
});
// create the mapping
for (j = 0; j < files.length; j++) {
file = files[j];
if (file in fileMap)
fileMap[file].push(word);
else
fileMap[file] = [word];
}
}
// now check if the files don't contain excluded terms
for (file in fileMap) {
var valid = true;
// check if all requirements are matched
if (fileMap[file].length != searchterms.length)
continue;
// ensure that none of the excluded terms is in the search result
for (i = 0; i < excluded.length; i++) {
if (terms[excluded[i]] == file ||
titleterms[excluded[i]] == file ||
$u.contains(terms[excluded[i]] || [], file) ||
$u.contains(titleterms[excluded[i]] || [], file)) {
valid = false;
break;
}
}
// if we have still a valid result we can add it to the result list
if (valid) {
// select one (max) score for the file.
// for better ranking, we should calculate ranking by using words statistics like basic tf-idf...
var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]}));
results.push([docnames[file], titles[file], '', null, score, filenames[file]]);
}
}
return results;
},
/**
* helper function to return a node containing the
* search summary for a given text. keywords is a list
* of stemmed words, hlwords is the list of normal, unstemmed
* words. the first one is used to find the occurrence, the
* latter for highlighting it.
*/
makeSearchSummary : function(text, keywords, hlwords) {
var textLower = text.toLowerCase();
var start = 0;
$.each(keywords, function() {
var i = textLower.indexOf(this.toLowerCase());
if (i > -1)
start = i;
});
start = Math.max(start - 120, 0);
var excerpt = ((start > 0) ? '...' : '') +
$.trim(text.substr(start, 240)) +
((start + 240 - text.length) ? '...' : '');
var rv = $('<div class="context"></div>').text(excerpt);
$.each(hlwords, function() {
rv = rv.highlightText(this, 'highlighted');
});
return rv;
}
};
// Initialize the search module once the DOM is ready.
// ($(fn) is jQuery's shorthand for $(document).ready(fn).)
$(function() {
  Search.init();
});
"pile_set_name": "Github"
} |
/*
	Annex/MacTypes.h
	----------------
	Written in 2011 by Joshua Juran, who places it in the public domain.
*/

#ifndef ANNEX_MACTYPES_H
#define ANNEX_MACTYPES_H

// Mac OS
#ifndef __MACTYPES__
#include <MacTypes.h>
#endif

/*
	The PRefCon/SRefCon/URefCon typedefs were introduced in the Mac OS X
	10.5 SDK; backfill them here for older SDKs so client code can use
	them unconditionally.
*/
#ifndef MAC_OS_X_VERSION_10_5
typedef void* PRefCon;
/* In 64-bit builds refcons are pointer-sized; in 32-bit builds they are
   plain 32-bit integers, matching the 10.5 SDK definitions. */
#if __LP64__
typedef void* SRefCon;
typedef void* URefCon;
#else
typedef SInt32 SRefCon;
typedef UInt32 URefCon;
#endif  // #ifdef __LP64__
#endif  // #ifndef MAC_OS_X_VERSION_10_5

#endif
| {
"pile_set_name": "Github"
} |
@api @smokeTest @public_link_share-feature-required @toImplementOnOCIS @issue-ocis-reva-172
Feature: set timeouts of LOCKS
Background:
Given user "Alice" has been created with default attributes and skeleton files
@skipOnOcV10.3 @skipOnOcV10.4
Scenario Outline: do not set timeout on folder and check the default timeout
Given using <dav-path> DAV path
And parameter "lock_timeout_default" of app "core" has been set to "<default-timeout>"
And parameter "lock_timeout_max" of app "core" has been set to "<max-timeout>"
When user "Alice" locks folder "PARENT" using the WebDAV API setting following properties
| lockscope | exclusive |
And user "Alice" gets the following properties of folder "PARENT" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/CHILD" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/parent.txt" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
# consider a drift of up to 9 seconds between setting the lock and retrieving it
Examples:
| dav-path | default-timeout | max-timeout | result |
| old | 120 | 3600 | /Second-(120\|11[1-9])$/ |
| old | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
| new | 120 | 3600 | /Second-(120\|11[1-9])$/ |
| new | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
Scenario Outline: set timeout on folder
Given using <dav-path> DAV path
When user "Alice" locks folder "PARENT" using the WebDAV API setting following properties
| lockscope | shared |
| timeout | <timeout> |
And user "Alice" gets the following properties of folder "PARENT" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/CHILD" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/parent.txt" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
Examples:
| dav-path | timeout | result |
| old | second-999 | /Second-\d{3}$/ |
| old | second-99999999 | /Second-\d{5}$/ |
| old | infinite | /Second-\d{5}$/ |
| old | second--1 | /Second-\d{5}$/ |
| old | second-0 | /Second-\d{4}$/ |
| new | second-999 | /Second-\d{3}$/ |
| new | second-99999999 | /Second-\d{5}$/ |
| new | infinite | /Second-\d{5}$/ |
| new | second--1 | /Second-\d{5}$/ |
| new | second-0 | /Second-\d{4}$/ |
@skipOnOcV10.3 @skipOnOcV10.4
Scenario Outline: set timeout over the maximum on folder
Given using <dav-path> DAV path
And parameter "lock_timeout_default" of app "core" has been set to "<default-timeout>"
And parameter "lock_timeout_max" of app "core" has been set to "<max-timeout>"
When user "Alice" locks folder "PARENT" using the WebDAV API setting following properties
| lockscope | shared |
| timeout | <timeout> |
And user "Alice" gets the following properties of folder "PARENT" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/CHILD" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/parent.txt" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response should match "<result>"
Examples:
| dav-path | timeout | default-timeout | max-timeout | result |
| old | second-600 | 120 | 3600 | /Second-(600\|59[1-9])$/ |
| old | second-600 | 99999 | 3600 | /Second-(600\|59[1-9])$/ |
| old | second-10000 | 120 | 3600 | /Second-(3600\|359[1-9])$/ |
| old | second-10000 | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
| old | infinite | 120 | 3600 | /Second-(3600\|359[1-9])$/ |
| old | infinite | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
| new | second-600 | 120 | 3600 | /Second-(600\|59[1-9])$/ |
| new | second-600 | 99999 | 3600 | /Second-(600\|59[1-9])$/ |
| new | second-10000 | 120 | 3600 | /Second-(3600\|359[1-9])$/ |
| new | second-10000 | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
| new | infinite | 120 | 3600 | /Second-(3600\|359[1-9])$/ |
| new | infinite | 99999 | 3600 | /Second-(3600\|359[1-9])$/ |
@files_sharing-app-required
Scenario Outline: as owner set timeout on folder as receiver check it
Given using <dav-path> DAV path
And user "Brian" has been created with default attributes and skeleton files
And user "Alice" has shared folder "PARENT" with user "Brian"
When user "Alice" locks folder "PARENT" using the WebDAV API setting following properties
| lockscope | shared |
| timeout | <timeout> |
And user "Brian" gets the following properties of folder "PARENT (2)" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Brian" should match "<result>"
When user "Brian" gets the following properties of folder "PARENT (2)/CHILD" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Brian" should match "<result>"
When user "Brian" gets the following properties of folder "PARENT (2)/parent.txt" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Brian" should match "<result>"
Examples:
| dav-path | timeout | result |
| old | second-999 | /Second-\d{3}$/ |
| old | second-99999999 | /Second-\d{5}$/ |
| old | infinite | /Second-\d{5}$/ |
| old | second--1 | /Second-\d{5}$/ |
| old | second-0 | /Second-\d{4}$/ |
| new | second-999 | /Second-\d{3}$/ |
| new | second-99999999 | /Second-\d{5}$/ |
| new | infinite | /Second-\d{5}$/ |
| new | second--1 | /Second-\d{5}$/ |
| new | second-0 | /Second-\d{4}$/ |
@files_sharing-app-required
Scenario Outline: as share receiver set timeout on folder as owner check it
Given using <dav-path> DAV path
And user "Brian" has been created with default attributes and skeleton files
And user "Alice" has shared folder "PARENT" with user "Brian"
When user "Brian" locks folder "PARENT (2)" using the WebDAV API setting following properties
| lockscope | shared |
| timeout | <timeout> |
And user "Alice" gets the following properties of folder "PARENT" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/CHILD" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When user "Alice" gets the following properties of folder "PARENT/parent.txt" using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
Examples:
| dav-path | timeout | result |
| old | second-999 | /Second-\d{3}$/ |
| old | second-99999999 | /Second-\d{5}$/ |
| old | infinite | /Second-\d{5}$/ |
| old | second--1 | /Second-\d{5}$/ |
| old | second-0 | /Second-\d{4}$/ |
| new | second-999 | /Second-\d{3}$/ |
| new | second-99999999 | /Second-\d{5}$/ |
| new | infinite | /Second-\d{5}$/ |
| new | second--1 | /Second-\d{5}$/ |
| new | second-0 | /Second-\d{4}$/ |
@files_sharing-app-required
Scenario Outline: as owner set timeout on folder as public check it
Given using <dav-path> DAV path
And user "Alice" has created a public link share of folder "PARENT"
When user "Alice" locks folder "PARENT" using the WebDAV API setting following properties
| lockscope | shared |
| timeout | <timeout> |
And the public gets the following properties of entry "/" in the last created public link using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When the public gets the following properties of entry "/CHILD" in the last created public link using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
When the public gets the following properties of entry "/parent.txt" in the last created public link using the WebDAV API
| propertyName |
| d:lockdiscovery |
Then the value of the item "//d:timeout" in the response to user "Alice" should match "<result>"
Examples:
| dav-path | timeout | result |
| old | second-999 | /Second-\d{3}$/ |
| old | second-99999999 | /Second-\d{5}$/ |
| old | infinite | /Second-\d{5}$/ |
| old | second--1 | /Second-\d{5}$/ |
| old | second-0 | /Second-\d{4}$/ |
| new | second-999 | /Second-\d{3}$/ |
| new | second-99999999 | /Second-\d{5}$/ |
| new | infinite | /Second-\d{5}$/ |
| new | second--1 | /Second-\d{5}$/ |
| new | second-0 | /Second-\d{4}$/ |
| {
"pile_set_name": "Github"
} |
[preset00]
fRating=5.000000
fGammaAdj=2.000000
fDecay=0.970000
fVideoEchoZoom=2.000000
fVideoEchoAlpha=0.000000
nVideoEchoOrientation=0
nWaveMode=2
bAdditiveWaves=0
bWaveDots=0
bModWaveAlphaByVolume=0
bMaximizeWaveColor=1
bTexWrap=0
bDarkenCenter=0
bMotionVectorsOn=0
bRedBlueStereo=0
nMotionVectorsX=12
nMotionVectorsY=9
bBrighten=0
bDarken=0
bSolarize=0
bInvert=0
fWaveAlpha=1.958904
fWaveScale=0.550447
fWaveSmoothing=0.360000
fWaveParam=-0.500000
fModWaveAlphaStart=0.750000
fModWaveAlphaEnd=0.950000
fWarpAnimSpeed=1.000000
fWarpScale=1.000000
fZoomExponent=1.000000
fShader=0.000000
zoom=1.000000
rot=0.000000
cx=0.500000
cy=0.500000
dx=0.000000
dy=0.000000
warp=1.000000
sx=1.000000
sy=1.000000
wave_r=0.500000
wave_g=0.500000
wave_b=0.500000
wave_x=0.500000
wave_y=0.500000
ob_size=0.310000
ob_r=0.300000
ob_g=0.000000
ob_b=0.000000
ob_a=0.000000
ib_size=0.010000
ib_r=0.250000
ib_g=0.250000
ib_b=0.250000
ib_a=0.000000
per_frame_1=dx = dx - .001;
per_frame_2=dy = dy - .001;
per_frame_3=wave_r = wave_r + .5*sin(time*.666);
per_frame_4=wave_g = wave_g + .5*sin(time*.777);
per_frame_5=wave_b = wave_b + .5*sin(time*.888);
per_pixel_1=dx = if(above(y,.7),001.*asin((x-.5)*(y-.7)),dx);
per_pixel_2=dx = if(below(y,.3),001.*-asin((x-.5)*(y-.3)),dx);
per_pixel_3=dy = (y-.5)*.05;
per_pixel_4=warp = dx*50; | {
"pile_set_name": "Github"
} |
/*
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine
Copyright (C) 2012-2013 Ian Preston
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Details (including contact information) can be found at:
jpc.sourceforge.net
or the developer website
sourceforge.net/projects/jpc/
End of licence header
*/
package org.jpc.emulator.execution.opcodes.vm;
import org.jpc.emulator.execution.*;
import org.jpc.emulator.execution.decoder.*;
import org.jpc.emulator.processor.*;
import org.jpc.emulator.processor.fpu64.*;
import static org.jpc.emulator.processor.Processor.*;
public class fbstp_Mt_mem extends Executable
{
final Pointer op1;
public fbstp_Mt_mem(int blockStart, int eip, int prefices, PeekableInputStream input)
{
super(blockStart, eip);
int modrm = input.readU8();
op1 = Modrm.getPointer(prefices, modrm, input);
}
public Branch execute(Processor cpu)
{
byte[] data = new byte[10];
long n = (long)Math.abs(cpu.fpu.ST(0));
long decade = 1;
for (int i = 0; i < 9; i++)
{
int val = (int) ((n % (decade * 10)) / decade);
byte b = (byte) val;
decade *= 10;
val = (int) ((n % (decade * 10)) / decade);
b |= (val << 4);
data[i] = b;
}
data[9] = (cpu.fpu.ST(0) < 0) ? (byte)0x80 : (byte)0x00;
op1.setF80(cpu, data);
cpu.fpu.pop();
return Branch.None;
}
public boolean isBranch()
{
return false;
}
public String toString()
{
return this.getClass().getName();
}
} | {
"pile_set_name": "Github"
} |
//  (C) Copyright John Maddock 2005.
//  Use, modification and distribution are subject to the
//  Boost Software License, Version 1.0. (See accompanying file
//  LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
//  The aim of this header is just to include <utility> but to do
//  so in a way that does not result in recursive inclusion of
//  the Boost TR1 components if boost/tr1/tr1/utility is in the
//  include search path.  We have to do this to avoid circular
//  dependencies:
//
#ifndef BOOST_CONFIG_UTILITY
#  define BOOST_CONFIG_UTILITY

// Temporarily define BOOST_TR1_NO_RECURSION (if not already set by an
// outer include) so that a boost/tr1 forwarding header for <utility>
// passes straight through to the standard library instead of pulling
// Boost.TR1 back in.
#  ifndef BOOST_TR1_NO_RECURSION
#     define BOOST_TR1_NO_RECURSION
// Remember that *we* defined it, so we only undo our own change below.
#     define BOOST_CONFIG_NO_UTILITY_RECURSION
#  endif

#  include <utility>

// Restore the macro state for subsequent includes.
#  ifdef BOOST_CONFIG_NO_UTILITY_RECURSION
#     undef BOOST_TR1_NO_RECURSION
#     undef BOOST_CONFIG_NO_UTILITY_RECURSION
#  endif

#endif
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# Write the names of all Go files in the current directory that golint
# reports no issues for into .linted, one filename per line.
for i in *.go; do
    # golint prints one line per complaint; `grep -q .` succeeds iff there
    # is any output, so `||` keeps only the clean files.  "$i" is quoted so
    # filenames containing spaces or glob characters are handled safely.
    golint "$i" | grep -q . || echo "$i"
done > .linted
| {
"pile_set_name": "Github"
} |
/*
* Implementation of operations over local quota file
*/
#include <linux/fs.h>
#include <linux/slab.h>
#include <linux/quota.h>
#include <linux/quotaops.h>
#include <linux/module.h>
#include <cluster/masklog.h>
#include "ocfs2_fs.h"
#include "ocfs2.h"
#include "inode.h"
#include "alloc.h"
#include "file.h"
#include "buffer_head_io.h"
#include "journal.h"
#include "sysfile.h"
#include "dlmglue.h"
#include "quota.h"
#include "uptodate.h"
#include "super.h"
#include "ocfs2_trace.h"
/* How many ocfs2_local_disk_dqblk entries fit into one fs block, after
 * reserving OCFS2_QBLK_RESERVED_SPACE bytes at the end of the block. */
static inline unsigned int ol_quota_entries_per_block(struct super_block *sb)
{
	unsigned int usable = sb->s_blocksize - OCFS2_QBLK_RESERVED_SPACE;

	return usable / sizeof(struct ocfs2_local_disk_dqblk);
}
/* Number of data blocks (blocks carrying quota entries) in one chunk.
 * Bounded by the number of bits available in the chunk-header bitmap. */
static inline unsigned int ol_chunk_blocks(struct super_block *sb)
{
	unsigned int bitmap_bits;

	/* Bitmap bytes left in the header block after the chunk struct and
	 * the reserved tail, converted to bits. */
	bitmap_bits = (sb->s_blocksize - sizeof(struct ocfs2_local_disk_chunk) -
		       OCFS2_QBLK_RESERVED_SPACE) << 3;
	return bitmap_bits / ol_quota_entries_per_block(sb);
}
/* Total number of quota entries (bitmap bits) covered by one chunk. */
static unsigned int ol_chunk_entries(struct super_block *sb)
{
	return ol_quota_entries_per_block(sb) * ol_chunk_blocks(sb);
}
/* Block number (within the quota file) of chunk c's header block. */
static unsigned int ol_quota_chunk_block(struct super_block *sb, int c)
{
	/* Block 0 is the local quota file info; every chunk then occupies
	 * one chunk-header block plus ol_chunk_blocks() data blocks. */
	return 1 + c * (ol_chunk_blocks(sb) + 1);
}
/* Quota-file block holding entry `off` of chunk `c`. */
static unsigned int ol_dqblk_block(struct super_block *sb, int c, int off)
{
	int entries_per_block = ol_quota_entries_per_block(sb);
	unsigned int block_in_chunk = off / entries_per_block;

	/* +1 skips the chunk header block. */
	return ol_quota_chunk_block(sb, c) + 1 + block_in_chunk;
}
/* Byte offset of entry `off` within its data block. */
static unsigned int ol_dqblk_block_off(struct super_block *sb, int c, int off)
{
	int epb = ol_quota_entries_per_block(sb);
	int slot = off % epb;

	return slot * sizeof(struct ocfs2_local_disk_dqblk);
}
/* Absolute byte offset of the dquot structure in the quota file:
 * (block number * block size) + offset within the block. */
static loff_t ol_dqblk_off(struct super_block *sb, int c, int off)
{
	unsigned int blk = ol_dqblk_block(sb, c, off);
	unsigned int off_in_blk = ol_dqblk_block_off(sb, c, off);

	return (blk << sb->s_blocksize_bits) + off_in_blk;
}
/* Logical block index containing file offset @off */
static inline unsigned int ol_dqblk_file_block(struct super_block *sb, loff_t off)
{
	return (unsigned int)(off >> sb->s_blocksize_bits);
}
/* Byte offset within its block of file offset @off */
static inline unsigned int ol_dqblk_block_offset(struct super_block *sb, loff_t off)
{
	loff_t mask = (1 << sb->s_blocksize_bits) - 1;

	return off & mask;
}
/* Entry index within chunk @c of the structure at file offset @off */
static int ol_dqblk_chunk_off(struct super_block *sb, int c, loff_t off)
{
	int epb = ol_quota_entries_per_block(sb);
	/* Split the file offset into a block index and an in-block byte
	 * offset */
	loff_t blk = off >> sb->s_blocksize_bits;
	unsigned int in_blk = off & ((1 << sb->s_blocksize_bits) - 1);

	/* Entries in full data blocks preceding this one, plus the slot
	 * within this block */
	return (blk - ol_quota_chunk_block(sb, c) - 1) * epb +
	       in_blk / sizeof(struct ocfs2_local_disk_dqblk);
}
/*
 * Apply @modify to buffer @bh of @inode and write it into the fs.
 *
 * Starts a one-buffer transaction, runs the @modify callback (with
 * @private as its argument) under the buffer lock, marks the buffer
 * dirty in the journal and commits.  Returns 0 or a negative errno.
 */
static int ocfs2_modify_bh(struct inode *inode, struct buffer_head *bh,
			   void (*modify)(struct buffer_head *, void *),
			   void *private)
{
	struct super_block *sb = inode->i_sb;
	handle_t *handle;
	int status;

	handle = ocfs2_start_trans(OCFS2_SB(sb),
				   OCFS2_QUOTA_BLOCK_UPDATE_CREDITS);
	if (IS_ERR(handle)) {
		status = PTR_ERR(handle);
		mlog_errno(status);
		return status;
	}
	/* Announce the write to the journal before touching the buffer */
	status = ocfs2_journal_access_dq(handle, INODE_CACHE(inode), bh,
					 OCFS2_JOURNAL_ACCESS_WRITE);
	if (status < 0) {
		mlog_errno(status);
		ocfs2_commit_trans(OCFS2_SB(sb), handle);
		return status;
	}
	/* Run the callback with the buffer locked so concurrent readers
	 * never see a half-updated block */
	lock_buffer(bh);
	modify(bh, private);
	unlock_buffer(bh);
	ocfs2_journal_dirty(handle, bh);

	status = ocfs2_commit_trans(OCFS2_SB(sb), handle);
	if (status < 0) {
		mlog_errno(status);
		return status;
	}
	return 0;
}
/*
 * Read quota block from a given logical offset.
 *
 * This function acquires ip_alloc_sem and thus it must not be called with a
 * transaction started.
 *
 * A caller-supplied buffer in *bh is reused; otherwise the newly read
 * buffer is returned through *bh on success.  Returns 0 or a negative
 * errno; a request past i_size marks the fs errored and fails with -EIO.
 */
static int ocfs2_read_quota_block(struct inode *inode, u64 v_block,
				  struct buffer_head **bh)
{
	int rc = 0;
	struct buffer_head *tmp = *bh;

	/* A block beyond EOF can only mean a corrupted quota file */
	if (i_size_read(inode) >> inode->i_sb->s_blocksize_bits <= v_block) {
		ocfs2_error(inode->i_sb,
			    "Quota file %llu is probably corrupted! Requested "
			    "to read block %Lu but file has size only %Lu\n",
			    (unsigned long long)OCFS2_I(inode)->ip_blkno,
			    (unsigned long long)v_block,
			    (unsigned long long)i_size_read(inode));
		return -EIO;
	}
	rc = ocfs2_read_virt_blocks(inode, v_block, 1, &tmp, 0,
				    ocfs2_validate_quota_block);
	if (rc)
		mlog_errno(rc);

	/* If ocfs2_read_virt_blocks() got us a new bh, pass it up. */
	if (!rc && !*bh)
		*bh = tmp;
	return rc;
}
/*
 * Check whether we understand format of quota files.
 *
 * Validates magic and version of both the node-local quota file of @type
 * and the matching global quota file.  Returns 1 when both are
 * recognized, 0 otherwise (quota-format probing convention: not an
 * errno).
 */
static int ocfs2_local_check_quota_file(struct super_block *sb, int type)
{
	unsigned int lmagics[MAXQUOTAS] = OCFS2_LOCAL_QMAGICS;
	unsigned int lversions[MAXQUOTAS] = OCFS2_LOCAL_QVERSIONS;
	unsigned int gmagics[MAXQUOTAS] = OCFS2_GLOBAL_QMAGICS;
	unsigned int gversions[MAXQUOTAS] = OCFS2_GLOBAL_QVERSIONS;
	/* System inode numbers of the global quota files, indexed by type */
	unsigned int ino[MAXQUOTAS] = { USER_QUOTA_SYSTEM_INODE,
					GROUP_QUOTA_SYSTEM_INODE };
	struct buffer_head *bh = NULL;
	struct inode *linode = sb_dqopt(sb)->files[type];
	struct inode *ginode = NULL;
	struct ocfs2_disk_dqheader *dqhead;
	int status, ret = 0;

	/* First check whether we understand local quota file */
	status = ocfs2_read_quota_block(linode, 0, &bh);
	if (status) {
		mlog_errno(status);
		mlog(ML_ERROR, "failed to read quota file header (type=%d)\n",
			type);
		goto out_err;
	}
	dqhead = (struct ocfs2_disk_dqheader *)(bh->b_data);
	if (le32_to_cpu(dqhead->dqh_magic) != lmagics[type]) {
		mlog(ML_ERROR, "quota file magic does not match (%u != %u),"
			" type=%d\n", le32_to_cpu(dqhead->dqh_magic),
			lmagics[type], type);
		goto out_err;
	}
	if (le32_to_cpu(dqhead->dqh_version) != lversions[type]) {
		mlog(ML_ERROR, "quota file version does not match (%u != %u),"
			" type=%d\n", le32_to_cpu(dqhead->dqh_version),
			lversions[type], type);
		goto out_err;
	}
	brelse(bh);
	bh = NULL;

	/* Next check whether we understand global quota file */
	ginode = ocfs2_get_system_file_inode(OCFS2_SB(sb), ino[type],
						OCFS2_INVALID_SLOT);
	if (!ginode) {
		mlog(ML_ERROR, "cannot get global quota file inode "
				"(type=%d)\n", type);
		goto out_err;
	}
	/* Since the header is read only, we don't care about locking */
	status = ocfs2_read_quota_block(ginode, 0, &bh);
	if (status) {
		mlog_errno(status);
		mlog(ML_ERROR, "failed to read global quota file header "
				"(type=%d)\n", type);
		goto out_err;
	}
	dqhead = (struct ocfs2_disk_dqheader *)(bh->b_data);
	if (le32_to_cpu(dqhead->dqh_magic) != gmagics[type]) {
		mlog(ML_ERROR, "global quota file magic does not match "
			"(%u != %u), type=%d\n",
			le32_to_cpu(dqhead->dqh_magic), gmagics[type], type);
		goto out_err;
	}
	if (le32_to_cpu(dqhead->dqh_version) != gversions[type]) {
		mlog(ML_ERROR, "global quota file version does not match "
			"(%u != %u), type=%d\n",
			le32_to_cpu(dqhead->dqh_version), gversions[type],
			type);
		goto out_err;
	}
	/* Both files understood - report success */
	ret = 1;
out_err:
	brelse(bh);
	iput(ginode);
	return ret;
}
/* Release given list of quota file chunks */
static void ocfs2_release_local_quota_bitmaps(struct list_head *head)
{
struct ocfs2_quota_chunk *pos, *next;
list_for_each_entry_safe(pos, next, head, qc_chunk) {
list_del(&pos->qc_chunk);
brelse(pos->qc_headerbh);
kmem_cache_free(ocfs2_qf_chunk_cachep, pos);
}
}
/*
 * Load quota bitmaps into memory.
 *
 * Reads the header block of every chunk recorded in @ldinfo and chains
 * the in-memory chunk descriptors on @head.  On failure anything loaded
 * so far is released again.  Returns 0 or a negative errno.
 */
static int ocfs2_load_local_quota_bitmaps(struct inode *inode,
					  struct ocfs2_local_disk_dqinfo *ldinfo,
					  struct list_head *head)
{
	struct ocfs2_quota_chunk *newchunk;
	int i, status;

	INIT_LIST_HEAD(head);
	for (i = 0; i < le32_to_cpu(ldinfo->dqi_chunks); i++) {
		newchunk = kmem_cache_alloc(ocfs2_qf_chunk_cachep, GFP_NOFS);
		if (!newchunk) {
			ocfs2_release_local_quota_bitmaps(head);
			return -ENOMEM;
		}
		newchunk->qc_num = i;
		newchunk->qc_headerbh = NULL;
		status = ocfs2_read_quota_block(inode,
						ol_quota_chunk_block(inode->i_sb, i),
						&newchunk->qc_headerbh);
		if (status) {
			mlog_errno(status);
			/* Free the chunk that never made it onto the list,
			 * then everything already chained */
			kmem_cache_free(ocfs2_qf_chunk_cachep, newchunk);
			ocfs2_release_local_quota_bitmaps(head);
			return status;
		}
		list_add_tail(&newchunk->qc_chunk, head);
	}
	return 0;
}
/*
 * Buffer-modification callback for ocfs2_modify_bh(): copy the in-memory
 * quota info into the on-disk local info header.  Runs with the buffer
 * locked; dq_data_lock keeps the snapshot of the fields consistent.
 */
static void olq_update_info(struct buffer_head *bh, void *private)
{
	struct mem_dqinfo *info = private;
	struct ocfs2_mem_dqinfo *oinfo = info->dqi_priv;
	struct ocfs2_local_disk_dqinfo *disk;

	disk = (struct ocfs2_local_disk_dqinfo *)(bh->b_data +
						  OCFS2_LOCAL_INFO_OFF);
	spin_lock(&dq_data_lock);
	disk->dqi_flags = cpu_to_le32(info->dqi_flags & DQF_MASK);
	disk->dqi_chunks = cpu_to_le32(oinfo->dqi_chunks);
	disk->dqi_blocks = cpu_to_le32(oinfo->dqi_blocks);
	spin_unlock(&dq_data_lock);
}
/*
 * Snapshot chunk @chunk's allocation bitmap onto the recovery list
 * @head.  Returns 0 or -ENOMEM.
 */
static int ocfs2_add_recovery_chunk(struct super_block *sb,
				    struct ocfs2_local_disk_chunk *dchunk,
				    int chunk,
				    struct list_head *head)
{
	struct ocfs2_recovery_chunk *rchunk;

	rchunk = kmalloc(sizeof(*rchunk), GFP_NOFS);
	if (!rchunk)
		return -ENOMEM;
	rchunk->rc_bitmap = kmalloc(sb->s_blocksize, GFP_NOFS);
	if (!rchunk->rc_bitmap) {
		kfree(rchunk);
		return -ENOMEM;
	}
	rchunk->rc_chunk = chunk;
	/* One bit per entry, rounded up to whole bytes */
	memcpy(rchunk->rc_bitmap, dchunk->dqc_bitmap,
	       (ol_chunk_entries(sb) + 7) >> 3);
	list_add_tail(&rchunk->rc_list, head);
	return 0;
}
/* Drain @head, freeing each recovery chunk and its bitmap copy */
static void free_recovery_list(struct list_head *head)
{
	struct ocfs2_recovery_chunk *rc, *tmp;

	list_for_each_entry_safe(rc, tmp, head, rc_list) {
		list_del(&rc->rc_list);
		kfree(rc->rc_bitmap);
		kfree(rc);
	}
}
/* Free a recovery context and every per-type chunk list inside it */
void ocfs2_free_quota_recovery(struct ocfs2_quota_recovery *rec)
{
	int type;

	for (type = 0; type < MAXQUOTAS; type++)
		free_recovery_list(&rec->r_list[type]);
	kfree(rec);
}
/*
 * Load entries in our quota file we have to recover.
 *
 * Walks every chunk of @lqinode and snapshots the bitmap of each chunk
 * that still has allocated entries onto @head.  On error the partially
 * built list is freed.  Returns 0 or a negative errno.
 */
static int ocfs2_recovery_load_quota(struct inode *lqinode,
				     struct ocfs2_local_disk_dqinfo *ldinfo,
				     int type,
				     struct list_head *head)
{
	struct super_block *sb = lqinode->i_sb;
	struct buffer_head *hbh;
	struct ocfs2_local_disk_chunk *dchunk;
	int i, chunks = le32_to_cpu(ldinfo->dqi_chunks);
	int status = 0;

	for (i = 0; i < chunks; i++) {
		hbh = NULL;
		status = ocfs2_read_quota_block(lqinode,
						ol_quota_chunk_block(sb, i),
						&hbh);
		if (status) {
			mlog_errno(status);
			break;
		}
		dchunk = (struct ocfs2_local_disk_chunk *)hbh->b_data;
		/* Completely free chunks carry nothing to recover */
		if (le32_to_cpu(dchunk->dqc_free) < ol_chunk_entries(sb))
			status = ocfs2_add_recovery_chunk(sb, dchunk, i, head);
		brelse(hbh);
		if (status < 0)
			break;
	}
	if (status < 0)
		free_recovery_list(head);
	return status;
}
/* Allocate a recovery context with one empty list per quota type */
static struct ocfs2_quota_recovery *ocfs2_alloc_quota_recovery(void)
{
	struct ocfs2_quota_recovery *rec;
	int type;

	rec = kmalloc(sizeof(*rec), GFP_NOFS);
	if (!rec)
		return NULL;
	for (type = 0; type < MAXQUOTAS; type++)
		INIT_LIST_HEAD(&rec->r_list[type]);
	return rec;
}
/*
 * Load information we need for quota recovery into memory.
 *
 * For every quota type enabled on the fs, takes the recovery cluster lock
 * on the slot's local quota file, reads its header and records which
 * chunks contain entries still needing to be synced into the global
 * file.  Returns a recovery context or an ERR_PTR() on failure.
 */
struct ocfs2_quota_recovery *ocfs2_begin_quota_recovery(
						struct ocfs2_super *osb,
						int slot_num)
{
	unsigned int feature[MAXQUOTAS] = { OCFS2_FEATURE_RO_COMPAT_USRQUOTA,
					    OCFS2_FEATURE_RO_COMPAT_GRPQUOTA};
	unsigned int ino[MAXQUOTAS] = { LOCAL_USER_QUOTA_SYSTEM_INODE,
					LOCAL_GROUP_QUOTA_SYSTEM_INODE };
	struct super_block *sb = osb->sb;
	struct ocfs2_local_disk_dqinfo *ldinfo;
	struct inode *lqinode;
	struct buffer_head *bh;
	int type;
	int status = 0;
	struct ocfs2_quota_recovery *rec;

	mlog(ML_NOTICE, "Beginning quota recovery in slot %u\n", slot_num);
	rec = ocfs2_alloc_quota_recovery();
	if (!rec)
		return ERR_PTR(-ENOMEM);
	/* First init... */

	for (type = 0; type < MAXQUOTAS; type++) {
		/* Skip quota types not enabled on this filesystem */
		if (!OCFS2_HAS_RO_COMPAT_FEATURE(sb, feature[type]))
			continue;
		/* At this point, journal of the slot is already replayed so
		 * we can trust metadata and data of the quota file */
		lqinode = ocfs2_get_system_file_inode(osb, ino[type], slot_num);
		if (!lqinode) {
			status = -ENOENT;
			goto out;
		}
		status = ocfs2_inode_lock_full(lqinode, NULL, 1,
					       OCFS2_META_LOCK_RECOVERY);
		if (status < 0) {
			mlog_errno(status);
			goto out_put;
		}
		/* Now read local header */
		bh = NULL;
		status = ocfs2_read_quota_block(lqinode, 0, &bh);
		if (status) {
			mlog_errno(status);
			mlog(ML_ERROR, "failed to read quota file info header "
				"(slot=%d type=%d)\n", slot_num, type);
			goto out_lock;
		}
		ldinfo = (struct ocfs2_local_disk_dqinfo *)(bh->b_data +
							OCFS2_LOCAL_INFO_OFF);
		status = ocfs2_recovery_load_quota(lqinode, ldinfo, type,
						   &rec->r_list[type]);
		brelse(bh);
out_lock:
		ocfs2_inode_unlock(lqinode, 1);
out_put:
		iput(lqinode);
		if (status < 0)
			break;
	}
out:
	if (status < 0) {
		ocfs2_free_quota_recovery(rec);
		rec = ERR_PTR(status);
	}
	return rec;
}
/*
 * Sync changes in local quota file into global quota file and
 * reinitialize local quota file.
 * The function expects local quota file to be already locked and
 * dqonoff_mutex locked.
 *
 * For every entry recorded in @rec for @type: fold its space/inode
 * deltas into the live dquot, drop the crashed node's reference in the
 * global file, and free the entry in the local file's chunk bitmap.
 */
static int ocfs2_recover_local_quota_file(struct inode *lqinode,
					  int type,
					  struct ocfs2_quota_recovery *rec)
{
	struct super_block *sb = lqinode->i_sb;
	struct ocfs2_mem_dqinfo *oinfo = sb_dqinfo(sb, type)->dqi_priv;
	struct ocfs2_local_disk_chunk *dchunk;
	struct ocfs2_local_disk_dqblk *dqblk;
	struct dquot *dquot;
	handle_t *handle;
	struct buffer_head *hbh = NULL, *qbh = NULL;
	int status = 0;
	int bit, chunk;
	struct ocfs2_recovery_chunk *rchunk, *next;
	qsize_t spacechange, inodechange;

	trace_ocfs2_recover_local_quota_file((unsigned long)lqinode->i_ino, type);

	list_for_each_entry_safe(rchunk, next, &(rec->r_list[type]), rc_list) {
		chunk = rchunk->rc_chunk;
		hbh = NULL;
		/* Chunk header block holds the bitmap we clear below */
		status = ocfs2_read_quota_block(lqinode,
						ol_quota_chunk_block(sb, chunk),
						&hbh);
		if (status) {
			mlog_errno(status);
			break;
		}
		dchunk = (struct ocfs2_local_disk_chunk *)hbh->b_data;
		/* Each set bit in the snapshot is one in-use entry */
		for_each_set_bit(bit, rchunk->rc_bitmap, ol_chunk_entries(sb)) {
			qbh = NULL;
			status = ocfs2_read_quota_block(lqinode,
						ol_dqblk_block(sb, chunk, bit),
						&qbh);
			if (status) {
				mlog_errno(status);
				break;
			}
			dqblk = (struct ocfs2_local_disk_dqblk *)(qbh->b_data +
				ol_dqblk_block_off(sb, chunk, bit));
			dquot = dqget(sb, le64_to_cpu(dqblk->dqb_id), type);
			if (!dquot) {
				status = -EIO;
				mlog(ML_ERROR, "Failed to get quota structure "
				     "for id %u, type %d. Cannot finish quota "
				     "file recovery.\n",
				     (unsigned)le64_to_cpu(dqblk->dqb_id),
				     type);
				goto out_put_bh;
			}
			/* Lock ordering: global quota lock, then transaction,
			 * then dqio_mutex */
			status = ocfs2_lock_global_qf(oinfo, 1);
			if (status < 0) {
				mlog_errno(status);
				goto out_put_dquot;
			}

			handle = ocfs2_start_trans(OCFS2_SB(sb),
						   OCFS2_QSYNC_CREDITS);
			if (IS_ERR(handle)) {
				status = PTR_ERR(handle);
				mlog_errno(status);
				goto out_drop_lock;
			}
			mutex_lock(&sb_dqopt(sb)->dqio_mutex);
			spin_lock(&dq_data_lock);
			/* Add usage from quota entry into quota changes
			 * of our node. Auxiliary variables are important
			 * due to signedness */
			spacechange = le64_to_cpu(dqblk->dqb_spacemod);
			inodechange = le64_to_cpu(dqblk->dqb_inodemod);
			dquot->dq_dqb.dqb_curspace += spacechange;
			dquot->dq_dqb.dqb_curinodes += inodechange;
			spin_unlock(&dq_data_lock);
			/* We want to drop reference held by the crashed
			 * node. Since we have our own reference we know
			 * global structure actually won't be freed. */
			status = ocfs2_global_release_dquot(dquot);
			if (status < 0) {
				mlog_errno(status);
				goto out_commit;
			}
			/* Release local quota file entry */
			status = ocfs2_journal_access_dq(handle,
					INODE_CACHE(lqinode),
					qbh, OCFS2_JOURNAL_ACCESS_WRITE);
			if (status < 0) {
				mlog_errno(status);
				goto out_commit;
			}
			lock_buffer(qbh);
			WARN_ON(!ocfs2_test_bit(bit, dchunk->dqc_bitmap));
			ocfs2_clear_bit(bit, dchunk->dqc_bitmap);
			le32_add_cpu(&dchunk->dqc_free, 1);
			unlock_buffer(qbh);
			ocfs2_journal_dirty(handle, qbh);
out_commit:
			mutex_unlock(&sb_dqopt(sb)->dqio_mutex);
			ocfs2_commit_trans(OCFS2_SB(sb), handle);
out_drop_lock:
			ocfs2_unlock_global_qf(oinfo, 1);
out_put_dquot:
			dqput(dquot);
out_put_bh:
			brelse(qbh);
			if (status < 0)
				break;
		}
		brelse(hbh);
		/* Done with this chunk - drop it from the recovery list */
		list_del(&rchunk->rc_list);
		kfree(rchunk->rc_bitmap);
		kfree(rchunk);
		if (status < 0)
			break;
	}
	if (status < 0)
		free_recovery_list(&(rec->r_list[type]));
	if (status)
		mlog_errno(status);
	return status;
}
/*
 * Recover local quota files for given node different from us.
 *
 * For each quota type with pending recovery work in @rec: take the
 * slot's local quota file lock (non-blocking - a holder means someone
 * else is already recovering it), replay the recorded entries into the
 * global file, and - unless the slot is our own - mark the local file
 * clean again.  Consumes @rec.  Returns 0 or a negative errno.
 */
int ocfs2_finish_quota_recovery(struct ocfs2_super *osb,
				struct ocfs2_quota_recovery *rec,
				int slot_num)
{
	unsigned int ino[MAXQUOTAS] = { LOCAL_USER_QUOTA_SYSTEM_INODE,
					LOCAL_GROUP_QUOTA_SYSTEM_INODE };
	struct super_block *sb = osb->sb;
	struct ocfs2_local_disk_dqinfo *ldinfo;
	struct buffer_head *bh;
	handle_t *handle;
	int type;
	int status = 0;
	struct inode *lqinode;
	unsigned int flags;

	mlog(ML_NOTICE, "Finishing quota recovery in slot %u\n", slot_num);
	mutex_lock(&sb_dqopt(sb)->dqonoff_mutex);
	for (type = 0; type < MAXQUOTAS; type++) {
		if (list_empty(&(rec->r_list[type])))
			continue;
		trace_ocfs2_finish_quota_recovery(slot_num);
		lqinode = ocfs2_get_system_file_inode(osb, ino[type], slot_num);
		if (!lqinode) {
			status = -ENOENT;
			goto out;
		}
		status = ocfs2_inode_lock_full(lqinode, NULL, 1,
					       OCFS2_META_LOCK_NOQUEUE);
		/* Someone else is holding the lock? Then he must be
		 * doing the recovery. Just skip the file... */
		if (status == -EAGAIN) {
			mlog(ML_NOTICE, "skipping quota recovery for slot %d "
			     "because quota file is locked.\n", slot_num);
			status = 0;
			goto out_put;
		} else if (status < 0) {
			mlog_errno(status);
			goto out_put;
		}
		/* Now read local header */
		bh = NULL;
		status = ocfs2_read_quota_block(lqinode, 0, &bh);
		if (status) {
			mlog_errno(status);
			mlog(ML_ERROR, "failed to read quota file info header "
				"(slot=%d type=%d)\n", slot_num, type);
			goto out_lock;
		}
		ldinfo = (struct ocfs2_local_disk_dqinfo *)(bh->b_data +
							OCFS2_LOCAL_INFO_OFF);
		/* Is recovery still needed? */
		flags = le32_to_cpu(ldinfo->dqi_flags);
		if (!(flags & OLQF_CLEAN))
			status = ocfs2_recover_local_quota_file(lqinode,
								type,
								rec);
		/* We don't want to mark file as clean when it is actually
		 * active */
		if (slot_num == osb->slot_num)
			goto out_bh;
		/* Mark quota file as clean if we are recovering quota file of
		 * some other node. */
		handle = ocfs2_start_trans(osb,
					   OCFS2_LOCAL_QINFO_WRITE_CREDITS);
		if (IS_ERR(handle)) {
			status = PTR_ERR(handle);
			mlog_errno(status);
			goto out_bh;
		}
		status = ocfs2_journal_access_dq(handle, INODE_CACHE(lqinode),
						 bh,
						 OCFS2_JOURNAL_ACCESS_WRITE);
		if (status < 0) {
			mlog_errno(status);
			goto out_trans;
		}
		lock_buffer(bh);
		ldinfo->dqi_flags = cpu_to_le32(flags | OLQF_CLEAN);
		unlock_buffer(bh);
		ocfs2_journal_dirty(handle, bh);
out_trans:
		ocfs2_commit_trans(osb, handle);
out_bh:
		brelse(bh);
out_lock:
		ocfs2_inode_unlock(lqinode, 1);
out_put:
		iput(lqinode);
		if (status < 0)
			break;
	}
out:
	mutex_unlock(&sb_dqopt(sb)->dqonoff_mutex);
	/* Per-type lists were drained by the recovery above; drop the
	 * context itself */
	kfree(rec);
	return status;
}
/*
 * Read information header from quota file.
 *
 * Quota-format callback: sets up the per-type in-memory info, loads the
 * global info, reads the local header, schedules self-recovery if the
 * local file was left dirty by a crash, loads the chunk bitmaps and
 * finally marks the local file as in use (clearing OLQF_CLEAN).
 *
 * Called with dqio_mutex held by the quota core; temporarily dropped
 * here (see comment below).  Returns 0 on success, -1 on failure
 * (quota-format convention, not an errno).
 */
static int ocfs2_local_read_info(struct super_block *sb, int type)
{
	struct ocfs2_local_disk_dqinfo *ldinfo;
	struct mem_dqinfo *info = sb_dqinfo(sb, type);
	struct ocfs2_mem_dqinfo *oinfo;
	struct inode *lqinode = sb_dqopt(sb)->files[type];
	int status;
	struct buffer_head *bh = NULL;
	struct ocfs2_quota_recovery *rec;
	int locked = 0;

	/* We don't need the lock and we have to acquire quota file locks
	 * which will later depend on this lock */
	mutex_unlock(&sb_dqopt(sb)->dqio_mutex);
	info->dqi_maxblimit = 0x7fffffffffffffffLL;
	info->dqi_maxilimit = 0x7fffffffffffffffLL;
	oinfo = kmalloc(sizeof(struct ocfs2_mem_dqinfo), GFP_NOFS);
	if (!oinfo) {
		mlog(ML_ERROR, "failed to allocate memory for ocfs2 quota"
			       " info.");
		goto out_err;
	}
	info->dqi_priv = oinfo;
	oinfo->dqi_type = type;
	INIT_LIST_HEAD(&oinfo->dqi_chunk);
	oinfo->dqi_rec = NULL;
	oinfo->dqi_lqi_bh = NULL;
	oinfo->dqi_libh = NULL;

	status = ocfs2_global_read_info(sb, type);
	if (status < 0)
		goto out_err;

	status = ocfs2_inode_lock(lqinode, &oinfo->dqi_lqi_bh, 1);
	if (status < 0) {
		mlog_errno(status);
		goto out_err;
	}
	locked = 1;

	/* Now read local header */
	status = ocfs2_read_quota_block(lqinode, 0, &bh);
	if (status) {
		mlog_errno(status);
		mlog(ML_ERROR, "failed to read quota file info header "
			"(type=%d)\n", type);
		goto out_err;
	}
	ldinfo = (struct ocfs2_local_disk_dqinfo *)(bh->b_data +
						OCFS2_LOCAL_INFO_OFF);
	info->dqi_flags = le32_to_cpu(ldinfo->dqi_flags);
	oinfo->dqi_chunks = le32_to_cpu(ldinfo->dqi_chunks);
	oinfo->dqi_blocks = le32_to_cpu(ldinfo->dqi_blocks);
	oinfo->dqi_libh = bh;

	/* We crashed when using local quota file? */
	if (!(info->dqi_flags & OLQF_CLEAN)) {
		rec = OCFS2_SB(sb)->quota_rec;
		if (!rec) {
			rec = ocfs2_alloc_quota_recovery();
			if (!rec) {
				status = -ENOMEM;
				mlog_errno(status);
				goto out_err;
			}
			OCFS2_SB(sb)->quota_rec = rec;
		}
		/* Queue this file's entries for later recovery */
		status = ocfs2_recovery_load_quota(lqinode, ldinfo, type,
						   &rec->r_list[type]);
		if (status < 0) {
			mlog_errno(status);
			goto out_err;
		}
	}

	status = ocfs2_load_local_quota_bitmaps(lqinode,
						ldinfo,
						&oinfo->dqi_chunk);
	if (status < 0) {
		mlog_errno(status);
		goto out_err;
	}

	/* Now mark quota file as used */
	info->dqi_flags &= ~OLQF_CLEAN;
	status = ocfs2_modify_bh(lqinode, bh, olq_update_info, info);
	if (status < 0) {
		mlog_errno(status);
		goto out_err;
	}

	/* Re-take the mutex the quota core expects us to hold */
	mutex_lock(&sb_dqopt(sb)->dqio_mutex);
	return 0;
out_err:
	if (oinfo) {
		iput(oinfo->dqi_gqinode);
		ocfs2_simple_drop_lockres(OCFS2_SB(sb), &oinfo->dqi_gqlock);
		ocfs2_lock_res_free(&oinfo->dqi_gqlock);
		brelse(oinfo->dqi_lqi_bh);
		if (locked)
			ocfs2_inode_unlock(lqinode, 1);
		ocfs2_release_local_quota_bitmaps(&oinfo->dqi_chunk);
		kfree(oinfo);
	}
	brelse(bh);
	mutex_lock(&sb_dqopt(sb)->dqio_mutex);
	return -1;
}
/* Write local info to quota file */
static int ocfs2_local_write_info(struct super_block *sb, int type)
{
struct mem_dqinfo *info = sb_dqinfo(sb, type);
struct buffer_head *bh = ((struct ocfs2_mem_dqinfo *)info->dqi_priv)
->dqi_libh;
int status;
status = ocfs2_modify_bh(sb_dqopt(sb)->files[type], bh, olq_update_info,
info);
if (status < 0) {
mlog_errno(status);
return -1;
}
return 0;
}
/*
 * Release info from memory.
 *
 * Quota-format callback run at quota-off: drops the global quota file
 * state, verifies every chunk has all entries free, releases the chunk
 * bitmaps and - if nothing is pending - marks the local file clean on
 * disk.  Always returns 0.
 */
static int ocfs2_local_free_info(struct super_block *sb, int type)
{
	struct mem_dqinfo *info = sb_dqinfo(sb, type);
	struct ocfs2_mem_dqinfo *oinfo = info->dqi_priv;
	struct ocfs2_quota_chunk *chunk;
	struct ocfs2_local_disk_chunk *dchunk;
	int mark_clean = 1, len;
	int status;

	iput(oinfo->dqi_gqinode);
	ocfs2_simple_drop_lockres(OCFS2_SB(sb), &oinfo->dqi_gqlock);
	ocfs2_lock_res_free(&oinfo->dqi_gqlock);
	list_for_each_entry(chunk, &oinfo->dqi_chunk, qc_chunk) {
		dchunk = (struct ocfs2_local_disk_chunk *)
					(chunk->qc_headerbh->b_data);
		/* The last chunk may be shorter than a full one; compute
		 * how many entries it really tracks */
		if (chunk->qc_num < oinfo->dqi_chunks - 1) {
			len = ol_chunk_entries(sb);
		} else {
			len = (oinfo->dqi_blocks -
			       ol_quota_chunk_block(sb, chunk->qc_num) - 1)
			      * ol_quota_entries_per_block(sb);
		}
		/* Not all entries free? Bug! */
		if (le32_to_cpu(dchunk->dqc_free) != len) {
			mlog(ML_ERROR, "releasing quota file with used "
					"entries (type=%d)\n", type);
			mark_clean = 0;
		}
	}
	ocfs2_release_local_quota_bitmaps(&oinfo->dqi_chunk);

	/* dqonoff_mutex protects us against racing with recovery thread... */
	if (oinfo->dqi_rec) {
		ocfs2_free_quota_recovery(oinfo->dqi_rec);
		mark_clean = 0;
	}

	if (!mark_clean)
		goto out;

	/* Mark local file as clean */
	info->dqi_flags |= OLQF_CLEAN;
	status = ocfs2_modify_bh(sb_dqopt(sb)->files[type],
				 oinfo->dqi_libh,
				 olq_update_info,
				 info);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

out:
	ocfs2_inode_unlock(sb_dqopt(sb)->files[type], 1);
	brelse(oinfo->dqi_libh);
	brelse(oinfo->dqi_lqi_bh);
	kfree(oinfo);
	return 0;
}
/*
 * Buffer-modification callback for ocfs2_modify_bh(): serialize one
 * dquot's id and usage deltas into its slot in the local quota file.
 * Runs with the buffer locked.
 */
static void olq_set_dquot(struct buffer_head *bh, void *private)
{
	struct ocfs2_dquot *od = private;
	struct super_block *sb = od->dq_dquot.dq_sb;
	struct ocfs2_local_disk_dqblk *dqblk;

	dqblk = (struct ocfs2_local_disk_dqblk *)(bh->b_data
		+ ol_dqblk_block_offset(sb, od->dq_local_off));

	dqblk->dqb_id = cpu_to_le64(od->dq_dquot.dq_id);
	/* Store the change relative to the values seen when the dquot was
	 * read; dq_data_lock keeps the two deltas consistent. */
	spin_lock(&dq_data_lock);
	dqblk->dqb_spacemod = cpu_to_le64(od->dq_dquot.dq_dqb.dqb_curspace -
					  od->dq_origspace);
	dqblk->dqb_inodemod = cpu_to_le64(od->dq_dquot.dq_dqb.dqb_curinodes -
					  od->dq_originodes);
	spin_unlock(&dq_data_lock);
	trace_olq_set_dquot(
		(unsigned long long)le64_to_cpu(dqblk->dqb_spacemod),
		(unsigned long long)le64_to_cpu(dqblk->dqb_inodemod),
		od->dq_dquot.dq_id);
}
/* Write dquot to local quota file */
int ocfs2_local_write_dquot(struct dquot *dquot)
{
struct super_block *sb = dquot->dq_sb;
struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
struct buffer_head *bh;
struct inode *lqinode = sb_dqopt(sb)->files[dquot->dq_type];
int status;
status = ocfs2_read_quota_phys_block(lqinode, od->dq_local_phys_blk,
&bh);
if (status) {
mlog_errno(status);
goto out;
}
status = ocfs2_modify_bh(lqinode, bh, olq_set_dquot, od);
if (status < 0) {
mlog_errno(status);
goto out;
}
out:
brelse(bh);
return status;
}
/*
 * Find free entry in local quota file.
 *
 * Scans the chunk list for a chunk with a nonzero free count and returns
 * it with *offset set to the first clear bit in its bitmap.  Returns
 * NULL when every chunk is full, or ERR_PTR(-EIO) when a chunk's free
 * count disagrees with its bitmap (on-disk corruption).
 */
static struct ocfs2_quota_chunk *ocfs2_find_free_entry(struct super_block *sb,
						       int type,
						       int *offset)
{
	struct mem_dqinfo *info = sb_dqinfo(sb, type);
	struct ocfs2_mem_dqinfo *oinfo = info->dqi_priv;
	struct ocfs2_quota_chunk *chunk;
	struct ocfs2_local_disk_chunk *dchunk;
	int found = 0, len;

	list_for_each_entry(chunk, &oinfo->dqi_chunk, qc_chunk) {
		dchunk = (struct ocfs2_local_disk_chunk *)
						chunk->qc_headerbh->b_data;
		if (le32_to_cpu(dchunk->dqc_free) > 0) {
			found = 1;
			break;
		}
	}
	if (!found)
		return NULL;

	/* The last chunk may be partial; compute how many entries its
	 * bitmap really covers */
	if (chunk->qc_num < oinfo->dqi_chunks - 1) {
		len = ol_chunk_entries(sb);
	} else {
		len = (oinfo->dqi_blocks -
		       ol_quota_chunk_block(sb, chunk->qc_num) - 1)
		      * ol_quota_entries_per_block(sb);
	}

	found = ocfs2_find_next_zero_bit(dchunk->dqc_bitmap, len, 0);
	/* We failed? */
	if (found == len) {
		mlog(ML_ERROR, "Did not find empty entry in chunk %d with %u"
		     " entries free (type=%d)\n", chunk->qc_num,
		     le32_to_cpu(dchunk->dqc_free), type);
		return ERR_PTR(-EIO);
	}
	*offset = found;
	return chunk;
}
/*
 * Add new chunk to the local quota file.
 *
 * Grows the file by two blocks (a chunk header plus the chunk's first
 * data block), initializes both under one transaction, updates the local
 * info header and links the new in-memory chunk descriptor at the tail
 * of the chunk list.  Returns the new chunk with *offset = 0, or an
 * ERR_PTR().
 */
static struct ocfs2_quota_chunk *ocfs2_local_quota_add_chunk(
							struct super_block *sb,
							int type,
							int *offset)
{
	struct mem_dqinfo *info = sb_dqinfo(sb, type);
	struct ocfs2_mem_dqinfo *oinfo = info->dqi_priv;
	struct inode *lqinode = sb_dqopt(sb)->files[type];
	struct ocfs2_quota_chunk *chunk = NULL;
	struct ocfs2_local_disk_chunk *dchunk;
	int status;
	handle_t *handle;
	struct buffer_head *bh = NULL, *dbh = NULL;
	u64 p_blkno;

	/* We are protected by dqio_sem so no locking needed */
	status = ocfs2_extend_no_holes(lqinode, NULL,
				       lqinode->i_size + 2 * sb->s_blocksize,
				       lqinode->i_size);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
	status = ocfs2_simple_size_update(lqinode, oinfo->dqi_lqi_bh,
					  lqinode->i_size + 2 * sb->s_blocksize);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

	chunk = kmem_cache_alloc(ocfs2_qf_chunk_cachep, GFP_NOFS);
	if (!chunk) {
		status = -ENOMEM;
		mlog_errno(status);
		goto out;
	}
	/* Local quota info and two new blocks we initialize */
	handle = ocfs2_start_trans(OCFS2_SB(sb),
				   OCFS2_LOCAL_QINFO_WRITE_CREDITS +
				   2 * OCFS2_QUOTA_BLOCK_UPDATE_CREDITS);
	if (IS_ERR(handle)) {
		status = PTR_ERR(handle);
		mlog_errno(status);
		goto out;
	}

	/* Initialize chunk header */
	status = ocfs2_extent_map_get_blocks(lqinode, oinfo->dqi_blocks,
					     &p_blkno, NULL, NULL);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	bh = sb_getblk(sb, p_blkno);
	if (!bh) {
		status = -ENOMEM;
		mlog_errno(status);
		goto out_trans;
	}
	dchunk = (struct ocfs2_local_disk_chunk *)bh->b_data;
	ocfs2_set_new_buffer_uptodate(INODE_CACHE(lqinode), bh);
	status = ocfs2_journal_access_dq(handle, INODE_CACHE(lqinode), bh,
					 OCFS2_JOURNAL_ACCESS_CREATE);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	lock_buffer(bh);
	/* Only one data block exists yet, so only its entries are free */
	dchunk->dqc_free = cpu_to_le32(ol_quota_entries_per_block(sb));
	memset(dchunk->dqc_bitmap, 0,
	       sb->s_blocksize - sizeof(struct ocfs2_local_disk_chunk) -
	       OCFS2_QBLK_RESERVED_SPACE);
	unlock_buffer(bh);
	ocfs2_journal_dirty(handle, bh);

	/* Initialize new block with structures */
	status = ocfs2_extent_map_get_blocks(lqinode, oinfo->dqi_blocks + 1,
					     &p_blkno, NULL, NULL);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	dbh = sb_getblk(sb, p_blkno);
	if (!dbh) {
		status = -ENOMEM;
		mlog_errno(status);
		goto out_trans;
	}
	ocfs2_set_new_buffer_uptodate(INODE_CACHE(lqinode), dbh);
	status = ocfs2_journal_access_dq(handle, INODE_CACHE(lqinode), dbh,
					 OCFS2_JOURNAL_ACCESS_CREATE);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	lock_buffer(dbh);
	memset(dbh->b_data, 0, sb->s_blocksize - OCFS2_QBLK_RESERVED_SPACE);
	unlock_buffer(dbh);
	ocfs2_journal_dirty(handle, dbh);

	/* Update local quotafile info */
	oinfo->dqi_blocks += 2;
	oinfo->dqi_chunks++;
	status = ocfs2_local_write_info(sb, type);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	status = ocfs2_commit_trans(OCFS2_SB(sb), handle);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

	/* Chunk numbers are sequential: one past the previous tail */
	list_add_tail(&chunk->qc_chunk, &oinfo->dqi_chunk);
	chunk->qc_num = list_entry(chunk->qc_chunk.prev,
				   struct ocfs2_quota_chunk,
				   qc_chunk)->qc_num + 1;
	chunk->qc_headerbh = bh;
	*offset = 0;
	return chunk;
out_trans:
	ocfs2_commit_trans(OCFS2_SB(sb), handle);
out:
	brelse(bh);
	brelse(dbh);
	kmem_cache_free(ocfs2_qf_chunk_cachep, chunk);
	return ERR_PTR(status);
}
/*
 * Make room for a new quota entry in the local quota file.
 *
 * If the last chunk still has space for more data blocks, the file is
 * extended by one block and that chunk's free count is bumped; otherwise
 * (or when no chunk exists yet) a whole new chunk is added via
 * ocfs2_local_quota_add_chunk().  Returns the chunk containing the new
 * free entries with *offset pointing at the first of them, or an
 * ERR_PTR().
 */
static struct ocfs2_quota_chunk *ocfs2_extend_local_quota_file(
						       struct super_block *sb,
						       int type,
						       int *offset)
{
	struct mem_dqinfo *info = sb_dqinfo(sb, type);
	struct ocfs2_mem_dqinfo *oinfo = info->dqi_priv;
	struct ocfs2_quota_chunk *chunk;
	struct inode *lqinode = sb_dqopt(sb)->files[type];
	struct ocfs2_local_disk_chunk *dchunk;
	int epb = ol_quota_entries_per_block(sb);
	unsigned int chunk_blocks;
	struct buffer_head *bh;
	u64 p_blkno;
	int status;
	handle_t *handle;

	if (list_empty(&oinfo->dqi_chunk))
		return ocfs2_local_quota_add_chunk(sb, type, offset);
	/* Is the last chunk full? */
	chunk = list_entry(oinfo->dqi_chunk.prev,
			struct ocfs2_quota_chunk, qc_chunk);
	chunk_blocks = oinfo->dqi_blocks -
			ol_quota_chunk_block(sb, chunk->qc_num) - 1;
	if (ol_chunk_blocks(sb) == chunk_blocks)
		return ocfs2_local_quota_add_chunk(sb, type, offset);

	/* We are protected by dqio_sem so no locking needed */
	status = ocfs2_extend_no_holes(lqinode, NULL,
				       lqinode->i_size + sb->s_blocksize,
				       lqinode->i_size);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
	status = ocfs2_simple_size_update(lqinode, oinfo->dqi_lqi_bh,
					  lqinode->i_size + sb->s_blocksize);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

	/* Get buffer from the just added block */
	status = ocfs2_extent_map_get_blocks(lqinode, oinfo->dqi_blocks,
					     &p_blkno, NULL, NULL);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
	bh = sb_getblk(sb, p_blkno);
	if (!bh) {
		status = -ENOMEM;
		mlog_errno(status);
		goto out;
	}
	ocfs2_set_new_buffer_uptodate(INODE_CACHE(lqinode), bh);

	/* Local quota info, chunk header and the new block we initialize */
	handle = ocfs2_start_trans(OCFS2_SB(sb),
				   OCFS2_LOCAL_QINFO_WRITE_CREDITS +
				   2 * OCFS2_QUOTA_BLOCK_UPDATE_CREDITS);
	if (IS_ERR(handle)) {
		status = PTR_ERR(handle);
		mlog_errno(status);
		goto out;
	}
	/* Zero created block */
	status = ocfs2_journal_access_dq(handle, INODE_CACHE(lqinode), bh,
					 OCFS2_JOURNAL_ACCESS_CREATE);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	lock_buffer(bh);
	memset(bh->b_data, 0, sb->s_blocksize);
	unlock_buffer(bh);
	ocfs2_journal_dirty(handle, bh);
	/* Update chunk header */
	status = ocfs2_journal_access_dq(handle, INODE_CACHE(lqinode),
					 chunk->qc_headerbh,
					 OCFS2_JOURNAL_ACCESS_WRITE);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}

	dchunk = (struct ocfs2_local_disk_chunk *)chunk->qc_headerbh->b_data;
	lock_buffer(chunk->qc_headerbh);
	/* The whole new block's worth of entries is now free */
	le32_add_cpu(&dchunk->dqc_free, ol_quota_entries_per_block(sb));
	unlock_buffer(chunk->qc_headerbh);
	ocfs2_journal_dirty(handle, chunk->qc_headerbh);
	/* Update file header */
	oinfo->dqi_blocks++;
	status = ocfs2_local_write_info(sb, type);
	if (status < 0) {
		mlog_errno(status);
		goto out_trans;
	}
	status = ocfs2_commit_trans(OCFS2_SB(sb), handle);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
	/* First entry of the block just appended to the chunk */
	*offset = chunk_blocks * epb;
	return chunk;
out_trans:
	ocfs2_commit_trans(OCFS2_SB(sb), handle);
out:
	return ERR_PTR(status);
}
/*
 * Buffer-modification callback for ocfs2_modify_bh(): mark entry
 * *@private in the chunk header bitmap as allocated and account for it
 * in the free counter.
 */
static void olq_alloc_dquot(struct buffer_head *bh, void *private)
{
	int *offset = private;
	struct ocfs2_local_disk_chunk *dchunk =
		(struct ocfs2_local_disk_chunk *)bh->b_data;

	ocfs2_set_bit(*offset, dchunk->dqc_bitmap);
	le32_add_cpu(&dchunk->dqc_free, -1);
}
/*
 * Create dquot in the local file for given id.
 *
 * Finds (or creates, by extending the file) a free entry, records its
 * logical and physical location in the ocfs2 dquot, writes the initial
 * on-disk structure and finally marks the entry allocated in the chunk
 * bitmap.
 */
int ocfs2_create_local_dquot(struct dquot *dquot)
{
	struct super_block *sb = dquot->dq_sb;
	int type = dquot->dq_type;
	struct inode *lqinode = sb_dqopt(sb)->files[type];
	struct ocfs2_quota_chunk *chunk;
	struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
	int offset;
	int status;
	u64 pcount;

	down_write(&OCFS2_I(lqinode)->ip_alloc_sem);
	chunk = ocfs2_find_free_entry(sb, type, &offset);
	if (!chunk) {
		/* Every existing chunk is full - grow the file */
		chunk = ocfs2_extend_local_quota_file(sb, type, &offset);
		if (IS_ERR(chunk)) {
			status = PTR_ERR(chunk);
			goto out;
		}
	} else if (IS_ERR(chunk)) {
		status = PTR_ERR(chunk);
		goto out;
	}
	od->dq_local_off = ol_dqblk_off(sb, chunk->qc_num, offset);
	od->dq_chunk = chunk;
	status = ocfs2_extent_map_get_blocks(lqinode,
				     ol_dqblk_block(sb, chunk->qc_num, offset),
				     &od->dq_local_phys_blk,
				     &pcount,
				     NULL);
	/* Fix: the original ignored this status and overwrote it with the
	 * result of the write below, so a failed lookup would have used
	 * an uninitialized dq_local_phys_blk. */
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

	/* Initialize dquot structure on disk */
	status = ocfs2_local_write_dquot(dquot);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}

	/* Mark structure as allocated */
	status = ocfs2_modify_bh(lqinode, chunk->qc_headerbh, olq_alloc_dquot,
				 &offset);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
out:
	up_write(&OCFS2_I(lqinode)->ip_alloc_sem);
	return status;
}
/*
 * Release dquot structure from local quota file. ocfs2_release_dquot() has
 * already started a transaction and written all changes to global quota file.
 *
 * Clears the entry's bit in its chunk bitmap and bumps the chunk's free
 * count, all within the caller's transaction.
 */
int ocfs2_local_release_dquot(handle_t *handle, struct dquot *dquot)
{
	int status;
	int type = dquot->dq_type;
	struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
	struct super_block *sb = dquot->dq_sb;
	struct ocfs2_local_disk_chunk *dchunk;
	int offset;

	status = ocfs2_journal_access_dq(handle,
			INODE_CACHE(sb_dqopt(sb)->files[type]),
			od->dq_chunk->qc_headerbh, OCFS2_JOURNAL_ACCESS_WRITE);
	if (status < 0) {
		mlog_errno(status);
		goto out;
	}
	/* Convert the file offset back to an entry index in the chunk */
	offset = ol_dqblk_chunk_off(sb, od->dq_chunk->qc_num,
					     od->dq_local_off);
	dchunk = (struct ocfs2_local_disk_chunk *)
			(od->dq_chunk->qc_headerbh->b_data);
	/* Mark structure as freed */
	lock_buffer(od->dq_chunk->qc_headerbh);
	ocfs2_clear_bit(offset, dchunk->dqc_bitmap);
	le32_add_cpu(&dchunk->dqc_free, 1);
	unlock_buffer(od->dq_chunk->qc_headerbh);
	ocfs2_journal_dirty(handle, od->dq_chunk->qc_headerbh);

out:
	/* Clear the read bit so that next time someone uses this
	 * dquot he reads fresh info from disk and allocates local
	 * dquot structure */
	clear_bit(DQ_READ_B, &dquot->dq_flags);
	return status;
}
static const struct quota_format_ops ocfs2_format_ops = {
.check_quota_file = ocfs2_local_check_quota_file,
.read_file_info = ocfs2_local_read_info,
.write_file_info = ocfs2_global_write_info,
.free_file_info = ocfs2_local_free_info,
};
/*
 * Registration record binding the QFMT_OCFS2 format id to the callbacks
 * above; passed to the VFS quota layer when the format is registered.
 */
struct quota_format_type ocfs2_quota_format = {
	.qf_fmt_id = QFMT_OCFS2,
	.qf_ops = &ocfs2_format_ops,
	.qf_owner = THIS_MODULE
};
| {
"pile_set_name": "Github"
} |
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer that changes filter(F, X) into list(filter(F, X)).
We avoid the transformation if the filter() call is directly contained
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
for V in <>:.
NOTE: This is still not correct if the original code was depending on
filter(F, X) to return a string if X is a string and a tuple if X is a
tuple. That would require type inference, which we don't do. Let
Python 2.6 figure it out.
"""
# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
class FixFilter(fixer_base.ConditionalFix):
    """Rewrite Python 2 ``filter(...)`` calls into Python 3 compatible forms.

    Three shapes are matched by PATTERN:
      * ``filter(lambda fp: xp, it)`` becomes the list comprehension
        ``[fp for fp in it if xp]``;
      * ``filter(None, seq)`` becomes ``[_f for _f in seq if _f]``;
      * any other ``filter(...)`` call is wrapped in ``list(...)`` unless
        an enclosing construct already iterates the result.
    """

    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
    >
    """

    # When the module does ``from future_builtins import filter`` the name
    # already has Python 3 semantics, so ConditionalFix skips the rewrite.
    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        """Return the replacement node, or None to leave ``node`` unchanged."""
        if self.should_skip(node):
            return

        if "filter_lambda" in results:
            # filter(lambda fp: xp, it)  ->  [fp for fp in it if xp]
            loop_name = results.get("fp")
            replacement = ListComp(loop_name.clone(),
                                   loop_name.clone(),
                                   results.get("it").clone(),
                                   results.get("xp").clone())
        elif "none" in results:
            # filter(None, seq)  ->  [_f for _f in seq if _f]
            replacement = ListComp(Name(u"_f"),
                                   Name(u"_f"),
                                   results["seq"].clone(),
                                   Name(u"_f"))
        else:
            # General call: wrap in list(...), but only when the result is
            # not already consumed by an iterating context (iter(), list(),
            # tuple(), sorted(), ...join(), or a for-loop target).
            if in_special_context(node):
                return None
            inner = node.clone()
            inner.prefix = u""
            replacement = Call(Name(u"list"), [inner])
        replacement.prefix = node.prefix
        return replacement
| {
"pile_set_name": "Github"
} |
##
## POP3 specific settings
##
protocol pop3 {
# Don't try to set mails non-recent or seen with POP3 sessions. This is
# mostly intended to reduce disk I/O. With maildir it doesn't move files
# from new/ to cur/, with mbox it doesn't write Status-header.
#pop3_no_flag_updates = no
# Support LAST command which exists in old POP3 specs, but has been removed
# from new ones. Some clients still wish to use this though. Enabling this
# makes RSET command clear all \Seen flags from messages.
#pop3_enable_last = no
# If mail has X-UIDL header, use it as the mail's UIDL.
#pop3_reuse_xuidl = no
# Keep the mailbox locked for the entire POP3 session.
#pop3_lock_session = no
# POP3 requires message sizes to be listed as if they had CR+LF linefeeds.
# Many POP3 servers violate this by returning the sizes with LF linefeeds,
# because it's faster to get. When this setting is enabled, Dovecot still
# tries to do the right thing first, but if that requires opening the
# message, it falls back to the easier (but incorrect) size.
#pop3_fast_size_lookups = no
# POP3 UIDL (unique mail identifier) format to use. You can use following
# variables, along with the variable modifiers described in
# doc/wiki/Variables.txt (e.g. %Uf for the filename in uppercase)
#
# %v - Mailbox's IMAP UIDVALIDITY
# %u - Mail's IMAP UID
# %m - MD5 sum of the mailbox headers in hex (mbox only)
# %f - filename (maildir only)
# %g - Mail's GUID
#
# If you want UIDL compatibility with other POP3 servers, use:
# UW's ipop3d : %08Xv%08Xu
# Courier : %f or %v-%u (both might be used simultaneously)
# Cyrus (<= 2.1.3) : %u
# Cyrus (>= 2.1.4) : %v.%u
# Dovecot v0.99.x : %v.%u
# tpop3d : %Mf
#
# Note that Outlook 2003 seems to have problems with %v.%u format which was
# Dovecot's default, so if you're building a new server it would be a good
# idea to change this. %08Xu%08Xv should be pretty fail-safe.
#
#pop3_uidl_format = %08Xu%08Xv
# Permanently save UIDLs sent to POP3 clients, so pop3_uidl_format changes
# won't change those UIDLs. Currently this works only with Maildir.
#pop3_save_uidl = no
# What to do about duplicate UIDLs if they exist?
# allow: Show duplicates to clients.
# rename: Append a temporary -2, -3, etc. counter after the UIDL.
#pop3_uidl_duplicates = allow
# POP3 logout format string:
# %i - total number of bytes read from client
# %o - total number of bytes sent to client
# %t - number of TOP commands
# %p - number of bytes sent to client as a result of TOP command
# %r - number of RETR commands
# %b - number of bytes sent to client as a result of RETR command
# %d - number of deleted messages
# %m - number of messages (before deletion)
# %s - mailbox size in bytes (before deletion)
# %u - old/new UIDL hash. may help finding out if UIDLs changed unexpectedly
#pop3_logout_format = top=%t/%p, retr=%r/%b, del=%d/%m, size=%s
# Maximum number of POP3 connections allowed for a user from each IP address.
# NOTE: The username is compared case-sensitively.
#mail_max_userip_connections = 10
# Space separated list of plugins to load (default is global mail_plugins).
#mail_plugins = $mail_plugins
# Workarounds for various client bugs:
# outlook-no-nuls:
# Outlook and Outlook Express hang if mails contain NUL characters.
# This setting replaces them with 0x80 character.
# oe-ns-eoh:
# Outlook Express and Netscape Mail breaks if end of headers-line is
# missing. This option simply sends it if it's missing.
# The list is space-separated.
#pop3_client_workarounds =
}
| {
"pile_set_name": "Github"
} |
<!--
  Scrollbar test matrix: the left column holds frames with scrolling="no",
  the right column the same frames with scrolling="yes". Each of the five
  rows loads a data: URL body with a different CSS overflow value
  (scroll, auto, hidden, visible, inherit) so every combination of the
  frame scrolling attribute and body overflow style is rendered once.
-->
<frameset cols="50%,50%">
  <frameset rows="20%,20%,20%,20%,20%">
    <frame frameborder="0" scrolling="no" src="data:text/html,<body style='overflow:scroll'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="no" src="data:text/html,<body style='overflow:auto'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="no" src="data:text/html,<body style='overflow:hidden'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="no" src="data:text/html,<body style='overflow:visible'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="no" src="data:text/html,<body style='overflow:inherit'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
  </frameset>
  <frameset rows="20%,20%,20%,20%,20%">
    <frame frameborder="0" scrolling="yes" src="data:text/html,<body style='overflow:scroll'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="yes" src="data:text/html,<body style='overflow:auto'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="yes" src="data:text/html,<body style='overflow:hidden'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="yes" src="data:text/html,<body style='overflow:visible'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
    <frame frameborder="0" scrolling="yes" src="data:text/html,<body style='overflow:inherit'>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>X<br>">
  </frameset>
</frameset>
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.