content
stringlengths 10
4.9M
|
---|
import fc from '../../src/fast-check';
import {
IncreaseCommand,
DecreaseCommand,
EvenCommand,
OddCommand,
CheckLessThanCommand,
} from './model/CounterCommands';
// Predicate used by every property below. It deliberately FAILS whenever the
// stringified value contains two identical consecutive characters (other than
// '"' and '}'), so that fast-check reliably finds a counterexample and the
// resulting error — counterexample plus shrink path — can be snapshotted.
const testFunc = (value: unknown) => {
  // Strip a surrounding `<TypedArray>.from(...)` wrapper (Int8Array, BigUint64Array,
  // Uint8ClampedArray, ...) so only the raw element list is scanned.
  const repr = fc
    .stringify(value)
    .replace(/^(|Big)(Int|Uint|Float)(8|16|32|64)(|Clamped)Array\.from\((.*)\)$/, '$5');
  // Walk code units pairwise, tracking the previous character explicitly.
  let previous = repr.charAt(0);
  for (let i = 1; i < repr.length; i += 1) {
    const current = repr.charAt(i);
    const isForbiddenRepeat = current === previous && current !== '"' && current !== '}';
    if (isForbiddenRepeat) {
      return false;
    }
    previous = current;
  }
  return true;
};
// Bumping from one patch of fast-check to another is not supposed
// to change the values that will be generated by the framework.
//
// Except in case of a real bug causing the arbitrary to be totally unusable.
//
// This suite checks this invariant stays true.
// Moreover, the framework should build consistent values throughout all the versions of node.
//
// `seed: 42` pins the pseudo-random source so every run produces the exact same
// values; `verbose: 2` makes the thrown error embed the full list of executions,
// which is what the error snapshots capture.
const settings = { seed: 42, verbose: 2 };
// Snapshot-based regression suite for synchronous arbitraries: each test runs a
// property known to fail (see `testFunc`) and snapshots the resulting error,
// which embeds the counterexample and its shrink path. If an upgrade changes
// the generated values, the snapshot diff reveals it immediately.
describe(`NoRegression`, () => {
  it('.filter', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.nat().filter((n) => n % 3 !== 0),
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('.map', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.nat().map((n) => String(n)),
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('.chain', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.nat(20).chain((n) => fc.clone(fc.nat(n), n)),
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('float', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.float(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('floatNext', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.float({ next: true }), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('double', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.double(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('doubleNext', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.double({ next: true }), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('integer', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.integer(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('nat', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.nat(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('maxSafeInteger', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.maxSafeInteger(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('maxSafeNat', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.maxSafeNat(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('string', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.string(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('asciiString', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.asciiString(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  // // Jest Snapshot seems not to support incomplete surrogate pair correctly
  // it('string16bits', () => {
  //   expect(() => fc.assert(fc.property(fc.string16bits(), v => testFunc(v + v)), settings)).toThrowErrorMatchingSnapshot();
  // });
  it('stringOf', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.stringOf(fc.constantFrom('a', 'b')), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  // NOTE: `v + v` doubles the string so a duplicated boundary character also fails the predicate.
  it('unicodeString', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.unicodeString(), (v) => testFunc(v + v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('fullUnicodeString', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.fullUnicodeString(), (v) => testFunc(v + v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('hexaString', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.hexaString(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('base64String', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.base64String(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('lorem', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.lorem(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('mapToConstant', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.mapToConstant({ num: 26, build: (v) => String.fromCharCode(v + 0x61) }),
          fc.mapToConstant({ num: 26, build: (v) => String.fromCharCode(v + 0x61) }),
          (a, b) => testFunc(a + b)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('option', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.option(fc.nat()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('oneof', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.oneof<any>(fc.nat(), fc.char()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('frequency', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.frequency<any>({ weight: 1, arbitrary: fc.nat() }, { weight: 5, arbitrary: fc.char() }),
          testFunc
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('clone', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.clone(fc.nat(), 2), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('shuffledSubarray', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.shuffledSubarray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), (v) =>
          testFunc(v.join(''))
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('subarray', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.subarray([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), (v) =>
          testFunc(v.join(''))
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.array(fc.nat()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('sparseArray', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.sparseArray(fc.nat()),
          (v) =>
            // Sum of first element of each group should be less or equal to 10
            // If a group starts at index 0, the whole group is ignored
            Object.entries(v).reduce((acc, [index, cur]) => {
              if (index === '0' || v[Number(index) - 1] !== undefined) return acc;
              else return acc + cur;
            }, 0) <= 10
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('sparseArray({noTrailingHole:true})', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.sparseArray(fc.nat(), { noTrailingHole: true }),
          (v) =>
            // Sum of first element of each group should be less or equal to 10
            // If a group starts at index 0, the whole group is ignored
            Object.entries(v).reduce((acc, [index, cur]) => {
              if (index === '0' || v[Number(index) - 1] !== undefined) return acc;
              else return acc + cur;
            }, 0) <= 10
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('infiniteStream', () => {
    expect(() =>
      fc.assert(
        // Only the first 10 values are materialized — the stream itself is infinite.
        fc.property(fc.infiniteStream(fc.nat()), (s) => testFunc([...s.take(10)])),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('set', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.set(fc.nat()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('tuple', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.tuple(fc.nat(), fc.nat()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  // Typed arrays: testFunc strips the `<TypedArray>.from(...)` wrapper before scanning.
  it('int8Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.int8Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uint8Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uint8Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uint8ClampedArray', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uint8ClampedArray(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('int16Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.int16Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uint16Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uint16Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('int32Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.int32Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uint32Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uint32Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('float32Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.float32Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('float64Array', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.float64Array(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('record', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.record({ k1: fc.nat(), k2: fc.nat() }, { withDeletedKeys: true }), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('dictionary', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.dictionary(fc.string(), fc.nat()), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('anything', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.anything(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('object', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.object(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('json', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.json(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('jsonObject', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.jsonObject(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('unicodeJson', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.unicodeJson(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('unicodeJsonObject', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.unicodeJsonObject(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('compareFunc', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.compareFunc(), (f) => testFunc(f(1, 2))),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('func', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.func(fc.nat()), (f) => testFunc(f())),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('ipV4', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.ipV4(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('ipV4Extended', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.ipV4Extended(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('ipV6', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.ipV6(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('domain', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.domain(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('webAuthority', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.webAuthority(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('webSegment', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.webSegment(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('webFragments', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.webFragments(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('webQueryParameters', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.webQueryParameters(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('webUrl', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.webUrl(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('emailAddress', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.emailAddress(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('date', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.date(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uuid', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uuid(), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('uuidV', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.uuidV(4), (v) => testFunc(v)),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('letrec', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.letrec((tie) => ({
            // Trick to be able to shrink from node to leaf
            tree: fc.nat(1).chain((id) => (id === 0 ? tie('leaf') : tie('node'))),
            node: fc.record({ left: tie('tree'), right: tie('tree') }),
            leaf: fc.nat(21),
          })).tree,
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('letrec (oneof:maxDepth)', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.letrec((tie) => ({
            tree: fc.oneof({ withCrossShrink: true, maxDepth: 2 }, tie('leaf'), tie('node')),
            node: fc.record({ a: tie('tree'), b: tie('tree'), c: tie('tree') }),
            leaf: fc.nat(21),
          })).tree,
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('letrec (oneof:depthFactor)', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.letrec((tie) => ({
            tree: fc.oneof({ withCrossShrink: true, depthFactor: 0.5 }, tie('leaf'), tie('node')),
            node: fc.record({ a: tie('tree'), b: tie('tree'), c: tie('tree') }),
            leaf: fc.nat(21),
          })).tree,
          (v) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('commands', () => {
    expect(() =>
      fc.assert(
        fc.property(
          // Model-based testing: snapshot captures the shrunk failing command sequence.
          fc.commands([
            fc.nat().map((n) => new IncreaseCommand(n)),
            fc.nat().map((n) => new DecreaseCommand(n)),
            fc.constant(new EvenCommand()),
            fc.constant(new OddCommand()),
            fc.nat().map((n) => new CheckLessThanCommand(n + 1)),
          ]),
          (cmds) => {
            const setup = () => ({
              model: { count: 0 },
              real: {},
            });
            try {
              fc.modelRun(setup, cmds);
              return true;
            } catch (err) {
              return false;
            }
          }
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('scheduler', async () => {
    await expect(
      fc.assert(
        fc.asyncProperty(fc.scheduler(), async (s) => {
          const received = [] as string[];
          for (const v of ['a', 'b', 'c']) {
            s.schedule(Promise.resolve(v)).then((out) => {
              received.push(out);
              s.schedule(Promise.resolve(out.toUpperCase())).then((out2) => {
                received.push(out2);
              });
            });
          }
          await s.waitAll();
          // Fails when the scheduler interleaves tasks as 'a', 'B', 'c' — the ordering
          // chosen by the scheduler is deterministic per seed, hence snapshot-able.
          return !received.join('').includes('aBc');
        }),
        settings
      )
    ).rejects.toThrowErrorMatchingSnapshot();
  });
  it('context', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.context(), fc.nat(), (ctx, v) => {
          ctx.log(`Value was ${v}`);
          return testFunc(v);
        }),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('Promise<number>', () => {
    expect(() =>
      fc.assert(
        fc.property(
          fc.integer().map((v) => [v, Promise.resolve(v)] as const),
          ([v, _p]) => testFunc(v)
        ),
        settings
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('user defined examples', () => {
    expect(() =>
      fc.assert(
        fc.property(fc.string(), (v) => testFunc(v)),
        { ...settings, examples: [['hi'], ['hello'], ['hey']] }
      )
    ).toThrowErrorMatchingSnapshot();
  });
  it('user defined examples (including not shrinkable values)', () => {
    expect(() =>
      fc.assert(
        fc.property(
          // Shrinkable: built-in
          fc.nat(),
          // Cannot shrinking: missing unmapper
          fc.convertFromNext(fc.convertToNext(fc.nat()).map((v) => String(v))),
          // Shrinkable: unmapper provided
          fc.convertFromNext(
            fc.convertToNext(fc.nat()).map(
              (v) => String(v),
              (v) => Number(v)
            )
          ),
          // Shrinkable: filter can shrink given the value to shrink matches the predicate
          fc.nat().filter((v) => v % 2 === 0),
          (a, b, c, d) => testFunc([a, b, c, d])
        ),
        {
          ...settings,
          examples: [
            [1, '2', '3', 4],
            [5, '6', '7', 8],
            [9, '10', '11', 12],
            [13, '14', '15', 16],
            [17, '18', '19', 20],
          ],
        }
      )
    ).toThrowErrorMatchingSnapshot();
  });
});
// Same regression guarantees as above, but exercising asynchronous properties:
// each property rejects, and the rejection error is snapshotted.
describe(`NoRegression (async)`, () => {
  // Arbitrary producing integers wrapped in already-resolved promises.
  const asyncNumber = fc.integer().map((v) => Promise.resolve(v));
  it('number', async () => {
    await expect(
      async () =>
        await fc.assert(
          fc.asyncProperty(fc.integer(), async (v) => testFunc(v)),
          settings
        )
    ).rejects.toThrowErrorMatchingSnapshot();
  });
  it('.map (to Promise)', async () => {
    await expect(
      async () =>
        await fc.assert(
          fc.asyncProperty(asyncNumber, async (v) => testFunc(await v)),
          settings
        )
    ).rejects.toThrowErrorMatchingSnapshot();
  });
  it('func (to Promise)', async () => {
    await expect(
      async () =>
        await fc.assert(
          fc.asyncProperty(fc.func(asyncNumber), async (f) => testFunc(await f())),
          settings
        )
    ).rejects.toThrowErrorMatchingSnapshot();
  });
  it('infiniteStream (to Promise)', async () => {
    await expect(
      async () =>
        await fc.assert(
          fc.asyncProperty(fc.infiniteStream(asyncNumber), async (s) => testFunc(await Promise.all([...s.take(10)]))),
          settings
        )
    ).rejects.toThrowErrorMatchingSnapshot();
  });
});
|
Play up to 5 piece endgames against the computer. Get feedback on sub-optimal moves and receive an endgame rating. Over 10,000 positions, all taken from actual games. Train Endgames
News
June 12 2018 A new opening training feature has been added to the site. The opening trainer allows you to create one or more repertoires, and train them using a spaced repetition learning algorithm. The opening trainer integrates with the beta site online play feature and after each game played you get feedback on where the game deviated from your current repertoire. All membership levels have access to the opening trainer, with free members able to create repertoires with up to 20,000 moves. The trainer makes use of some existing premium features such as unlimited opening explorer and cloud engine access, which can be helpful in choosing new moves for your repertoire, however the core repertoire management and training features work fine for free members. For more details please read the opening training blog post announcement and the opening training user guide section.
November 23 2017 We have introduced a new diamond membership level, and a new membership signup page with some important new features. The diamond membership level provides a way to offer features that members have requested, but are difficult to provide at gold membership pricing. The first diamond-only feature will be the ability to use multiple CPU threads when using chess engines for position analysis. Currently, each analysis is run on a single CPU thread. Diamond members will be able to run 8 threads at once, either with the same engine for deeper analysis, or across multiple engines to analyse up to 8 different positions at the same time. A total of 96 hours across all threads will be provided each day (12 hours per thread), with a maximum analysis time for any one thread of 36 hours. The higher priced Diamond membership will help fund the purchase of more engine cluster servers to provide the extra analysis power. More features will be added to diamond in the future as we roll out new features, however Gold and Silver memberships will retain all current features, and will continue to receive at least some of the benefits of new premium features in the future. New membership signup page features: Allows access to our new diamond membership level.
Support for credit card purchases without using Paypal (should work in countries that do not support Paypal).
Gifting memberships directly to other usernames.
Buying gift certificates for premium memberships which can be printed or emailed on to the recipient.
Group membership discounts by purchasing multiple memberships at once.
Upgrading from silver or gold membership to a higher level membership.
Extending the period of an existing active membership.
Bitcoin support integrated directly into main payment page.
July 5 2017 A new version of the Play online interface is now available on the Beta site. More details on this release can be read on the blog post announcing the update. We have also recently moved to a fully encrypted site, with all pages now served over https.
November 29 2016 An initial release of the next version of Chesstempo is now available for beta testing. The beta site will be a complete rewrite of all site features, but for now we have tactics solving, and custom sets available for testing. The new site also includes a number of enhancements. You can read more details on the blog post announcing the features. A quick summary of the new features follows: Available to all users: Distraction free tactics solving mode. Solve in full screen with only the board displayed.
Graph and performance summary for current solving session.
Ability to set difficulty level per problem set type.
Automatic annotations for all rated games played on Chesstempo.
Use of markdown formatting in comments, allowing for structures such as heading/links/lists etc in comments.
"Reply to" button for comments. Replying to a comment will send a notification email to the recipient (at their option).
Easier access to tag descriptions from the tagging interface.
HTTPS support - all traffic on the new site is encrypted via HTTPS.
Premium member features: Cloud engines (Stockfish and Komodo) running on Chesstempo servers to replace the Java based UCI engine interface.
Tactics problems extracted from your games played on Chesstempo and placed in a personal problem set for solving.
Complex tactical motif matching, allowing tag matches like "(Fork AND (Pin OR Distraction)) AND NOT Discovered Attack".
Editing of custom sets after they have been created. Useful for tuning parameters like spaced repetition learning intervals or rating ranges after the set is made.
Ability to specify custom set rating adjustment algorithm, so custom sets can now use untimed, blitz or mixed rating adjustment methods.
Allow custom sets to optionally choose the "give me problems close to my rating" problem selection algorithm.
Advanced problem attempt history filter. Instead of just pre-chosen attempt criteria such as "Problems I always got wrong", you can now specify exact problem history criteria such as "Problem I got wrong more than 4 times and correct less than 2 times".
December 13 2015 A new training targets feature has been released. Training targets allow you to set targets for tactics, endgames, guess the move and online playing and have feedback provided on progress to completion. Targets can be given fixed time periods (e.g. solve 100 problems by 10th July) or recurring targets such as solve 100 problems each week. Targets can be based on number correct (or won), number completed, rating or rank/percentile. This feature is available to both premium and free members. Coaches can also assign training targets to their students, and get a summary of training target progress across all their students. Training targets can be created/deleted on the Training Targets page, which can be reached via the Training Targets option on the Training menu. The Training Targets section of the user guide has more detailed instructions on how to use the new feature. Chesstempo now has a large set of achievement badges that can be earnt by completing tasks on the site. The achievement badges are designed to provide training motivation beyond the simple measure of user rating. We believe this will be especially useful for younger chess players, although we hope adults will also enjoy the new achievements system. Your list of achievements can be found on the achievements tab on your stats page. A list of all available achievement badges can also be found there. You can also check your rank on the achievement points leaderboard. If you would prefer not to be bothered with achievement notifications while training or playing, they can be turned off in your preferences (on the "other" tab). The feature is available to all users, although some achievement types are restricted to premium members. 
Note that achievements are not retroactive, so achievements like all time highs or solving streaks are awarded based on future efforts made after the release of the achievement system, rather than achievements that may have been met before the system was in place. The Achievements section describes the new feature in more detail. Many new features have been added to the online play feature since it was launched, some of which have been available for some time now. The major additions since launch have been: Abandoned game detection - If a user abandons a game by disconnecting, or ceasing to play moves you will be given the option to claim a win or abort the game if they do not return to playing in a reasonable period time.
Create unrated games from arbitrary start positions - The advanced panel in the challenge create dialog can now create games with any start position you like.
Visibility of think times and clock changes while moving through move list on completed games. After the game is over you can move through the move list and the clocks will update to show the time they had at that point in the game. Hovering over each move in the move list will display a tooltip with the think time used for that move.
Guest play - Guests can now play without signing in to play, some restrictions apply such as not being able to chat, or create rated seeks (or see the rated seeks of other players).
Seek Formula - You can now define who can send you direct seeks, and under what conditions, using a seek formula.
Block list - Block users you do not want to play again, blocked users will not be able to send you seeks, answer your seeks or chat with you.
Seek Favourites - If you have a favourite seek type that is not one of the defaults you can enter the seek details and then save the seek as a favourite which will create a new button in the seek dialog that can be clicked on in the future. Start positions, rating ranges, opponent names can all be saved in the favourite.
Hide ratings mode - In your preferences you can turn on hide ratings mode which avoids display rating information in the game view while you are playing.
Chat disabling - Toggle chat on and off using the cross/tick icon in the chat window.
More diverse bot opening book. The bots now play a much wider range of openings due to using a much more diverse opening book compared to the original bots used when the site first launched.
Improved handling of aborts when user does not play first move.
Option to persistently turn on move confirmations in correspondence games.
January 14 2015 Chess Tempo is pleased to announce that the Play Chess Online feature has entered full public beta testing. All users can now play each other on Chess Tempo, with support for both correspondence and traditional online chess playing. During beta testing, all features will be available to all users. Playing will remain free after beta testing is complete, but some advanced features will be available only to premium members. The purpose of the public beta testing is to iron out any bugs not found during the closed beta testing, and to ask for feedback and improvement suggestions from a wider audience. Please provide any bug reports or improvement suggestions in the forum. The Play Online section of the user guide has instructions on how to use the new feature.
May 27 2014 An initial mobile version of the tactics solving is now available at Chesstempo mobile. This is an early test version, so please report any problems on the forum. It should work reasonably well in the latest IPhone and Android devices. If you have an older Android device, you can try installing a standalone Chrome or Firefox browser app, which should help work around bugs in the older Android browser. A new "Mixed" rating type has been added, it uses a new rating adjustment method which acts like the completely untimed standard mode if you solve under 5 minutes, and after that reverts to a blitz-like mode where time is part of the rating adjustment. The new mode also includes new non-winning problem types where the best move may leave you in an even position, and other moves would lead to a losing or lost position. Premium members can also create custom sets that explicitly include or exclude these non-winning type problems. As already announced on the forum, new Endgame custom sets have been created for Premium members. Premium members have always been able to create their own endgame type specific sets, but now they have access to pre-created sets for each endgame type in the problem set selector. So endgame sets such as "KRPvKR" can be be quickly chosen without having to create a new custom set.
August 21 2013 A new 'Guess The Move' feature has been released. This allows you to play through selected master games from one side, trying to play the best moves possible. Each move is scored based on how closely your moves match either the master move, or the best engine move where you play a different move to the master. Similar to the tactics , both moves and players are rated, allowing more reward (or less punishment) on difficult moves, and harsher penalties for missing easy moves. The Guess The Move user guide has a full set of instructions on how to use the feature. This new feature includes a modified tagging and commenting system, where tags and comments can be assigned to either the full game (by moving to the start of the game), or any move in the game (by moving to the board position after the move is played). The tags include a long list of new 'positional' tags, which will help categorise the types of positions and moves found throughout the games. I am hoping some of the stronger players on CT can help make this new feature even more useful, by providing some commentary at important points in the game ,but comments from all level of players are of course welcome. You can optionally skip the opening of any GTM game by clicking on the skip opening button under the board, but this must be done before you play any moves. This is mostly a Gold member feature, Silver members have a small selection of games available, and free members have a very limited selection, but enough to allow them to sample the feature. Games accessible to Gold members will continue to be added over time, the rate of the addition will be partly dictated by how quickly the ratings of the existing games stabilise (it will likely take several weeks for the initial games to begin to approach stable move ratings). Chess Tempo is now providing free Gold membership to players with either GM or IM FIDE titles. 
For those who qualify, please see the Free Membership section of the FAQ for details on how to receive your free Gold membership.
April 24 2013 A new version of the tactics solving page has entered into beta testing, you can access the beta interface via the Training menu, or directly at Tactics Beta Link. The new interface has the following improvements: After each problem is completed you can see a history of all your previous attempts on the problem (Gold members only).
Engine variations shown to premium members can be expanded, collapsed or completely hidden.
The opponent pre-move is now shown in the move list, and the move list is updated as each move is played while solving problems.
Board setting changes are now handled by clicking the settings icon in the top left of the board. Several new options, including figurine notation and new board styles are now available.
The board and move list can now both be resized by dragging the available drag handles.
Board and move list settings are remembered via cookies, which means you can now use different settings on different machines without having to switch back and forth in the preference settings. This is especially useful if you solve problems on different machines, with different screen sizes.
Premium users with access to the custom tagging feature (now called Personal Tags) have a tag management panel that allows personal tags to be both created and deleted. Personal tags also have a description associated with them, which allows a more detailed description of the tag to be added.
The 'Start Solving' panel now allows you to change the current problem set before your solving session begins. There is also a 'Change Set' button below the board, so you can change your current problem set during any solving session without going to your preferences.
There is a new analysis board which remembers the size of the analysis window. Click on the annotation icon, or press the 'a' key to add comments to your analysis. See the full list of keyboard commands available on the new boards in the PGN Viewer key bindings section of the user guide.
Resuming problems after a disconnect is now supported in Blitz mode, note that your clock will show any time elapsed since you were first given the problem.
Problem information sections (comments, tags, ratings etc) are now placed inside collapsible and expandable elements. This allows the comments to be moved closer to the board when required by collapsing the problem ratings details element.
The 'Choose Best Move(s)' advice at the top of the page can now be permanently hidden.
A 'Reset' session button has been added, which allows the current session stats to be cleared.
The wrong move played is now added to the 'Last Problem Failed' message. Please report any bugs in the new interface by posting in the Bug Reports. Note that the premium feature set of the new UI is not 100% the same as the old UI, as such, existing premium members will be able to choose the old UI for 1 year from today, after which the old UI will no longer be available.
August 8 2012 Chess Tempo has begun non-public testing of a new feature that allows users to play games against each other. Both correspondence and traditional online chess playing will be supported. It is hoped this provides a fun way to test out your skills against other players without leaving the site. In the future, the playing feature will become more closely integrated into the training features of the site, and will provide a way of giving statistically valid feedback on how your training is impacting your actual playing strength. We will be expanding the non-public beta testing soon, if you would like to be added to the list of testers, please email [email protected]. Work on the playing feature has taken up the majority of recent development time, however a number of small, but hopefully useful features have been added: Spaced repetition custom sets have been added for gold members. Spaced repetition is a memory learning method that allows you to maximise recall, while minimising learning time. You can read about it at the Wikipedia Spaced Repetition Article. Instructions on how to use the feature can be found in the Spaced Repetition Section of the User Guide.
Problem history filtering and summary stats allows Gold members to filter their problem attempt history table by a number of criteria such as date range, duplicate status, problem set, tag, solve time and rating range. Summary stats of the filtered results allow you to see stats for the filtered attempts such as performance rating, accuracy, average time taken, average problem rating, best and worst percentage rank, and others. Together the filter and summary stats allow you to answer questions such as how your performance has changed over time, not just in rating, but also in solve time and accuracy, and for specific tactical motifs or rating ranges, and for only those problems you have seen for the first time if desired.
As already announced in the forum, a new version of the PGN viewer is currently in testing, the board used in the new PGN Viewer will eventually be used in all Chess Tempo boards, so please help test the new board at the PGN Viewer BETA page.
A "Show Solution" button option now allows you to avoid seeing the solution immediately after a problem is complete, allowing you to continue to search for the best move after failing a problem, and eventually clicking the "Show Solution" button in order to see to answer. The option is off by default, and can be turned on in the "Other" tab in your preference settings.
Merged custom sets allow Gold members to merge several existing custom sets into one merged set. You can select the weights of each subset to control how many problems from each subset will be served. For example you could create three subsets Fork, Pin and Discovered Attack, asking for 70% forks, 20% pins and 10% discovered attacks. Another example would be a merged subset of mate and non-mate sets with say 80% non-mates and 20% mates, you could also merge a larger number of sets, and control of mates versus non-mates based on mate length. Problem attempt based sets can also be merged, for example, two sets for problems you have seen before and problems you have not seen before could be merged with say a 90% weight on new problems and 10% on old problems, allowing you to review old problems on average 1 in every 10 attempts. Essentially any existing user created custom sets can be merged (assuming all subsets are of the same problem type, i.e. you cannot merge tactic and endgame sets).
Custom sets with rated attempts (any custom set with attempts made after November 29th 2010) can now have graphs shown for rating history, problem rating distribution, and daily rating fluctuations. Gold members can access the graphs by clicking on the new view button shown on the custom set performance tables on the tactics and endgame stats tabs on each user's stats page.
Solve time custom set/problem search now allows you to specify if you want to include all unsolved problems, as well as those you have already solved. You can now also choose to search based only on your most recent attempt, allowing you to focus on problems you solved slowly on your most recent attempt, even if you had solved them quickly in the past.
Several new advanced problem search/custom set criteria have been added (for gold members only): Number of Pieces - The number of pieces on the board at the start of the problem. Colour to Move - The colour of the pieces controlled by the player solving the problem. Game Move Number - The move number in the source game at which the problem starts. Comment Search - Matches only problems with the supplied search string in one of their problem comments. For example "boden" would return problems where people had discussed Boden's mate (likely because the problem itself was of that type). Tag Vote Threshold - The minimum aggregate votes on a tag before it is counted for tag searches. Setting a higher number means more users had voted for the tag, but will also reduce the total number of matching problems. The number of pieces, colour to move and game move number search criteria can be quite useful when used with the opening name/ECO code search criteria, allowing you to choose tactics from games that used your opening of choice, and concentrate on the tactics that actually appeared in the opening phase of the game, rather than middle game/endgame positions that happened to arise out of the opening you searched for.
A beta release of a new student coaching feature has been added which allows users to nominate a coach, who then gets access to otherwise hidden stats, such as the students' per problem mistake moves, play versus computer stats, tagging stats and FIDE rating estimates. Coaches can also edit their students' preferences, including assigning custom problem sets to students. Coaches get a new tab on their stats page showing a list of their students, and summary of recent student accuracy. Coaches are also able to download problem history for their students, and can use the new problem history filtering on their students' problem attempts. To use the new coaching features, students must nominate their coach (or coaches) on the "Coaches" tab in their preference settings. To use these features, both the coach and the students must have active gold memberships. Several updates to the tags and tag descriptions have also been made since the last major update, including new tags for Avoiding Perpetual, Avoiding Stalemate, Coercion (Required to clarify Attraction), Counting, Defensive Move, Desperado, Mate Threat, Quiet Move and Unpinning. Please see the descriptions on the Tactical Motifs page before applying these new tags. Some of the definitions of the older tags have also been refined and clarified.
The forum has also recently added support for embedding PGN Viewers directly into forum posts by enclosing a PGN game description with [pgn][/pgn] tags.
29th November 2010 Chess Tempo now provides the ability to use chess engines running on your own machine to analyse positions and games on Chess Tempo. Any UCI engine such as Rybka, Stockfish etc can be used to supply analysis which is displayed directly in the Chess Database or Analysis board pages. Full access to this feature is available to Gold members only, you can view screen shots of the new analysis interface in action on the Membership descriptions page. The Engine Analysis section of the FAQ should be read before using the engine. There is also a new Engine Analysis Chapter in the user guide. Other changes already introduced since the last major update include: A new option to avoid problems with ALTS was added.
Performance graphs for play vs computer made available to gold members.
An Explore button was added to Play vs Computer page. This allows viewing games in the database page where opening explorer stats can be examined for the lines you played, and engine analysis and game annotation performed.
Custom problem sets now have personal ratings, allowing gold members to track future progress on previously unrated custom problem sets. For example you can get a personal rating for "Pin" or "Fork" sets.
Personal tagging stats have been added to help users understand the degree to which other users agree or disagree with their tags.
Alpha and "Good Companion" board piece styles now available. Another thanks to the volunteer translators and beta testers who have again helped greatly with this release. User "anti" is now kindly helping with the French translation. If you would like to help out with either an existing or new translation, please send an email to [email protected]
6th September 2010 Chess Tempo now has a new Chess Game Database. The database includes over 2 million games, including all the source games for the tactics and endgame problems. There is an opening explorer, to view statistics on the popularity and performance of opening moves. A player and openings search page are also available. The database can be searched via either a quick search or advanced search interface. Advanced search includes a material search and convenient endgame finder. Tactic and Endgame problems have convenient links to the source games the problems were extracted from. Custom problem sets can now include criteria based on matching the source game header information, such as player, player rating, opening name etc. The database is documented in a new user guide that covers most of the Chess Tempo features in detail. Many of the database features are available for all users, while some of the features such as material search, source game links and custom set integration are available only to Premium members, and some of those, only to Gold members. Please see the Membership descriptions page for details. This feature is still considered to be a public BETA release, please report any bugs you find in the Bug Report Forum. The ability to play rated games against a computer has also been added. Several computer opponents have been created, each with a different strength setting. The computer opponents are given ratings and the user is matched against the computer opponent closest to their rating. In unrated mode, users can choose the target rating of their computer opponent and the closest matching opponent will be chosen. Premium users can launch the play vs computer feature from database and problem pages in order to play positions against the computer. More details can be found in the Chess Tempo User Guide. The Play versus Computer feature is also still in public BETA stage, bug reports are welcome. 
You need to have Java installed on your machine to be able to use this feature. In order to fit more items into the navigation menu, a new navigation menu has been added to the top of all pages. Other changes introduced recently or as part of this release include: Changes to tag votes providing more visibility of voting patterns, and more convenient voting on existing tags.
A Chess Tempo puzzle widget.
A warning message is now shown to those users who are still using older browsers that may not work well on Chess Tempo. Logged in users can hide the warning permanently if desired.
Click and Click piece movement support has been enabled. It can be used simultaneously with Drag and Drop, but either Click and Click or Drag and Drop can be disabled in the preferences.
Piece movement sounds have been added to boards, they are turned off by default, but can be turned on in the preferences. The sounds are only available on the latest versions of Firefox, Google Chrome and Safari browsers. A final thank you to the volunteer translators and beta testers who helped with this release. If you would like to help out with either an existing or new translation, please send an email to [email protected]
10th May 2010 An analysis board is now available, it allows exploring different lines in a position after a problem is complete. The analysis board also allows your analysis to be copied directly to a new problem comment. The analysis moves (and annotations) are shown in problem comments, accompanied by a mini-board on which the position is displayed, and the moves can be played. Your own comments can now be deleted or edited (as long as they are the most recent comment to be made on a problem). Comments can also be voted up or down, comments with several down votes will be hidden from the default comment view, and users will need to click the "show all" button to see these comments. Please read the comment voting section of the Problem Comment Guidelines before using the voting system. For further details on these changes, and how to use them, please see the Analysis Board and Problem Commenting Questions section of the FAQ. Several new custom problem set filtering options have been added. The ability to control the order problems are served from custom sets has also been added, allowing for the creation of custom problem sets that progress in difficulty as you go through the set. You can read more about these in the Advanced Custom Set Filters and Custom Problem Set Sorting sections of the membership description page. The advanced filtering and sorting features are only available to Gold members.
24th November 2009 Several new translations have been added to the site. Dutch (by phdevos)
French (by arna, antoyo and najull)
Italian (by andreacoda and igrino)
Polish (by alefzero and szarlej)
Portuguese (by alvarofrota and dahora)
Spanish (by tornado)
Turkish (by philiposi) A big thank you to those who dedicated many hours to help with the translations, including those who helped with testing. You can access language specific pages via nl.chesstempo.com, fr.chesstempo.com, it.chesstempo.com, pl.chesstempo.com, pt.chesstempo.com, es.chesstempo.com and tr.chesstempo.com. In most cases these URLs will not be required as the site should choose the correct language automatically. If you find a bug in a translation, or if you would like to help translate ChessTempo into your language, either send an e-mail to [email protected] or post in the appropriate thread in the translation forum. The preferred language of problem comments and forum posts is still English, however new non-English forums have been added for users who would like to post in their native language. A new problem generator has also been completed. Improvements include: Computer analysis is now shown for the best opponent moves, this makes it easier to understand unexpected opponent moves.
All longer mates will now be considered as alternatives.
More human like responses for some opponent moves.
Improved Pruning - Less tactics finishing too early.
Deeper computer analysis on moves close to the alternative threshold to help clarify their status.
Tactics with moves just under the alternative threshold are rejected.
Many computer lines truncated to avoid showing silly moves at the edge of the search horizon.
Many other small improvements. Initially these improvements will only be apparent on new problems, however the generator will update existing problems over the next few weeks. These changes will not eliminate all "needs more moves" or "needs different opponent move" situations. Please continue to use those tags where appropriate. Several changes have been made to the stats graphs: A new candlestick graph shows rating fluctuations over each day in the last month, thick vertical lines show the start and end ratings for the day, and thin vertical lines show the high and low ratings for the day. Available to gold members only.
Rating and Percentile rank graphs are now zoomable by selecting the area to be zoomed with the mouse.
Axis labels now use more regular intervals.
Rating distribution graphs have been merged into one graph and so the correct/incorrect distribution is now available to both silver and gold members. For premium members, non auto-renewing membership subscriptions are now available. You can now pay for a single month or year without having to worry about your subscription auto-renewing after you have stopped using the site. Read more about the different membership options and their benefits. |
/* firmware/coreboot/src/mainboard/siemens/mc_tcu3/romstage.c */
/*
* This file is part of the coreboot project.
*
* Copyright (C) 2013 Google Inc.
* Copyright (C) 2013 Sage Electronic Engineering, LLC.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include <stddef.h>
#include <arch/cpu.h>
#include <lib.h>
#include <arch/io.h>
#include <arch/cbfs.h>
#include <arch/stages.h>
#include <console/console.h>
#include <cbmem.h>
#include <cpu/x86/mtrr.h>
#include <romstage_handoff.h>
#include <timestamp.h>
#include <soc/gpio.h>
#include <soc/iomap.h>
#include <soc/lpc.h>
#include <soc/pci_devs.h>
#include <soc/romstage.h>
#include <soc/acpi.h>
#include <soc/baytrail.h>
#include <drivers/intel/fsp1_0/fsp_util.h>
#include <hwilib.h>
/**
 * \brief Mainboard hook for setup that must run before FSP init.
 *
 * Intentionally empty: the mc_tcu3 board needs no board-specific
 * configuration at this point; SoC code and FSP defaults suffice.
 */
void early_mainboard_romstage_entry(void)
{
}
/**
 * Get function disables - most of these will be done automatically
 *
 * Intentionally empty: this board does not override any function-disable
 * bits, so both masks are left untouched and the platform defaults apply.
 *
 * @param fd_mask  pointer to the first function-disable bitmask (not modified)
 * @param fd2_mask pointer to the second function-disable bitmask (not modified)
 */
void get_func_disables(uint32_t *fd_mask, uint32_t *fd2_mask)
{
}
/**
 * \brief Mainboard hook for setup that must run after FSP init.
 *
 * Intentionally empty: no post-FSP board-specific work is required for
 * the mc_tcu3 mainboard.
 */
void late_mainboard_romstage_entry(void)
{
}
/*
 * Azalia (HD Audio) codec verb data for the Realtek ALC262 (0x10EC0262).
 *
 * Each "Pin Complex" group is four 32-bit verbs that program one byte of
 * the pin's 32-bit configuration default (verbs 0x71C..0x71F cover
 * bytes 0..3).
 * NOTE(review): the per-pin values presumably reflect the mc_tcu3 audio
 * wiring (jack colour/location/connection type) - verify against the
 * board schematics before changing anything here.
 */
const uint32_t mAzaliaVerbTableData13[] = {
	/*
	 *ALC262 Verb Table - 10EC0262
	 */
	/* Pin Complex (NID 0x11 ) */
	0x01171CF0,
	0x01171D11,
	0x01171E11,
	0x01171F41,
	/* Pin Complex (NID 0x12 ) */
	0x01271CF0,
	0x01271D11,
	0x01271E11,
	0x01271F41,
	/* Pin Complex (NID 0x14 ) */
	0x01471C10,
	0x01471D40,
	0x01471E01,
	0x01471F01,
	/* Pin Complex (NID 0x15 ) */
	0x01571CF0,
	0x01571D11,
	0x01571E11,
	0x01571F41,
	/* Pin Complex (NID 0x16 ) */
	0x01671CF0,
	0x01671D11,
	0x01671E11,
	0x01671F41,
	/* Pin Complex (NID 0x18 ) */
	0x01871C20,
	0x01871D98,
	0x01871EA1,
	0x01871F01,
	/* Pin Complex (NID 0x19 ) */
	0x01971C21,
	0x01971D98,
	0x01971EA1,
	0x01971F02,
	/* Pin Complex (NID 0x1A ) */
	0x01A71C2F,
	0x01A71D30,
	0x01A71E81,
	0x01A71F01,
	/* Pin Complex (NID 0x1B ) */
	0x01B71C1F,
	0x01B71D40,
	0x01B71E21,
	0x01B71F02,
	/* Pin Complex (NID 0x1C ) */
	0x01C71CF0,
	0x01C71D11,
	0x01C71E11,
	0x01C71F41,
	/* Pin Complex (NID 0x1D ) */
	0x01D71C01,
	0x01D71DC6,
	0x01D71E14,
	0x01D71F40,
	/* Pin Complex (NID 0x1E ) */
	0x01E71CF0,
	0x01E71D11,
	0x01E71E11,
	0x01E71F41,
	/* Pin Complex (NID 0x1F ) */
	0x01F71CF0,
	0x01F71D11,
	0x01F71E11,
	0x01F71F41 };
/*
 * Single-entry codec verb table list; referenced by
 * mainboard_AzaliaConfig.AzaliaVerbTable below and consumed by the FSP.
 */
const PCH_AZALIA_VERB_TABLE mAzaliaVerbTable[] = { {
	/*
	 * VerbTable: (RealTek ALC262)
	 * Revision ID = 0xFF, support all steps
	 * Codec Verb Table For AZALIA
	 * Codec Address: CAd value (0/1/2)
	 * Codec Vendor: 0x10EC0262
	 */
	{
		0x10EC0262, /* Vendor ID/Device IDA */
		0x0000, /* SubSystem ID */
		0xFF, /* Revision IDA */
		0x01, /* Front panel support (1 = yes, 2 = no) */
		0x000B, /* Number of Rear Jacks = 11 */
		0x0002 /* Number of Front Jacks = 2 */
	},
	/* Verb payload: the pin-configuration words defined above. */
	(uint32_t *)mAzaliaVerbTableData13 } };
/*
 * Azalia (HD Audio) controller configuration handed to the FSP via
 * UpdData->AzaliaConfigPtr in romstage_fsp_rt_buffer_callback().
 * NOTE(review): field semantics are defined by the FSP's
 * PCH_AZALIA_CONFIG structure - verify against the BayTrail FSP
 * integration guide before changing values.
 */
const PCH_AZALIA_CONFIG mainboard_AzaliaConfig = {
	.Pme = 1,
	.DS = 1,
	.DA = 0,
	.HdmiCodec = 1,
	.AzaliaVCi = 1,
	.Rsvdbits = 0,
	/* Exactly one verb table entry (see mAzaliaVerbTable above). */
	.AzaliaVerbTableNum = 1,
	.AzaliaVerbTable = (PCH_AZALIA_VERB_TABLE *)mAzaliaVerbTable,
	/* Units per FSP definition - TODO confirm (likely microseconds). */
	.ResetWaitTimer = 300 };
/** \brief customize fsp parameters here if needed
 *
 * Points the FSP at the board's Azalia configuration and, when a hwinfo
 * block with SPD data is present, enables memory-down operation with the
 * DDR3 timing parameters read from that SPD data.  If the hwinfo block or
 * its SPD field is missing, the function returns early and the FSP keeps
 * its default DDR3 timings.
 *
 * @param FspRtBuffer FSP runtime buffer whose UPD data region is updated
 */
void romstage_fsp_rt_buffer_callback(FSP_INIT_RT_BUFFER *FspRtBuffer)
{
	uint8_t spd[0x80];
	UPD_DATA_REGION *UpdData = FspRtBuffer->Common.UpdDataRgnPtr;
	/* Initialize the Azalia Verb Tables to mainboard specific version. */
	UpdData->AzaliaConfigPtr = (UINT32)&mainboard_AzaliaConfig;
	/* Get SPD data from hwinfo block and set up memory down */
	/* parameters for FSP accordingly. */
	/* A non-zero return is treated here as "hwinfo block not found". */
	if (hwilib_find_blocks("hwinfo.hex")) {
		printk(BIOS_ERR,
			"HWInfo not found, use default timings for DDR3.\n");
		return;
	}
	/* Require the full 0x80-byte SPD field; partial data is rejected. */
	if (hwilib_get_field(SPD, spd, sizeof(spd)) != sizeof(spd)) {
		printk(BIOS_ERR,
			"SPD not found in HWInfo, use defaults for DDR3.\n");
		return;
	}
	/* Set up DDR timings from HWInfo. */
	/* NOTE(review): the spd[] byte offsets below are assumed to follow
	 * the hwinfo block's SPD layout (byte 2 = DRAM type, byte 4 =
	 * density, byte 12 = speed, etc.) - verify against the hwinfo
	 * format specification for this board. */
	UpdData->PcdMemoryParameters.EnableMemoryDown = 1;
	UpdData->PcdMemoryParameters.DRAMType = spd[2];
	UpdData->PcdMemoryParameters.DIMM0Enable = spd[3] & 0x01;
	UpdData->PcdMemoryParameters.DIMM1Enable = (spd[3] >> 1) & 0x01;
	UpdData->PcdMemoryParameters.DIMMDensity = spd[4];
	UpdData->PcdMemoryParameters.DIMMDWidth = spd[5];
	UpdData->PcdMemoryParameters.DIMMSides = spd[7];
	UpdData->PcdMemoryParameters.DIMMBusWidth = spd[8];
	UpdData->PcdMemoryParameters.DRAMSpeed = spd[12];
	UpdData->PcdMemoryParameters.DIMMtCL = spd[14];
	UpdData->PcdMemoryParameters.DIMMtWR = spd[17];
	UpdData->PcdMemoryParameters.DIMMtRPtRCD = spd[18];
	UpdData->PcdMemoryParameters.DIMMtRRD = spd[19];
	UpdData->PcdMemoryParameters.DIMMtWTR = spd[26];
	UpdData->PcdMemoryParameters.DIMMtRTP = spd[27];
	UpdData->PcdMemoryParameters.DIMMtFAW = spd[28];
	/*If one need output from MRC to be used in Intel RMT, simply */
	/*enable the following line */
	//UpdData->PcdMrcDebugMsg = 1;
}
|
// gradle-plugins/toolbox
package dev.gradleplugins.runnerkit;
import java.io.File;
import java.io.Writer;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
public interface GradleRunner {
// TODO: Maybe have compatible APIs to TestKit Gradle Runner
/**
 * Creates a new runner that will execute builds through the supplied executor.
 * <p>
 * The concrete implementation ({@code dev.gradleplugins.runnerkit.GradleRunnerImpl})
 * is instantiated by name via the {@code ClassUtils} helper.
 *
 * @param executor the executor used to run Gradle builds
 * @return a new {@link GradleRunner} instance, never null
 */
static GradleRunner create(GradleExecutor executor) {
    return ClassUtils.newInstance("dev.gradleplugins.runnerkit.GradleRunnerImpl", new Class[] {GradleExecutor.class}, executor);
}
/**
 * Configures the runner to execute the build with the version of Gradle specified.
 * <p>
 * Unless previously downloaded, this method will cause the Gradle runtime for the version specified
 * to be downloaded over the Internet from Gradle's distribution servers.
 * The download will be cached beneath the Gradle User Home directory, the location of which is determined by the following in order of precedence:
 * <ol>
 * <li>The system property {@code "gradle.user.home"}</li>
 * <li>The environment variable {@code "GRADLE_USER_HOME"}</li>
 * </ol>
 * <p>
 * If neither are present, {@code "~/.gradle"} will be used, where {@code "~"} is the value advertised by the JVM's {@code "user.home"} system property.
 * The system property and environment variable are read in the process using the runner, not the build process.
 * <p>
 * Alternatively, you may use {@link #withGradleInstallation(File)} to use an installation already on the filesystem.
 * <p>
 * To use a non standard Gradle runtime, or to obtain the runtime from an alternative location, use {@link #withGradleDistribution(URI)}.
 *
 * @param versionNumber the version number (e.g. "2.9")
 * @return this
 * @see #withGradleInstallation(File)
 * @see #withGradleDistribution(URI)
 */
GradleRunner withGradleVersion(String versionNumber);
/**
 * Configures the runner to execute the build using the installation of Gradle specified.
 * <p>
 * The given file must be a directory containing a valid Gradle installation.
 * <p>
 * Alternatively, you may use {@link #withGradleVersion(String)} to use an automatically installed Gradle version.
 *
 * @param installation a valid Gradle installation
 * @return this
 * @see #withGradleVersion(String)
 * @see #withGradleDistribution(URI)
 */
GradleRunner withGradleInstallation(File installation);
/**
 * Configures the runner to execute the build using the distribution of Gradle specified.
 * <p>
 * The given URI must point to a valid Gradle distribution ZIP file.
 * This method is typically used as an alternative to {@link #withGradleVersion(String)},
 * where it is preferable to obtain the Gradle runtime from "local" servers.
 * <p>
 * Unless previously downloaded, this method will cause the Gradle runtime at the given URI to be downloaded.
 * The download will be cached beneath the Gradle User Home directory, the location of which is determined by the following in order of precedence:
 * <ol>
 * <li>The system property {@code "gradle.user.home"}</li>
 * <li>The environment variable {@code "GRADLE_USER_HOME"}</li>
 * </ol>
 * <p>
 * If neither are present, {@code "~/.gradle"} will be used, where {@code "~"} is the value advertised by the JVM's {@code "user.home"} system property.
 * The system property and environment variable are read in the process using the runner, not the build process.
 *
 * @param distribution a URI pointing at a valid Gradle distribution zip file
 * @return this
 * @see #withGradleVersion(String)
 * @see #withGradleInstallation(File)
 */
GradleRunner withGradleDistribution(URI distribution);
/**
 * The injected plugin classpath for the build.
 * <p>
 * The returned list is immutable.
 * Returns an empty list if no classpath was provided with {@link #withPluginClasspath(Iterable)}.
 *
 * @return the classpath of plugins to make available to the build under test
 */
List<? extends File> getPluginClasspath();
/**
 * Sets the plugin classpath based on the Gradle plugin development plugin conventions.
 * <p>
 * The 'java-gradle-plugin' generates a file describing the plugin under test and makes it available to the test runtime.
 * This method configures the runner to use this file.
 * Please consult the Gradle documentation of this plugin for more information.
 * <p>
 * This method looks for a file named {@code plugin-under-test-metadata.properties} on the runtime classpath,
 * and uses the {@code implementation-classpath} as the classpath, which is expected to be a {@link File#pathSeparatorChar} joined string.
 * If the plugin metadata file cannot be resolved an {@link InvalidPluginMetadataException} is thrown.
 * <p>
 * Plugins from classpath are able to be resolved using the <code>plugins { }</code> syntax in the build under test.
 * Please consult the TestKit Gradle User Manual chapter for more information and usage examples.
 * <p>
 * Calling this method will replace any previous classpath specified via {@link #withPluginClasspath(Iterable)} and vice versa.
 * <p>
 * <b>Note:</b> this method will cause an {@link InvalidRunnerConfigurationException} to be emitted when the build is executed,
 * if the version of Gradle executing the build (i.e. not the version of the runner) is earlier than Gradle 2.8 as those versions do not support this feature.
 * Please consult the TestKit Gradle User Manual chapter for alternative strategies that can be used for older Gradle versions.
 *
 * @return this
 * @see #withPluginClasspath(Iterable)
 * @see #getPluginClasspath()
 */
GradleRunner withPluginClasspath() throws InvalidPluginMetadataException;
/**
 * Sets the injected plugin classpath for the build.
 * <p>
 * Plugins from the given classpath are able to be resolved using the <code>plugins { }</code> syntax in the build under test.
 * Please consult the TestKit Gradle User Manual chapter for more information and usage examples.
 * <p>
 * <b>Note:</b> this method will cause an {@link InvalidRunnerConfigurationException} to be emitted when the build is executed,
 * if the version of Gradle executing the build (i.e. not the version of the runner) is earlier than Gradle 2.8 as those versions do not support this feature.
 * Please consult the TestKit Gradle User Manual chapter for alternative strategies that can be used for older Gradle versions.
 *
 * @param classpath the classpath of plugins to make available to the build under test
 * @return this
 * @see #getPluginClasspath()
 */
GradleRunner withPluginClasspath(Iterable<? extends File> classpath);
/**
 * Sets the working directory to use.
 *
 * @param workingDirectory the working directory to use, must not be null
 * @return a new {@link GradleRunner} instance configured with the specified working directory, never null
 */
GradleRunner inDirectory(File workingDirectory);
/**
 * Sets the working directory to use.
 *
 * @param workingDirectory the working directory to use, must not be null
 * @return a new {@link GradleRunner} instance configured with the specified working directory, never null
 */
GradleRunner inDirectory(Path workingDirectory);
/**
 * Sets the working directory to use using the supplier.
 * <p>
 * NOTE(review): the supplied value is presumably converted to a file-system
 * location by the implementation - confirm which types (File/Path/String)
 * the supplier may return.
 *
 * @param workingDirectorySupplier a working directory supplier to use, must not be null
 * @return a new {@link GradleRunner} instance configured with the specified working directory, never null
 */
GradleRunner inDirectory(Supplier<?> workingDirectorySupplier);
/**
 * The directory that the build will be executed in.
 * <p>
 * This is analogous to the current directory when executing Gradle from the command line.
 *
 * @return the directory to execute the build in
 * @throws InvalidRunnerConfigurationException if the working directory is not configured
 */
File getWorkingDirectory() throws InvalidRunnerConfigurationException;
/**
 * Executes the builds without adding the {@code "--stacktrace"} argument.
 *
 * @return a new {@link GradleRunner} instance configured without stacktrace, never null.
 */
GradleRunner withStacktraceDisabled();
/**
 * Activates the build cache. Defaults to disabled.
 *
 * @return a new {@link GradleRunner} instance configured with build cache, never null.
 */
GradleRunner withBuildCacheEnabled();
/**
 * Sets the task names to execute. Defaults to an empty list.
 * <p>
 * Convenience varargs overload that forwards to {@link #withTasks(List)}.
 *
 * @param tasks the tasks to execute
 * @return a new {@link GradleRunner} instance with the specified tasks to execute, never null.
 */
default GradleRunner withTasks(String... tasks) {
    final List<String> taskList = Arrays.asList(tasks);
    return withTasks(taskList);
}
/**
 * Sets the task names to execute. Defaults to an empty list.
 *
 * @param tasks the tasks to execute
 * @return a new {@link GradleRunner} instance with the specified tasks to execute, never null.
 * @see #withTasks(String...)
 */
GradleRunner withTasks(List<String> tasks);
/**
 * Sets the additional command-line arguments to use when executing the build. Defaults to an empty list.
 * <p>
 * Convenience varargs overload that forwards to {@link #withArguments(List)}.
 *
 * @param args the new arguments to use, the old ones are discarded
 * @return a new {@link GradleRunner} instance configured with the specified arguments, never null.
 */
default GradleRunner withArguments(String... args) {
    final List<String> argumentList = Arrays.asList(args);
    return withArguments(argumentList);
}
/**
 * Sets the additional command-line arguments to use when executing the build. Defaults to an empty list.
 *
 * @param args the new arguments to use, the old ones are discarded
 * @return a new {@link GradleRunner} instance configured with the specified arguments, never null.
 */
GradleRunner withArguments(List<String> args);
/**
 * Adds an additional command-line argument to use when executing the build.
 *
 * @param arg a new argument to append to the old ones
 * @return a new {@link GradleRunner} instance configured with the specified argument, never null.
 */
GradleRunner withArgument(String arg);
/**
 * The build arguments.
 * <p>
 * Effectively, the command line arguments to Gradle.
 * This includes all tasks, flags, properties etc.
 * <p>
 * The returned list is immutable.
 *
 * @return the build arguments
 */
List<String> getAllArguments();
/**
 * Uses the given settings file by adding {@code "--settings-file"} argument.
 *
 * @param settingsFile the settings file to use
 * @return a new {@link GradleRunner} instance configured with the specified settings file, never null.
 */
GradleRunner usingSettingsFile(File settingsFile);
/**
 * Disables the default behavior of creating an empty settings file before
 * execution when the settings file is missing.
 *
 * @return a new {@link GradleRunner} instance configured to ignore default behavior when settings file is missing, never null.
 */
GradleRunner ignoresMissingSettingsFile();
/**
 * Uses the given build script by adding {@code "--build-file"} argument.
 *
 * @param buildScript the build script file to use
 * @return a new {@link GradleRunner} instance configured with the specified build script file, never null.
 */
GradleRunner usingBuildScript(File buildScript);
/**
 * Uses the given init script by adding {@code "--init-script"} argument.
 *
 * @param initScript the init script file to use
 * @return a new {@link GradleRunner} instance configured with the specified init script file, never null.
 */
GradleRunner usingInitScript(File initScript);
/**
 * Uses the given project directory by adding the {@code "--project-dir"} argument.
 *
 * @param projectDirectory the project directory to use
 * @return a new {@link GradleRunner} instance configured with the specified project directory, never null.
 */
GradleRunner usingProjectDirectory(File projectDirectory);
/**
* Disable deprecation warning checks.
*
 * @return a new {@link GradleRunner} without deprecation checking enabled, never null.
*/
GradleRunner withoutDeprecationChecks();
/**
* Sets the default character encoding to use.
*
* @param defaultCharacterEncoding the default character encoding to use
* @return a {@link GradleRunner} instance configured with the specified character encoding, never null.
*/
GradleRunner withDefaultCharacterEncoding(Charset defaultCharacterEncoding);
/**
* Sets the default locale to use.
*
* @param defaultLocale the default locale to use
* @return a new {@link GradleRunner} instance configured with the specified locale, never null.
*/
GradleRunner withDefaultLocale(Locale defaultLocale);
/**
* Renders the welcome message users see upon first invocation of a Gradle distribution with a given Gradle user home directory.
* By default the message is never rendered.
*
* @return a new {@link GradleRunner} instance configured with the welcome message on first invocation, never null.
*/
GradleRunner withWelcomeMessageEnabled();
/**
* Publishes build scans to the public enterprise server for each build ran by this executer.
 * Calling this method implicitly accepts the Gradle Terms of Service.
 *
 * @return a new {@link GradleRunner} instance configured to publish build scans for each build executed, never null.
*/
GradleRunner publishBuildScans();
/**
* Sets the user's home dir to use when running the build. Implementations are not 100% accurate.
*
* @param userHomeDirectory the user home directory to use
* @return a new {@link GradleRunner} instance configured with the specified user home directory, never null.
*/
GradleRunner withUserHomeDirectory(File userHomeDirectory);
/**
* Sets the <em>Gradle</em> user home dir.
* Setting to null requests that the executer use the real default Gradle user home dir rather than the default used for testing.
*
* <p>Note: does not affect the daemon base dir.</p>
*
* @param gradleUserHomeDirectory the Gradle user home directory to use
* @return a new {@link GradleRunner} instance configured with the specified Gradle user home directory, never null.
*/
GradleRunner withGradleUserHomeDirectory(File gradleUserHomeDirectory);
/**
* Configures a unique Gradle user home directory for the test.
*
* The Gradle user home directory used will be underneath the {@link #getWorkingDirectory()} directory.
*
* <p>Note: does not affect the daemon base dir.</p>
*
 * @return a new {@link GradleRunner} instance configured with a unique Gradle user home directory, never null.
*/
GradleRunner requireOwnGradleUserHomeDirectory();
/**
* Sets the environment variables to use when executing the build. Defaults to the environment of this process.
*
* @param environmentVariables the environment variables to use
* @return a new {@link GradleRunner} instance configured with the specified environment variables, never null.
*/
GradleRunner withEnvironmentVariables(Map<String, ?> environmentVariables);
/**
* Adds an additional environment variable to use when executing the build.
*
* @param key the environment variable key
* @param value the environment variable value
* @return a new {@link GradleRunner} instance configured with the specified additional environment variable, never null.
*/
GradleRunner withEnvironmentVariable(String key, String value);
/**
* Sets the environment variables to use when executing the build. Defaults to the environment of this process.
* <p>
 * Convenience method to allow migration from the older Gradle Executer API.
*
* @param environmentVariables the environment variables to use
* @return a new {@link GradleRunner} instance configured with the specified environment variables, never null.
* @see #withEnvironmentVariables(Map)
*/
@Deprecated
GradleRunner withEnvironmentVars(Map<String, ?> environmentVariables);
/**
* Sets the environment variables to use when executing the build. Defaults to the environment of this process.
* <p>
* Convenience method to allow migration from Gradle Test Kit API.
*
* @param environmentVariables the environment variables to use
* @return a new {@link GradleRunner} instance configured with the specified environment variables, never null.
*/
default GradleRunner withEnvironment(Map<String, String> environmentVariables) {
    // TestKit-compatible alias; simply delegates to withEnvironmentVariables(Map).
    return withEnvironmentVariables(environmentVariables);
}
/**
* Forces the rich console output.
*
* @return a new {@link GradleRunner} instance configured with the rich console enabled, never null.
*/
GradleRunner withRichConsoleEnabled();
/**
* Configures current runner using the specified operator.
*
 * @return the {@link GradleRunner} instance returned by the configuration action, never null.
*/
GradleRunner configure(UnaryOperator<GradleRunner> action);
/**
* Configures the runner to forward standard output from builds to the given writer.
* <p>
* The output of the build is always available via {@link BuildResult#getOutput()}.
* This method can be used to additionally capture the output.
* <p>
* The given writer will not be closed by the runner.
* <p>
* When executing builds with Gradle versions earlier than 2.9 <b>in debug mode</b> and
* <b>using a tooling API-based executer</b>, any output produced by the build that was written
* directly to {@code System.out} or {@code System.err} will not be represented in {@link BuildResult#getOutput()}.
* This is due to a defect that was fixed in Gradle 2.9.
*
* @param writer the writer that build standard output should be forwarded to
* @return this
* @see #forwardStandardError(Writer)
*/
GradleRunner forwardStandardOutput(Writer writer);
/**
* Provided method for migration convenience from Gradle TestKit.
*
* @param writer the writer that build standard output should be forwarded to
* @return this
* @see #forwardStandardOutput(Writer)
*/
default GradleRunner forwardStdOutput(Writer writer) {
    // TestKit-compatible alias; simply delegates to forwardStandardOutput(Writer).
    return forwardStandardOutput(writer);
}
/**
* Configures the runner to forward standard error output from builds to the given writer.
* <p>
* The output of the build is always available via {@link BuildResult#getOutput()}.
* This method can be used to additionally capture the error output.
* <p>
* The given writer will not be closed by the runner.
*
* @param writer the writer that build standard error output should be forwarded to
* @return this
* @see #forwardStandardOutput(Writer)
*/
GradleRunner forwardStandardError(Writer writer);
/**
* Provided method for migration convenience from Gradle TestKit.
*
* @param writer the writer that build standard error output should be forwarded to
* @return this
* @see #forwardStandardError(Writer)
*/
default GradleRunner forwardStdError(Writer writer) {
    // TestKit-compatible alias; simply delegates to forwardStandardError(Writer).
    return forwardStandardError(writer);
}
/**
* Forwards the output of executed builds to the {@link System#out System.out} stream.
* <p>
* The output of the build is always available via {@link BuildResult#getOutput()}.
* This method can be used to additionally forward the output to {@code System.out} of the process using the runner.
* <p>
* This method does not separate the standard output and error output.
* The two streams will be merged as they typically are when using Gradle from a command line interface.
* If you require separation of the streams, you can use {@link #forwardStandardOutput(Writer)} and {@link #forwardStandardError(Writer)} directly.
* <p>
* Calling this method will negate the effect of previously calling {@link #forwardStandardOutput(Writer)} and/or {@link #forwardStandardError(Writer)}.
* <p>
 * Forwarding the output to the process using the runner is the default behavior for all executors.
* This method is provided for convenience when migrating from Gradle TestKit.
*
* @return this
* @since 2.9
* @see #forwardStandardOutput(Writer)
* @see #forwardStandardError(Writer)
*/
GradleRunner forwardOutput();
/**
* Adds an action to modify the Gradle runner before it is executed.
* The modification to the Gradle runner only affects the current run.
*
* @param action the action that will configure the runner further before execution.
* @return a new {@link GradleRunner} configured with the specified additional action, never null.
*/
GradleRunner beforeExecute(UnaryOperator<GradleRunner> action);
/**
* Adds an action to assert the outcome based on the execution context right after the execution.
*
* @param action the action that will assert based on the execution context after execution.
* @return a new {@link GradleRunner} configured with the specified additional action, never null.
*/
GradleRunner afterExecute(Consumer<GradleExecutionContext> action);
/**
* Executes a build, expecting it to complete without failure.
*
* @throws InvalidRunnerConfigurationException if the configuration of this runner is invalid (e.g. project directory not set)
* @throws UnexpectedBuildFailure if the build does not succeed
* @return the build result
*/
BuildResult build() throws InvalidRunnerConfigurationException, UnexpectedBuildFailure;
/**
* Executes a build, expecting it to complete with failure.
*
* @throws InvalidRunnerConfigurationException if the configuration of this runner is invalid (e.g. project directory not set)
* @throws UnexpectedBuildSuccess if the build succeeds
* @return the build result
*/
BuildResult buildAndFail() throws InvalidRunnerConfigurationException, UnexpectedBuildSuccess;
}
|
Conservative activist Kelly Monroe Kullberg recently formed Evangelicals for Biblical Immigration as a response to the pro-reform Evangelical Immigration Table, and her new organization has received support from immigration critics like the American Family Association.
Like other Religious Right groups such as Eagle Forum and Family Research Council, the AFA is a staunch opponent of immigration reform and spokesman Bryan Fischer has even called for the US to require that all immigrants convert to Christianity and enforce a ban on Muslim immigration.
AFA talk show host Sandy Rios spoke to Kullberg this week about the supposed dangers of the Senate reform bill, including the laughable claim that the legislation will lead to “open borders” and therefore a rise in sex and human trafficking.
She also warned that people of “other faiths” and “incompatible worldviews,” especially Muslims, will flood into America and “lessen the value on human life” in the US, making sex trafficking more acceptable. “As Islam takes root in America and other traditions take root, we are seeing a decline in human worth, human value and a rise in trafficking,” Kullberg said. |
Lotto nothing? The budgetary impact of state lotteries
Lottery revenues are often touted as an independent revenue source for states. Using 32 years of state financial data, the fallacy of such thinking is demonstrated. Being the first to control for the self-selection of being a lottery state, it is found that overall tax revenues decline with increased lottery sales. Moreover, it is discovered that this decline is driven by a decrease in revenues from general sales and excise taxes, which is only partially offset by increases in income tax receipts. Such findings are attributed to a combination of behavioural and political responses following the lottery's implementation. |
//
// Ensure there is a watch for the provider
// and inventory API kinds.
func (r *ProviderPredicate) ensureWatch(p *api.Provider) {
if !p.Status.HasCondition(libcnd.Ready) {
return
}
h, err := handler.New(r.client, r.channel, p)
if err != nil {
log.Trace(err)
return
}
err = h.Watch(&r.WatchManager)
if err != nil {
log.Trace(err)
return
}
} |
/**
* Converts an array of values into a rich text JSON array, accounting for
* the fact there may be only one or multiple.
*
* @param values
* the values to convert. Values are converted to instances of
* {@code RichText} via {@link #persuade(Object)}.
* @return a JSON array string containing all not {@code null} values of
* {@code texts}, {@code null} if {@code values} is {@code null} or
* the resulting JSON array would be empty.
*/
@Nullable
public static JsonArray toJson(@Nullable Iterable<?> values) {
if (values == null) {
return null;
}
JsonArray jsonTexts = new JsonArray();
Iterator<?> valuesI = values.iterator();
while (valuesI.hasNext()) {
Object value = valuesI.next();
if (value != null) {
jsonTexts.add(GSON.toJsonTree(persuade(value)));
}
}
return !jsonTexts.isEmpty() ? jsonTexts : null;
} |
/**
 * "THE BEERWARE LICENSE" (Revision 42):
 * Selicia wrote this code. As long as you retain this
 * notice, you can do whatever you want with this stuff. If we
 * meet someday, and you think this stuff is worth it, you can
 * buy me a beer in return.
 */
package selicia.autolog;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation for methods and constructors, retained at runtime.
 * NOTE(review): presumably consumed reflectively by the autolog machinery to
 * log annotated calls -- confirm against the processor in this package.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(value = { ElementType.METHOD, ElementType.CONSTRUCTOR })
public @interface AutoMethodLog {}
/**
* This method is called when the user presses the browse button for the original sequence file.
*/
private void browseSequencesPressed() {
File toOpen = null;
while (toOpen == null) {
JFileChooser jfc = new JFileChooser("/");
int returnVal = jfc.showOpenDialog(NetphosPredictionsParser.this);
if (returnVal == JFileChooser.APPROVE_OPTION) {
toOpen = jfc.getSelectedFile();
String path = null;
try {
path = toOpen.getCanonicalPath();
iSequences = InnerFastaReader.toHashMap(toOpen);
txtSequences.setText(path);
} catch (IOException ioe) {
logger.error(ioe.getMessage(), ioe);
JOptionPane.showMessageDialog(NetphosPredictionsParser.this, new String[]{"Unable to read '" + toOpen.getName() + "' as a FASTA sequences file!", ioe.getMessage()}, "Unable to load Netphos output file!", JOptionPane.ERROR_MESSAGE);
toOpen = null;
}
} else {
break;
}
}
} |
Diurnal activity patterns of the temporary fish ectoparasite, Gnathia africana Barnard, 1914 (Isopoda, Gnathiidae), from the southern coast of South Africa
Gnathiid isopods are one of the most common fish ectoparasites, and are found in both temperate and tropical oceans. On coral reefs, gnathiids are most active at dusk and dawn, and contribute significantly to trophic dynamics, as they are a prey resource for cleaner fish and parasitize numerous fishes. Gnathiids also inhabit temperate intertidal waters, but their activity patterns and contribution to intertidal trophic dynamics remain unstudied. To provide the first ecological data on temperate intertidal gnathiid activity patterns, 172 gnathiid-free Clinus superciliosus were set in an intertidal system in Tsitsikamma National Park, South Africa, during early morning, morning, afternoon, early evening, and evening, high and low tide, and within the inter- and infra-tidal zone to examine gnathiid infestation levels. After exposure, gnathiids from each fish were identified to the species level, counted, and their developmental stage was recorded. All gnathiids were identified as Gnathia africana. On average, 1 ± 5SD gnathiids were collected from each fish, and the majority of gnathiids collected were stage 1. Significantly more gnathiids were collected during morning and afternoon compared with all other time periods. The number of gnathiids collected was not influenced by the fish's exposure to high or low tide, or placement within the tide zone. Although G. africana is free from cleaner fish predation because cleaner fish do not reside in temperate intertidal habitat, G. africana abundance is surprisingly small. Future studies should examine what regulates G. africana population size and the role they play in temperate intertidal food webs. |
import React, { useEffect, useState } from "react";
import axios from "axios";
import Masonary from "react-masonry-css";
import { AppCard } from "../Card/AppCard";
import { useQuery } from "../../constants/breakpoints";
export const AppScreen = () => {
  const { isTablet, isMobile } = useQuery();
  const [apps, setApps] = useState([]);

  // Fetch the latest apps once on mount.
  useEffect(() => {
    axios.get("https://data.dlux.io/new").then(({ data }) => {
      setApps(data.result);
    });
  }, []);

  // 1 column on mobile, 2 on tablet, 3 on desktop.
  const columnCount = isTablet ? (isMobile ? 1 : 2) : 3;

  return (
    <Masonary
      breakpointCols={columnCount}
      className="masonry-grid sm:mx-4"
      columnClassName="masonry-grid_column"
    >
      {apps.map((app: any) => (
        <AppCard key={app.permlink} app={app} />
      ))}
    </Masonary>
  );
};
|
<reponame>house117/recurso7client
import { Component, OnInit } from "@angular/core";
import { ModalController } from "@ionic/angular";
import { NgModel } from "@angular/forms";
import { ActivatedRoute } from "@angular/router";
import { MenuController } from "@ionic/angular";
import { HttpClient, HttpHeaders } from "@angular/common/http";
import { HttpParams } from "@angular/common/http";
import { Router } from "@angular/router";
import { FormBuilder, FormGroup, Validators } from "@angular/forms";
import { LoadingController } from '@ionic/angular';
import { IonList, ToastController } from '@ionic/angular';
import { ViewChild } from '@angular/core';
@Component({
  selector: "app-dep-create",
  templateUrl: "./dep-create.page.html",
  styleUrls: ["./dep-create.page.scss"]
})
export class DepCreatePage implements OnInit {
  // Departamentos fetched from the API (shape defined by the backend).
  deps: any;
  // Form fields bound from the template.
  nombre: string;
  porcentaje: string;
  saldo: string;
  // True while the "creating" loading indicator is active.
  isLoading = false;
  @ViewChild('lista') lista: IonList;

  constructor(
    private activatedRoute: ActivatedRoute,
    private modalCtrl: ModalController,
    private toastCtrl: ToastController,
    private http: HttpClient,
    private router: Router,
    private loadingCtrl: LoadingController,
    private menuCtrl: MenuController
  ) { }

  ngOnInit() {
    // Load the existing departamentos for display.
    //let id = this.activatedRoute.snapshot.paramMap.get("id");
    this.http
      .get(`http://localhost:5000/api/departamentos/`)
      .subscribe(res => {
        this.deps = res;
      });
  }

  closeModal() {
    this.modalCtrl.dismiss();
  }

  /**
   * Posts the new departamento to the API and shows a progress loader.
   * Review fixes: removed the dead `headerDict`/`requestOptions`/`body`
   * locals (the POST always sent `data` without them) and corrected the
   * "saldo" log label.
   */
  createDep() {
    console.log("nombre: " + this.nombre);
    console.log("porcentaje: " + this.porcentaje);
    console.log("saldo: " + this.saldo);
    this.closeModal();
    const data = {
      nombre: this.nombre,
      porcentaje: this.porcentaje,
      saldo: this.saldo
    };
    this.http
      .post("http://localhost:5000/api/departamentos/", data)
      .subscribe(
        val => {
          console.log(
            "POST call successful value returned in body",
            val
          );
          this.present("Agregando departamento...");
        },
        response => {
          console.log("POST call in error", response);
        },
        () => {
          console.log("The POST observable is now completed.");
        }
      );
  }

  toggleMenu() {
    this.menuCtrl.toggle();
  }

  // Shows a 3s loading indicator, then navigates to the results page.
  // NOTE(review): the 'Porcentaje invalido!' toast fires whenever isLoading
  // is still true when the loader is shown, not on an actual validation
  // failure -- confirm the intended flow against the form validation.
  async present(message: string) {
    this.isLoading = true;
    return await this.loadingCtrl.create({
      message,
      duration: 3000,
    }).then(a => {
      a.present().then(() => {
        location.replace("/tabs-d/dep-results");
        if (this.isLoading) {
          a.dismiss().then(() => this.presentToast('Porcentaje invalido!'));
        }
      });
    });
  }

  async dismiss() {
    this.isLoading = false;
    return await this.loadingCtrl.dismiss().then(() => console.log('dismissed'));
  }

  // Shows a short-lived toast with the given message.
  async presentToast(message: string) {
    const toast = await this.toastCtrl.create({
      message,
      duration: 2000
    });
    toast.present();
  }
}
|
//function untuk menghapus buku dari linked list
//Perbaikan (review):
// - pointer node-sebelumnya hanya maju melewati node yang DIPERTAHANKAN,
//   sehingga menghapus dua node berurutan tidak lagi merusak list (kode lama
//   mengarahkan temp1 ke node yang baru saja dihapus);
// - node yang dihapus sekarang di-free (sebelumnya bocor).
void hapus_buku (buku *head) {
    buku current = *head;
    buku prev = NULL;   /* node terakhir yang tetap berada di dalam list */
    char nama_buku[100];
    int hapus = 0;
    printf("Masukkan judul buku yang ingin dihapus : ");
    scanf(" %[^\n]s", nama_buku);
    while (current != NULL) {
        buku next = current->next;
        if(strcmpi(nama_buku,current->nama_buku) == 0) {
            printf("\nbuku dengan judul %s telah dihapus!\n", current->nama_buku);
            hapus = 1;
            if (current == *head)
            {
                *head = next;
            }
            else {
                prev->next = next;
            }
            /* node sudah dilepas dari list; assumes nodes are malloc'd --
               confirm against the insert code before relying on this free */
            free(current);
            save(*head);
            system("pause");
        }
        else {
            /* hanya maju ketika node dipertahankan */
            prev = current;
        }
        current = next;
    }
    if(hapus == 0)
    {
        printf("\nBuku %s tidak ada\n", nama_buku);
        system("pause");
    }
    save(*head);
}
def mpirun(arg, mpiimpl=None, **kwargs):
    """Run *arg* through an MPI implementation's runner.

    Falls back to ``detect_mpi()`` when no implementation is supplied; any
    extra keyword arguments are forwarded to the implementation's
    ``mpirunner``.
    """
    impl = detect_mpi() if mpiimpl is None else mpiimpl
    return impl.mpirunner(arg, **kwargs)
/**
* Definition for a Connection.
* class Connection {
* public:
* string city1, city2;
* int cost;
* Connection(string& city1, string& city2, int cost) {
* this->city1 = city1;
* this->city2 = city2;
* this->cost = cost;
* }
*/
// Disjoint-set (union-find) with path compression. Tracks the number of
// remaining components in `size` so allConnected() is O(1).
// Review fixes: the class declaration was missing its closing semicolon
// (a compile error), and vector is now explicitly std-qualified.
class UnionFind {
    std::vector<int> graph;  // graph[i] is the parent of i; a root points to itself
    int size;                // number of disjoint components remaining
public:
    // Creates n singleton components {0}, {1}, ..., {n-1}.
    UnionFind(int n)
    {
        graph.resize(n);
        size = n;
        for(int i = 0; i < n; ++i)
        {
            graph[i] = i;
        }
    }
    // Returns the root representative of `node`, compressing the path so
    // subsequent lookups are near O(1).
    int find(int node)
    {
        if (graph[node] == node)
        {
            return node;
        }
        graph[node] = find(graph[node]);
        return graph[node];
    }
    // True when a and b are in the same component.
    bool query(int a, int b)
    {
        return find(a) == find(b);
    }
    // Merges the components containing a and b (no-op if already joined).
    void connect(int a, int b)
    {
        int root_a = find(a);
        int root_b = find(b);
        if (root_a != root_b)
        {
            graph[root_a] = root_b;
            size--;
        }
    }
    // True when every node belongs to a single component.
    bool allConnected()
    {
        return size == 1;
    }
};
package com.adform.trackingsdk.demoapp;
import android.app.Application;
import android.util.Log;
import com.adform.sdk.AdformAdobeExtension;
import com.adobe.marketing.mobile.AdobeCallback;
import com.adobe.marketing.mobile.InvalidInitException;
import com.adobe.marketing.mobile.Lifecycle;
import com.adobe.marketing.mobile.LoggingMode;
import com.adobe.marketing.mobile.MobileCore;
import com.adobe.marketing.mobile.Signal;
public class DemoApp extends Application {
    public static final String TAG = "DemoApp";
    /**
     * Bootstraps the Adobe Experience Platform SDK on app startup: registers
     * the Lifecycle, Signal and Adform extensions, then starts MobileCore and
     * configures it with the application id from the start callback.
     */
    @Override
    public void onCreate() {
        super.onCreate();
        MobileCore.setApplication(this);
        // Verbose SDK logging for this demo build.
        MobileCore.setLogLevel(LoggingMode.VERBOSE);
        try {
            Log.d(TAG, "Starting app registration");
            Lifecycle.registerExtension();
            Signal.registerExtension();
            AdformAdobeExtension.registerExtension();
            // NOTE(review): assumes the callback runs once core startup
            // completes -- confirm with the MobileCore.start contract.
            MobileCore.start(new AdobeCallback() {
                @Override
                public void call(Object o) {
                    Log.d(TAG, "Configuring MobileCore");
                    // Placeholder: replace <APP_ID> with the real app id.
                    MobileCore.configureWithAppID("<APP_ID>");
                }
            });
        } catch (InvalidInitException e) {
            Log.e(TAG, "Exception in init", e);
            e.printStackTrace();
        }
    }
}
|
package mondemand
// MessageType identifies the MonDemand message kind on the wire.
type MessageType string
const (
	// StatMsg and StatsMsgType share the same wire value; StatMsg appears to
	// be a legacy alias kept for backward compatibility -- confirm callers
	// before removing either.
	StatMsg = MessageType("MonDemand::StatsMsg")
	StatsMsgType = MessageType("MonDemand::StatsMsg")
	PerfMsgType = MessageType("MonDemand::PerfMsg")
	TraceMsgType = MessageType("MonDemand::TraceMsg")
)
|
import typing as tp
from racketinterpreter import constants
import racketinterpreter.classes.data as d
import racketinterpreter.classes.tokens as t
from racketinterpreter.processes import Interpreter
from racketinterpreter.processes import Lexer
from racketinterpreter.processes import Parser
class Util:
    @staticmethod
    def text_to_interpreter_result(
            text: str,
            should_log_scope: bool = False,
            should_log_stack: bool = False
    ) -> tp.Tuple[tp.List[d.Data], tp.List[tp.Tuple[bool, t.Token, d.Data, d.Data]]]:
        """Run source text through the full pipeline: lex, parse, interpret.

        :param text: Racket source code.
        :param should_log_scope: log scope changes during interpretation.
        :param should_log_stack: log stack changes during interpretation.
        :return: (program output, test results) from the interpreter.
        """
        constants.set_globals(should_log_scope=should_log_scope, should_log_stack=should_log_stack)

        lexer = Lexer(text)
        lexer.process()

        ast = Parser(lexer).parse()

        output, test_output = Interpreter().interpret(ast)
        return output, test_output
|
// Turns all lights green to signal that the field is safe to enter.
func (lights *Lights) SetFieldReset() {
lights.packets["red"].setAllColor("green")
lights.packets["blue"].setAllColor("green")
lights.sendLights()
} |
<filename>tools/exp/codegen/interface.go
package codegen
import (
"github.com/dave/jennifer/jen"
)
// FunctionSignature declares one method of a generated interface: its name,
// parameter and return-value code fragments, and an optional doc comment.
type FunctionSignature struct {
	Name string
	Params []jen.Code
	Ret []jen.Code
	Comment string
}
// Interface accumulates everything needed to render a Go interface type:
// a qualified reference to it, its name, method signatures, and doc comment.
type Interface struct {
	qual *jen.Statement
	name string
	functions []FunctionSignature
	comment string
}
// NewInterface builds an Interface definition for the named type in pkg,
// carrying the given method signatures and doc comment.
func NewInterface(pkg, name string,
	funcs []FunctionSignature,
	comment string) *Interface {
	iface := Interface{
		qual:      jen.Qual(pkg, name),
		name:      name,
		functions: funcs,
		comment:   comment,
	}
	return &iface
}
// Definition renders this interface as a jen type declaration, emitting the
// interface-level comment and each method's comment when present.
func (i Interface) Definition() jen.Code {
	code := jen.Empty()
	if i.comment != "" {
		code = jen.Comment(i.comment).Line()
	}
	methods := make([]jen.Code, 0, len(i.functions))
	for _, sig := range i.functions {
		m := jen.Empty()
		if sig.Comment != "" {
			m.Comment(sig.Comment).Line()
		}
		m.Id(sig.Name).Params(sig.Params...)
		if len(sig.Ret) > 0 {
			m.Params(sig.Ret...)
		}
		methods = append(methods, m)
	}
	return code.Type().Id(i.name).Interface(methods...)
}
|
CONCENTRATED ACID CONVERSION OF PINE SOFTWOOD TO SUGARS. PART I: USE OF A TWIN-SCREW REACTOR FOR HYDROLYSIS PRETREATMENT
The first stage of a two-step concentrated sulfuric acid process that converts softwood sawdust to sugars has been explored. The research focuses on the ability of an in-house custom fabricated corotating twin-screw reactor (TSR) to effectively break down and solubilize crystalline cellulose into low molecular weight carbohydrates. Based on design of experiment (DOE) screening results, a four-level, two-factor experimental model building DOE was undertaken. Solid sawdust conversion to liquid, screw torque, and TSR exit pressure were measured or recorded at each experimental condition to yield percent conversion of solids, processed material viscosity, and material energy requirements. Thereafter, model quadratic equations were fitted to the experimental data and found to be statistically significant. Based on data obtained in the DOE the process was optimized to establish a base case operating condition. The acid-treated product made at base case twin-screw operating conditions showed a 38.2% conversion of dry sawdust solids to soluble liquids. The dry solids conversion reduced 73.8% of all hemicellulose and 44.4% of all cellulose to soluble monomers and oligosaccharides. |
/**
 * Returns the method in its super interface that needs a bridge method delegating to {@code
 * bridgeMethod}.
 *
 * <p>If a method in the super interfaces of {@code type} is a method with more specific type
 * arguments, and it is overridden by a generic method, it needs a bridge method that delegates to
 * the generic method.
 */
private static MethodDescriptor findBridgeDueToAccidentalOverride(
    MethodDescriptor bridgeMethodDescriptor, TypeDeclaration typeDeclaration) {
  for (DeclaredTypeDescriptor superInterface :
      typeDeclaration.getTransitiveInterfaceTypeDescriptors()) {
    for (MethodDescriptor methodDescriptor : superInterface.getDeclaredMethodDescriptors()) {
      // The candidate must be the declaration descriptor itself (identity
      // check skips specialized/parameterized views of the same method),
      // must override the bridge method, yet NOT override the bridge
      // method's own declaration -- i.e. the override relationship only
      // holds for the specialized version, an accidental override.
      if (methodDescriptor == methodDescriptor.getDeclarationDescriptor()
          && methodDescriptor.isJsOverride(bridgeMethodDescriptor)
          && !methodDescriptor.isJsOverride(bridgeMethodDescriptor.getDeclarationDescriptor())) {
        return methodDescriptor;
      }
    }
  }
  // No accidental override found in any transitive super interface.
  return null;
}
Disney has officially found its Aladdin, Jasmine, and Genie for the live-action “Aladdin” remake. The three cast members were announced at the D23 Expo on Saturday.
The House of Mouse may have been having problems finding its titular Aladdin for the remake of the 1992 animated classic prior to D23 this weekend, but the same cannot be said for Jasmine and Genie. Naomi Scott has officially been cast as Princess Jasmine, while Will Smith will be taking on the Genie. Relative newcomer Mena Massoud has also joined the cast as Aladdin.
Guy Ritchie was previously announced as director.
John August, whose credits include “Big Fish,” wrote the new “Aladdin” script. Dan Lin, who produced Ritchie’s two “Sherlock Holmes” movies for Warner Bros., is also producing “Aladdin” through his Lin Pictures company. Jonathan Eirich is exec producing.
The original animated movie was a comic take on the Arabic folk tale of a young man granted three wishes by a genie trapped in a lamp.
“Aladdin” was the highest-grossing film of the year in 1992, and won Academy Awards for best score and best song for “A Whole New World.”
Scott was most recently seen as the Pink Ranger in Saban’s “Power Rangers.” Smith can be seen next in Netflix’s fantasy actioner “Bright.” He is repped by CAA and Overbrook Entertainment. |
class Exclusions:
    """Contains the exclusions configuration as an object.

    The configuration is validated against the exclusions schema, then each
    section ("users", "roles", "include-actions", ...) is normalized to
    lowercase once at construction time so the query methods below can do
    fast, case-insensitive comparisons.
    """

    def __init__(self, exclusions_config=DEFAULT_EXCLUSIONS_CONFIG):
        check_exclusions_schema(exclusions_config)
        self.config = exclusions_config
        self.include_actions = self._lowercased_section("include-actions")
        self.exclude_actions = self._lowercased_section("exclude-actions")
        self.roles = self._lowercased_section("roles")
        self.users = self._lowercased_section("users")
        self.groups = self._lowercased_section("groups")
        self.policies = self._lowercased_section("policies")

    def _lowercased_section(self, key):
        """Return the config list under ``key`` lowercased for comparisons.

        Returns an empty list when the section is absent or empty. Replaces
        the six near-identical per-section helpers from the original.
        """
        return [entry.lower() for entry in self.config.get(key) or []]

    def is_action_always_included(self, action_in_question):
        """
        Supply an IAM action, and get a decision about whether or not it is always included.

        :return: the action itself when always included, otherwise False
        """
        if self.include_actions and action_in_question.lower() in self.include_actions:
            return action_in_question
        return False

    def is_action_always_excluded(self, action_in_question):
        """
        Supply an IAM action, and get a decision about whether or not it is always excluded.

        :return: a boolean decision
        """
        if self.exclude_actions:
            return bool(
                is_name_excluded(action_in_question.lower(), self.exclude_actions)
            )
        return False  # pragma: no cover

    def is_policy_excluded(self, policy_name):
        """
        Supply a policy name or path, and get a decision about whether or not it is excluded.

        :param policy_name: Policy name or Policy path
        :return: a boolean decision
        """
        return bool(is_name_excluded(policy_name, self.policies))

    def is_principal_excluded(self, principal, principal_type):
        """
        Supply a principal name or path, and get a decision about whether or not it is excluded.

        :param principal: a principal name or path
        :param principal_type: User, Group, or Role
        :return: a boolean decision
        :raises Exception: if principal_type is not User, Group, or Role
        """
        if principal_type == "User":
            return bool(is_name_excluded(principal.lower(), self.users))
        elif principal_type == "Group":
            return bool(is_name_excluded(principal.lower(), self.groups))
        elif principal_type == "Role":
            return bool(is_name_excluded(principal.lower(), self.roles))
        else:  # pragma: no cover
            raise Exception(
                "Please supply User, Group, or Role as the principal argument."
            )

    def get_allowed_actions(self, requested_actions):
        """Given a list of actions, evaluate those actions against the exclusions
        configuration and return the list of actions remaining after filtering.

        Actions matching include-actions are always kept, even when they also
        match exclude-actions. Order is preserved; duplicates are removed.
        """
        # include_actions is already lowercased, so a membership test replaces
        # the original quadratic requested x include double loop.
        always_included = [
            action for action in requested_actions
            if action.lower() in self.include_actions
        ]
        not_excluded = [
            action for action in requested_actions
            if not is_name_excluded(action.lower(), self.exclude_actions)
        ]
        # dict.fromkeys dedupes while preserving first-seen order.
        return list(dict.fromkeys(always_included + not_excluded))
/**
* <p> This is the customer removal manager</p>
* @param targetCustomer the customer to be removed
* @since 1.0
*/
public void RemovePatron(Customer targetCustomer)
{
if(associate.containsKey(targetCustomer.GetEmailAddress()))
{
patron.remove(targetCustomer.GetEmailAddress());
associate.remove(targetCustomer.GetEmailAddress());
}
else
{
patron.remove(targetCustomer.GetEmailAddress());
}
} |
    /**
     * Decrements the use count of the control.
     *
     * <p>Synchronized on {@code mLock} so concurrent increment/decrement calls
     * observe a consistent count.
     *
     * @throws IllegalStateException if try to decrement the use count to less than zero
     */
    void decrementUseCount() {
        synchronized (mLock) {
            // A zero count means every increment was already matched by a
            // decrement; one more decrement indicates unbalanced callers.
            if (mUseCount == 0) {
                throw new IllegalStateException("Decrementing use count occurs more times than "
                        + "incrementing");
            }
            mUseCount--;
        }
    }
Police are collecting a blood sample from a Surrey man who bears an uncanny resemblance to an age-enhanced rendering of what missing person Michael Dunahee might look like today.
Dunahee vanished from the playground of Blanshard Elementary School in Victoria on March 24, 1991, when he was just four years old. The disappearance sparked one of the biggest investigations in Canadian history, and resulted in at least 11,000 tips.
On Wednesday, the Victoria Police Department confirmed it recently received a new tip about an unnamed Metro Vancouver resident who looks strikingly similar to a computer rendering of Dunahee created by the National Centre for Missing and Exploited Children.
Const. Mike Russell said ordering DNA testing is part of the department’s standard procedure, and authorities are not confident that the decades-old open investigation is nearing a close.
“We don’t believe that he is Michael,” Russell told CTV News. “But we do owe it to [Dunahee’s parents] Crystal and Bruce and everybody in B.C. and across Canada who’s followed this case over the last 20 years to just go that extra mile and make sure.”
Police have released few details about why they’re skeptical, but said it involves information investigators have gathered about the unidentified lookalike’s family background.
The Surrey man recently shared his story on an online Canucks fan forum, and the blog VanCity Buzz posted his picture Wednesday to highlight the facial similarities between him and Dunahee.
Russell said this case is just one of many instances when police have requested DNA evidence from an individual to confirm he’s not Dunahee.
Anyone with possible information about the missing man’s whereabouts can share it at the Missing Kids website. |
// tslint:disable:no-string-literal
// https://codecraft.tv/courses/angular/unit-testing/model-driven-forms/
import {
waitForAsync,
ComponentFixture,
TestBed,
getTestBed,
} from '@angular/core/testing';
import { ReactiveFormsModule, FormsModule } from '@angular/forms';
import { ActivatedRoute, RouterModule, Routes } from '@angular/router';
import { LoginComponent } from './login.component';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { Observable, of } from 'rxjs';
import { LogoutComponent } from '../logout/logout.component';
import { TranslateModule } from '@ngx-translate/core';
import {UserInterface} from '../../../_rms/interfaces/user/user.model';
import {AuthService} from '../../../_rms/services/auth/auth.service';
// Credentials accepted by the FakeAuthService test double below.
const fakeAuth = {
  email: '<EMAIL>',
  password: '<PASSWORD>',
};
// Minimal ActivatedRoute stub: the component only reads (empty) snapshot params.
const mockActivatedRoute = {
  snapshot: {
    params: {},
    queryParams: {},
  },
};
// Routes registered with the testing RouterModule so navigation calls resolve.
const fakeRoutes: Routes = [
  { path: 'auth/login', component: LoginComponent },
  { path: 'auth/logout', component: LogoutComponent },
  { path: '', redirectTo: 'auth/login', pathMatch: 'full' },
];
/**
 * Test double for AuthService: resolves a fixed user for the fakeAuth
 * credentials and `undefined` for anything else.
 */
class FakeAuthService {
  login(email: string, password: string): Observable<UserInterface> {
    // Guard clause (De Morgan of the original combined check).
    if (email !== fakeAuth.email || password !== fakeAuth.password) {
      return of(undefined);
    }
    const user = new UserInterface();
    user.username = 'admin';
    user.password = '<PASSWORD>';
    user.email = '<EMAIL>';
    return of(user);
  }
}
// Unit tests for LoginComponent's reactive form validation.
describe('LoginComponent', () => {
  let component: LoginComponent;
  let fixture: ComponentFixture<LoginComponent>;
  let injector;
  let authService: AuthService;
  // Configure the testing module with stubbed routing, translation and auth.
  beforeEach(waitForAsync(() => {
    TestBed.configureTestingModule({
      imports: [
        ReactiveFormsModule,
        FormsModule,
        HttpClientTestingModule,
        RouterModule.forRoot(fakeRoutes),
        TranslateModule.forRoot(),
      ],
      declarations: [LoginComponent],
      providers: [
        {
          provide: ActivatedRoute,
          useValue: mockActivatedRoute,
        },
        {
          provide: AuthService,
          useClass: FakeAuthService,
        },
      ],
    }).compileComponents();
    injector = getTestBed();
    // NOTE(review): injector.get is deprecated — TestBed.inject(AuthService)
    // is the modern equivalent. Also, authService is never used by the tests
    // below; confirm whether it can be dropped.
    authService = injector.get(AuthService);
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(LoginComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
    // NOTE(review): detectChanges() above already runs ngOnInit once; this
    // explicit call initializes the component a second time — confirm intended.
    component.ngOnInit();
  });
  it('should create', () => {
    expect(component).toBeTruthy();
  });
  // The form starts pre-populated, so it should be valid out of the box.
  it('form valid with default data', () => {
    expect(component.loginForm.valid).toBeTruthy();
  });
  it('email field validity', () => {
    let errors = {};
    const email = component.loginForm.controls.email;
    expect(email.valid).toBeTruthy();
    // Email field is required
    // Set empty email first
    email.setValue('');
    errors = email.errors || {};
    // tslint:disable-next-line:no-string-literal
    expect(errors['required']).toBeTruthy();
    // Set email to something too short and not email-shaped
    email.setValue('te');
    errors = email.errors || {};
    expect(errors['required']).toBeFalsy();
    expect(errors['email']).toBeTruthy();
    expect(errors['minlength']).toBeTruthy();
    // Set email to something correct
    email.setValue('<EMAIL>');
    errors = email.errors || {};
    expect(errors['required']).toBeFalsy();
    expect(errors['email']).toBeFalsy();
    expect(errors['minlength']).toBeFalsy();
    expect(errors['maxlength']).toBeFalsy();
  });
  it('password field validity', () => {
    let errors;
    const password = component.loginForm.controls['password'];
    expect(password.valid).toBeTruthy();
    // Too short: minlength validator should fire.
    password.setValue('12');
    expect(password.value).toBe('12');
    errors = password.errors || {};
    expect(errors['minlength']).toBeTruthy();
    // Empty: required validator should fire.
    password.setValue('');
    expect(password.value).toBe('');
    expect(password.valid).toBeFalsy();
    errors = password.errors || {};
    expect(errors['required']).toBeTruthy();
    // Set password to something correct
    password.setValue('<PASSWORD>');
    errors = password.errors || {};
    expect(errors['required']).toBeFalsy();
    expect(errors['minlength']).toBeFalsy();
    expect(errors['maxlength']).toBeFalsy();
  });
});
|
<reponame>PremierLangage/platon-front
import {
apply,
chain,
mergeWith,
move,
Rule,
template,
Tree,
url,
SchematicsException
} from '@angular-devkit/schematics';
import { strings } from '@angular-devkit/core';
import { Change, findNodes, InsertChange, insertImport } from '@nrwl/workspace/src/utils/ast-utils';
// import { Change, InsertChange } from '@schematics/angular/utility/change';
import { getWorkspace } from '@schematics/angular/utility/config';
import { join } from 'path';
import * as ts from 'typescript';
function readSourceFile(tree: Tree, filePath: string) {
const registryFileContent = tree.read(filePath)?.toString('utf8') as string;
return ts.createSourceFile(
filePath.split('/').pop() as string,
registryFileContent,
ts.ScriptTarget.Latest,
true
);
}
function addImport(args: {
tree: Tree,
filePath: string;
symbolName: string,
importPath: string,
}): Rule {
const source = readSourceFile(
args.tree,
args.filePath,
);
const change = insertImport(
source,
args.filePath,
args.symbolName,
args.importPath
) as InsertChange;
return tree => {
const recorder = tree.beginUpdate(args.filePath);
recorder.insertLeft(change.pos, change.toAdd);
tree.commitUpdate(recorder);
return tree;
};
}
/**
 * Builds a Rule that appends `toAdd` as the last element of the array literal
 * initializing the top-level variable named `arrayName` in `filePath`.
 *
 * The insertion position and separating comma are derived from the AST, so
 * both empty arrays and arrays with/without a trailing comma are handled.
 */
function addElementInArray(args: {
    tree: Tree,
    arrayName: string;
    filePath: string;
    toAdd: string
}): Rule {
    return tree => {
        const source = readSourceFile(
            args.tree,
            args.filePath,
        );
        // Scan every top-level `const/let/var` statement for the target array.
        const keywords = findNodes(source, ts.SyntaxKind.VariableStatement);
        const changes: Change[] = [];
        for (const keyword of keywords) {
            if (ts.isVariableStatement(keyword)) {
                const [ declaration ] = keyword.declarationList.declarations;
                if (
                    ts.isVariableDeclaration(declaration) &&
                    declaration.initializer &&
                    declaration.name.getText() === args.arrayName
                ) {
                    // Child 1 of the array literal is its element SyntaxList.
                    const node = declaration.initializer.getChildAt(1);
                    const lastToken = node.getLastToken();
                    let commas = ',';
                    let pos = node.getEnd() + 1;
                    if (lastToken) {
                        // Non-empty array: insert after the last element,
                        // adding a separating comma if one is not present.
                        let trailingCommaFound = false;
                        if (lastToken.kind === ts.SyntaxKind.CommaToken) {
                            trailingCommaFound = true;
                        } else {
                            changes.push(new InsertChange(args.filePath, lastToken.getEnd(), ','));
                        }
                        commas = trailingCommaFound ? ',' : '';
                        pos = lastToken.getEnd() + 1;
                    }
                    changes.push(new InsertChange(
                        args.filePath,
                        pos,
                        args.toAdd + commas + '\n'
                    ));
                    // Only the first matching declaration is modified.
                    break;
                }
            }
        };
        // Apply all recorded insertions in a single update transaction.
        const recorder = tree.beginUpdate(args.filePath);
        for (const change of changes) {
            if (change instanceof InsertChange) {
                recorder.insertLeft(change.pos, change.toAdd);
            }
        }
        tree.commitUpdate(recorder);
        return tree;
    };
}
/**
 * Builds a Rule registering the component's lazy-loaded module in the
 * WEB_COMPONENTS_BUNDLES array of the registry file.
 */
function addComponentToRegistry(tree: Tree, schema: SchematicOptions): Rule {
    const selector = `wc-${schema.name}`;
    const moduleClass = `${strings.classify(schema.name)}Module`;
    const entry = ` { selector: '${selector}', loadChildren: () => import( /* webpackChunkName: "${selector}" */ '${schema.modulePath}.module').then(m => m.${moduleClass}) }`;
    return addElementInArray({
        tree,
        toAdd: entry,
        filePath: schema.registryFilePath,
        arrayName: 'WEB_COMPONENTS_BUNDLES',
    });
}
/**
 * Builds a Rule registering the component's definition as a multi-provider in
 * the WEB_COMPONENTS_REGISTRY array of the registry file.
 */
function addComponentToProviders(tree: Tree, schema: SchematicOptions): Rule {
    const definition = `${strings.classify(schema.name)}ComponentDefinition`;
    const entry = ` { provide: WEB_COMPONENT_DEFINITIONS, multi: true, useValue: ${definition} }`;
    return addElementInArray({
        tree,
        toAdd: entry,
        filePath: schema.registryFilePath,
        arrayName: 'WEB_COMPONENTS_REGISTRY',
    });
}
/**
 * Schematic entry point: scaffolds a new web component (form or widget),
 * registers its lazy bundle and provider entry in the registry file, and
 * copies the `./files` templates into the library source tree.
 *
 * Fixes: removed leftover `console.log(schema)` debug output; simplified
 * `substring(3, name.length)` to `substring(3)`; added missing semicolon.
 */
export default function (schema: SchematicOptions): Rule {
    if (!schema.name) {
        throw new SchematicsException('name option is required.');
    }
    if (!schema.type) {
        throw new SchematicsException('type option is required.');
    }
    return (tree: Tree) => {
        const workspace = getWorkspace(tree);
        const components = workspace.projects['feature-web-component'];
        const sourceRoot = components.sourceRoot as string;
        schema.name = strings.dasherize(schema.name);
        // Accept either "wc-foo" or "foo": strip the selector prefix.
        if (schema.name.startsWith('wc-')) {
            schema.name = schema.name.substring(3);
        }
        schema.modulePath = `./${schema.type}s/${schema.name}/${schema.name}`;
        schema.registryFilePath = 'libs/feature/web-component/src/lib/web-component-registry.ts';
        // Instantiate the file templates into <sourceRoot>/lib/<type>s/<name>.
        const sources = apply(url('./files'), [
            template({
                ...strings,
                ...schema,
            }),
            move(join(sourceRoot, 'lib', schema.type + 's', schema.name)),
        ]);
        return chain([
            addImport({
                tree,
                filePath: schema.registryFilePath,
                symbolName: `${strings.classify(schema.name)}ComponentDefinition`,
                importPath: schema.modulePath,
            }),
            addComponentToRegistry(tree, schema),
            addComponentToProviders(tree, schema),
            mergeWith(sources)
        ]);
    };
}
/** Options accepted by the web-component schematic. */
export interface SchematicOptions {
    // Component name, with or without the "wc-" prefix (dasherized internally).
    name: string;
    // Kind of component to scaffold; determines the target subdirectory.
    type: 'form' | 'widget';
    // Path of the registry file updated with the new bundle/provider entries.
    registryFilePath: string;
    // File name of the registry file.
    registryFileName: string;
    // Relative module path of the generated component (set by the schematic).
    modulePath: string;
}
|
<gh_stars>10-100
import {
Component,
ChangeDetectionStrategy,
OnInit,
OnDestroy,
AfterViewInit
} from "@angular/core";
import { MegaStructure } from "src/app/model/units/megaStructure";
import { BaseComponentComponent } from "src/app/base-component/base-component.component";
import { CdkDragDrop, moveItemInArray } from "@angular/cdk/drag-drop";
@Component({
  selector: "app-auto-mega",
  templateUrl: "./auto-mega.component.html",
  styleUrls: ["./auto-mega.component.scss"],
  changeDetection: ChangeDetectionStrategy.OnPush
})
/**
 * Editor for the mega-structure automation queue: rows can be added, removed
 * and re-ordered via CDK drag & drop.
 */
export class AutoMegaComponent
  extends BaseComponentComponent
  implements OnInit, OnDestroy, AfterViewInit {
  // Upper bound on queue rows.
  // NOTE(review): add() rejects only when length > maxQueue, so the queue can
  // grow to maxQueue + 1 entries — confirm whether ">=" was intended.
  maxQueue = 20;
  ngOnInit(): void {
    // Nothing
  }
  ngOnDestroy(): void {
    // Drop incomplete rows (no structure selected, or zero quantity) when the
    // view goes away so only actionable entries persist.
    this.ms.game.automationManager.autoMega.megaQueue = this.ms.game.automationManager.autoMega.megaQueue.filter(
      (q) => q.mega && q.quantity > 0
    );
    super.ngOnDestroy();
  }
  // Appends an empty row to the queue, briefly disabling list animations so
  // the insertion does not trigger a transition.
  add() {
    if (
      this.ms.game.automationManager.autoMega.megaQueue.length > this.maxQueue
    ) {
      return false;
    }
    this.animationDisabled = true;
    this.ms.game.automationManager.autoMega.megaQueue.push({
      mega: null,
      quantity: 1
    });
    // Re-enable animations on the next macrotask, after the DOM update.
    setTimeout(() => {
      this.animationDisabled = false;
    });
  }
  // Reorders the queue to match the drop position.
  // eslint-disable-next-line @typescript-eslint/ban-types
  drop(event: CdkDragDrop<{}>) {
    moveItemInArray(
      this.ms.game.automationManager.autoMega.megaQueue,
      event.previousIndex,
      event.currentIndex
    );
  }
  // trackBy function for mega structures (index required by the trackBy signature).
  getMegaId(index: number, mega: MegaStructure) {
    return mega.id;
  }
  // trackBy function for queue rows; falls back to the index for empty rows.
  getQId(index: number, megaQ: any) {
    return megaQ.mega ? megaQ.mega.id : index;
  }
  // Removes the given row from the queue.
  delete(mq: any) {
    this.ms.game.automationManager.autoMega.megaQueue = this.ms.game.automationManager.autoMega.megaQueue.filter(
      (q) => q !== mq
    );
  }
}
|
Get the biggest daily news stories by email Subscribe Thank you for subscribing We have more newsletters Show me See our privacy notice Could not subscribe, try again later Invalid Email
Large numbers of NHS nurses are quitting because of staff shortages and poor pay, it is claimed today.
Britain’s top nurse Janet Davies spoke out as it emerged the health service faced an “unprecedented” crisis with the number of unfilled posts doubling in three years to 40,000.
It comes as new polling suggests major public concern for hospital safety with seven in 10 people believing nurses are underpaid and similar numbers saying there are not enough of them.
Ms Davies, general secretary of the Royal College of Nursing , said: “There is a perfect storm engulfing nursing and the stakes could scarcely be higher.
(Image: Getty)
(Image: Daily Mirror)
“After years of warnings, the nursing profession is officially shrinking. The best nurses feel forced to throw in the towel because of unprecedented staff shortages, relentless pressure and poor pay.
"The NHS is being dragged down by the worst nursing shortage in its history. Ministers cannot be caught idle.”
She added: “Experienced nursing staff are leaving in droves, not because they don’t like the job, but because they can’t afford to stay.”
Today the RCN will stage a protest outside Parliament against the below-inflation 1% public sector pay cap. It has caused nursing pay to fall by 14% – about £3,000 – in real terms since 2010. About 2,000 health workers will take a day of leave to participate.
(Image: Getty)
(Image: Getty)
(Image: Getty)
It has also emerged they have support from the public. A YouGov poll of 1,624 people found 72% believe there are too few nurses to provide safe care.
Some 68% said nurses were underpaid – including 58% who vote Tory. And 57% said they would be willing to pay more tax to make the NHS safer, including a majority of Tories.
Pressure is mounting on Theresa May and Health Secretary Jeremy Hunt after reports suggested the PM planned to lift the pay cap for nurses, teachers and other public sector staff later this year.
Shadow Health Secretary Jon Ashworth said: “The Tory Government has taken NHS staff for granted for years and the result is staffing shortages across the health service and even longer waits for patients.”
(Image: Getty)
(Image: Birmingham Mail)
A&E waiting time targets have been consistently missed under the Tories.
In January we told how a one-year-old boy with suspected meningitis spent five hours on a makeshift bed of two chairs.
The NHS does not publish national data on nurse vacancies. But the RCN found 40,000 vacant posts earlier this year after freedom of information requests to trusts. That figure stood at 20,000 in 2013.
As of March 2017, the NHS employed 285,893 nurses and health visitors. Last year it spent £60million on agency staff. Fully qualified nurses start on a salary of about £22,000.
Now the RCN is warning industrial action could be an option if the pay cap is not lifted. Ms Davies said: “If the Government fails to announce a change of direction in the Budget, industrial action by nursing staff immediately goes on the table.” The warning comes after junior doctors went on strike last year in a contract dispute.
A Department of Health spokesman said: "We are helping the NHS to make sure it has the right staff, in the right place, at the right time to provide safe care — that's why there are over 31,100 more professionally qualified clinical staff, including over 11,600 more doctors, and almost 12,000 more nurses on our wards since May 2010.”
The spokesman added: “Support and welfare of NHS staff is a top priority, and the government is committed to ensuring they can continue to deliver world-class patient care.” |
    def decode_frame(self):
        """Decode the next frame of the animation.

        :return: tuple ``(arr, timestamp_ms)`` where ``arr`` is a
            (height, width, 4) uint8 numpy array of pixel data and
            ``timestamp_ms`` is the frame's presentation timestamp.
        :raises WebPError: if libwebp reports a decoding failure.
        """
        timestamp_ptr = ffi.new('int*')
        buf_ptr = ffi.new('uint8_t**')
        if lib.WebPAnimDecoderGetNext(self.ptr, buf_ptr, timestamp_ptr) == 0:
            raise WebPError('decoding error')
        # 4 bytes per pixel — assumes a 4-channel (RGBA-style) output mode was
        # configured on the decoder; TODO confirm against the decoder setup.
        size = self.anim_info.height * self.anim_info.width * 4
        buf = ffi.buffer(buf_ptr[0], size)
        # Copy out of the decoder-owned buffer: libwebp may reuse/invalidate it
        # on the next WebPAnimDecoderGetNext call.
        arr = np.copy(np.frombuffer(buf, dtype=np.uint8))
        arr = np.reshape(arr, (self.anim_info.height, self.anim_info.width, 4))
        timestamp_ms = timestamp_ptr[0]
        return arr, timestamp_ms
Book Reviews : Political Terror in Communist Systems. By ALEXANDER DALLIN and GEORGE W. BRESLAUER. (Stanford: Stanford University Press, 1970. Pp. xi, 172. $5.95.)
chapters. The third chapter is unique because it consists of a working paper prepared by Dexter for a Law and Psychiatry Project in 1959. Dexter also includes a short chapter prepared by Charles Morrissey dealing with Oral History Interviewing and a chapter built around Dean’s and Whyte’s 1958 Human Organization article, “How Do You Know If the Informant Is Telling the Truth?” Dexter makes his most important contribution in the final chapter where he begins to develop a “transactional theory of interviewing.” His concern is with the
def add_booking(timeslot) -> None:
    """Record one more booking on ``timeslot`` (a dict with ``number``,
    ``capacity`` and ``booked`` keys), marking it booked once the new count
    reaches capacity."""
    new_count = timeslot["number"] + 1
    timeslot["number"] = new_count
    if new_count >= timeslot["capacity"]:
        timeslot["booked"] = True
Antibiotic overuse remains a problem in the U.S., researchers say
Public health officials worry about antibiotic overuse and misuse because they can promote the development of drug-resistant strains of bacteria that are invulnerable to medical treatment. Some worry that the U.S. may be headed to a “post-antibiotic” era, when the medicines that have worked so well to wipe out infectious disease no longer remain effective.
Overall, the rate of antibiotic prescriptions in the U.S. declined from 966 prescriptions for each 1,000 residents in 1999 to 801 in 2010.
According to the Washington-based Center for Disease Dynamics, Economics and Policy , Kentucky, West Virginia, Tennessee, Mississippi and Louisiana were the states with the highest rates of antibiotic use in 2010. Those states had more than one antibiotic prescription per capita in 2010. The states with the lowest use of antibiotics that year were Alaska, Hawaii, California, Oregon and Washington, with just over one prescription for every two people.
Patterns of antibiotic overuse in the U.S. vary by region, with residents of some Southeastern states taking about twice as many antibiotics per capita as residents in some Western states.
The new data on antibiotic use, which are available for viewing on the center's "resistance map," came from new analysis by the center's Extending the Cure project, which tracks antibiotic resistance over time, incorporating data from bacterial cultures from labs nationwide as well as prescription data from U.S. pharmacies. The map allows users to examine antibiotic use and bacterial resistance on a state-by-state and microbe-by-microbe basis.
The map was unveiled Tuesday as part of Get Smart About Antibiotics Week — an annual event that focuses on antibiotic resistance. That morning, representatives from the 25 health organizations that signed on to a new policy statement regarding antibiotic overuse spoke with reporters about some of the findings, noting that infections that once were easy to treat, including urinary tract infections, are now resistant to medication.
"It's something we see on a weekly basis in our emergency room," said Dr. Sara Cosgrove of the Johns Hopkins Hospital in Baltimore. "We have women who need to be admitted to the hospital for a urinary tract infection. In the past we would have sent them home with a pill."
While research is continuing on new antibiotics designed to wipe out so-called super bugs, the rate of misuse outflanks the progress of new drug development, said Ramanan Laxminarayan, director of Extending the Cure.
"It cannot keep up," he said, recommending that doctors and patients take precautions to prevent antibiotic overuse, including by encouraging hand washing and flu vaccination (which, by preventing infection with the influenza virus, reduces the number of secondary bacterial infections.)
One factor the experts did not address in great detail were efforts to cut the use of antibiotics in U.S. livestock, a widespread practice that promotes animal growth but also contributes to drug resistance.
On Monday, Consumers Union, publisher of Consumer Reports, called for a "major reduction in the use of antibiotics in food animal production." About 80% of all antibiotics sold in the U.S. are used on food animals, the consumer group said. It also reported that earlier this year, the U.S. Food and Drug Administration asked livestock growers to voluntarily cut antibiotic use in animals to promote growth. |
"Gov. Walker and his administration should stop ignoring the workers and children at Lincoln Hills and work to face the problems there head-on," said Democratic Party of Wisconsin Chair Martha Laning.
MADISON – As Gov. Scott Walker returns home from a trip to Cuba this week, a crisis continues on at the Lincoln Hills School for Boys for juveniles at the facility, staff, and their families. Investigative news reports highlight a trend of troublesome incidents at the facility, which were brought to the executive’s office multiple times, persisted for years and in some cases continue to grow worse. [1]
“I can’t imagine what the parents of juveniles located at Lincoln Hills go through each night knowing that the problems revealed to the public a year ago have not been properly addressed by Gov. Walker and his administration,” Democratic Party of Wisconsin Chair Martha Laning said on Saturday. “The way Gov. Walker and his administration have treated some of Wisconsin’s most vulnerable citizens is completely unacceptable. He still hasn’t visited the facility to this day, but he’ll wind down the year after making a trip to an island over 1,700 miles away.”
According to media reports, Scott Walker’s office knew about problems at Lincoln Hills dating all the way back to 2012, including a letter sent by a Racine Judge detailing a sexual assault at the state’s youth prison. The Governor’s office was alerted multiple times in 2014 about problems at the facility as well, however, safety concerns persisted and no changes were made in leadership until well after the Department of Justice’s raid in 2015. [2] [3]
“Gov. Walker and his administration should stop ignoring the workers and children at Lincoln Hills and work to face the problems there head-on. Several Democratic lawmakers have toured the facility and spoken with the people who work and live there. Governor Walker should do the same and plan to work with Democrats in the next session to eliminate the numerous safety concerns that arise at all of our state’s correction’s facilities due to staff shortages and pay cuts.”
Background
[1] Crisis at Lincoln Hill juvenile prison years in making, Milwaukee Journal Sentinel
For years, officials knew or should have known about the thicket of problems at Lincoln Hills and its sister facility on the same campus, Copper Lake School for Girls. “It all went on in plain view of the Department of Corrections, but nobody at the Department of Corrections knew how juvenile corrections worked or how Lincoln Hills operated or what was going on,” said Troy Bauch, who until recently was the union representative for workers there. “Nobody cared.” [12/30/16]
[2] Walker’s office heard multiple warnings about Lincoln Hills, Milwaukee Journal Sentinel
Gov. Scott Walker’s office was told multiple times over the past year about problems at a troubled juvenile prison in northern Wisconsin, including claims of violence against youths and staff, inadequate classroom time, and the need to improve sexual assault safeguards, documents show. [12/17/15]
[3] Scott Walker’s office warned in 2012 of safety issues at Lincoln Hills, Wisconsin State Journal
A Racine County judge warned Gov. Scott Walker’s office in 2012 of severe safety issues at the state’s youth prison, and later the county stopped sending its juvenile offenders to the Irma facility that is now the subject of a criminal investigation. [2/12/16] |
def makeID(board):
    # Atomically bump the shared counter document, creating it on first use.
    board.find_one_and_update({'_id':'count'}, {'$inc':{'count':1}}, upsert=True)
    # NOTE(review): this read is a separate round-trip from the increment, so a
    # concurrent caller can increment in between and two callers may observe the
    # same count. find_one_and_update(..., return_document=ReturnDocument.AFTER)
    # would return the freshly incremented value atomically — confirm and switch.
    return board.find_one({'_id':'count'}).get('count')
/*!
 * Copy the contents of @from to @to
 */
void
copy_genome (const array<genosect_t, N_Genes>& from, array<genosect_t, N_Genes>& to)
{
    // std::array's copy assignment performs the same element-wise copy as the
    // explicit index loop it replaces.
    to = from;
}
Comparison between laparotomy and operative laparoscopy in the treatment of moderate and severe stages of endometriosis.
A retrospective study was designed to compare the results of treatment of moderate and severe stages of endometriosis by laparotomy and by laparoscopy. Patients were divided into three groups: the first consisted of 42 patients treated by laparotomy followed immediately with danazol treatment for 6-9 months; the second, 44 patients treated by operative laparoscopy followed immediately with danazol treatment for 4-6 months; and the third, 62 patients treated by operative laparoscopy followed immediately with danazol treatment for 6-10 weeks. The cumulative pregnancy rate in the laparoscopy groups was better than that of the laparotomy group. Most patients who failed to conceive underwent a second-look laparoscopy for reevaluation. Residual endometriosis and associated adhesions were noticed least in the third group. It is concluded that operative laparoscopy could be efficiently used for the treatment of moderate or even severe endometriosis. |
    /**
     * Adds the Gitlab user to the groups. It will be given a different access level
     * based on the group type (instructors are given the MAINTAINER level and teaching
     * assistants REPORTER).
     *
     * @param gitlabUserId the user id of the Gitlab user
     * @param groups the new groups
     */
    private void addUserToGroups(int gitlabUserId, Set<String> groups) {
        if (groups == null || groups.isEmpty()) {
            // Nothing to do: the user is not a member of any group.
            return;
        }
        // Each exercise whose instructor/editor/TA group matches one of the
        // user's groups corresponds to a Gitlab project the user needs access to.
        List<ProgrammingExercise> exercises = programmingExerciseRepository.findAllByInstructorOrEditorOrTAGroupNameIn(groups);
        for (var exercise : exercises) {
            Course course = exercise.getCourseViaExerciseGroupOrCourseMember();
            // The access level is derived from the user's role in the course;
            // users without a matching role get no access (empty Optional).
            Optional<AccessLevel> accessLevel = getAccessLevelFromUserGroups(groups, course);
            accessLevel.ifPresent(level -> addUserToGroup(exercise.getProjectKey(), gitlabUserId, level));
        }
    }
CommonSpace columnist Steve Topple questions how much unions in the UK today are really representing workers' interests
IT WAS an interesting weekend for the debate surrounding the renewal of Trident.
A massive/average (delete depending on which side of the fence you sit on) demonstration took place in London on Saturday, with political leaders and campaign groups occupying Trafalgar Square in a show of unity regarding supporting its abolition.
More interesting, however, was an interview on BBC Sunday Politics Scotland with the leader of the Scottish GMB union, Gary Smith, making a feverish diatribe in support of the nuclear "deterrent", after the union itself issued a stark warning to Labour on Thursday about the threat to jobs.
Trade unions generally appear to be having a crisis of character at the moment.
Referring to anti-Trident MPs and MSPs, Smith called their position "an indulgent debate being played out by people who are happy doing student politics. This was a policy position that was hatched over lattes in Islington and Holyrood, and we have the whole Scottish political elite in direct confrontation with the organised working class".
Nearly as intriguing as his assertion that anti-Trident supporters the Greens and Rise are the "Scottish political elite" are his presumptions that this is "diversionary politics" and that he speaks for the "organised working class" (an assertion oft made by those who actually don't).
Diversionary? Yes, if you ignore the fact that the Trident system is becoming more obsolete as each year passes (note the US developing anti-submarine drones and Russia's upcoming unmanned, underwater defence system), that David Cameron will be holding a vote on the subject after the EU referendum and that 48 per cent of Scots want the system scrapped .
However, it is of course the GMB Union which openly flirts with fracking , under the wafer-thin guise of "jobs for workers" and energy security for the UK; ignoring the fact that the US shale industry is majorly funded by high yield or 'junk bonds' - toxic debt which caused, in part, the last financial crash - and conveniently forgetting fracking is linked to banks like HSBC which have been responsible for plundering millions of pounds from the UK taxpayer.
The unions in the UK maybe should've gone to a well-known high street optician, as their short-sightedness is blindly obvious.
But then, trade unions generally appear to be having a crisis of character at the moment. Take Usdaw, the UK's fourth largest union. Obsequious in their pandering to Tesco, the terms and conditions of being a union representative for them openly states : "All representatives must be mutually acceptable to Tesco and Usdaw."
I'm sorry - but did I miss the seventh point on the People's Charter where it says 'forelock-tugging of the Bosses'?
This bizarre statement, which fundamentally goes against everything trade unionism should stand for, seems all the more pertinent when viewed in terms of Usdaw's response to last year's announcement by Tesco of 2,000 possible job losses: "Our priority is to maximise employment within Tesco. Where redundancy is unavoidable ... (we will) support members through the joint lifelong learning initiative with Tesco."
Is there a hidden agenda at work? You could also include the PCS union in this rollcall of 'workers' organisations who don't appear to be standing up for the workers' - they have come in for increasing criticism from campaign groups for their failure to act on numerous scandals within the DWP, most notably their inaction on sanctioning of welfare claimants and the big business benefit-scheme 'Workfare'.
And as for Unison? An organisation which was mired in controversy over possible election rigging last year, and is currently (I'm told, by anonymous sources) awash with rumours that it is running a campaign to silence dissenters within its ranks.
While espousing to support worker's jobs they are actually doing no such thing; their incessant capitulation to big business and government is proving to be nothing but detrimental to those they claim to want to protect.
The unions in the UK maybe should've gone to a well-known high street optician, as their short-sightedness is blindingly obvious.
While espousing to support worker's jobs, livelihoods and communities - they are actually doing no such thing; their incessant capitulation to big business and government, ineffective facilitating of a structured, cross-organisational national level programme of action and general hand-wringing when it comes to making decisions, is proving to be nothing but detrimental to those they claim to want to protect.
UK Chancellor George Osborne announced at the weekend that additional spending cuts will have to be implemented, and yet there has been not one cross-union national day of action since July 2014 .
I'm sympathetic to those lay activists who say that trade unions are there first and foremost to protect their members' jobs and lives - although I'd question whether that features on crony-capitalist stooges Len McCluskey and Dave Prentis to-do lists.
But sadly their union bosses are proving to be nothing but harmful to the cause. By wanting to protect jobs (like those at Trident) that are directly linked to the very financial institutions and political groups that are responsible for poverty and destitution in this country (and the world), they are merely signing a deal with the devil - and one that will be their downfall.
There is no justification, whatsoever, for fighting for employment like that provided at Trident when it comes directly at the expense of other workers.
There is no justification, whatsoever, for fighting for employment like that provided at Trident ( funded by multinational financial institutions), when it comes directly at the expense of other workers; how many public sector jobs have been cut because of the 2008 banker-induced crash, and how many more will follow after the next one?
As the Tories push through even more anti-trade union laws and the number of employees who are members of a union is at its lowest level since 1995, these organisations need to stop thinking and acting like we're still in the 1970s.
Fighting battles over job security is no-longer enough, as the enemy is now a completely different beast - it's not the government who are generals in this war, but the multinational corporations.
By thinking and behaving like a modern-day Ouroboros (going round and round in circles, consuming themselves and spawning the same being with each cycle), they are merely playing into the hands of right-wing corporatism - but in reality, far from recreating themselves after eating their own tails, they are being manipulated into cannibalising other working-class and left-wing organisations.
Fighting battles over job security is no-longer enough, as the enemy is now a completely different beast - it's not the government who are generals in this war, but the multinational corporations.
The upper echelons of the trade union movement fail to see (or are complicit in the fact) that their actions will eventually lead to them devouring themselves like the mythological serpent, but without rebirthing - and their membership will be scattered across various fringe movements.
They need to be dragged, kicking and screaming, into the 21st century; one where globalised crony-capitalism and the corporatisation of nations is the enemy, and get on-board with the anti-capitalist movement.
And if they don't? They will, in the next decade, become as obsolete as the nuclear deterrent they seem hell-bent on protecting.
Picture courtesy of Steve Topple |
<reponame>Rithish288/Portfolio<gh_stars>1-10
import { Component, ChangeDetectionStrategy, AfterViewInit, OnDestroy, ElementRef, ViewChild } from '@angular/core';
@Component({
  selector: 'app-demo2',
  templateUrl: './demo2.component.html',
  styleUrls: ['./demo2.component.scss'],
  // OnPush: change detection only runs on input-reference changes or explicit marking.
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class Demo2Component implements AfterViewInit, OnDestroy {
  // Canvas element declared with the #canvas template reference.
  @ViewChild("canvas") canvas: ElementRef<HTMLCanvasElement>;
  // 2D drawing context; only valid after ngAfterViewInit has run.
  private c: CanvasRenderingContext2D;
  // NOTE(review): `data`, `metaballs` and `const` are never used in this class
  // body — presumably leftovers from a metaballs demo; confirm before removing.
  // `const` is a legal property name but easily confused with the keyword.
  private data: ImageData;
  private metaballs = [];
  private const: number = 10;

  constructor() { }

  ngAfterViewInit(): void {
    // The canvas only exists once the view is initialized, hence this hook.
    this.c = this.canvas.nativeElement.getContext('2d');
    this.canvas.nativeElement.width = 200;
    this.canvas.nativeElement.height = 200;
  }

  ngOnDestroy(): void { }
}
|
def with_retries(f: Callable[[], Any], max_attempts: int = 3) -> Any:
    """Call ``f`` and return its result, retrying on any exception.

    The original version looped ``range(max_attempts + 1)``, making
    ``max_attempts + 1`` total attempts while logging "n of max_attempts";
    this version makes exactly ``max_attempts`` attempts.

    Args:
        f: zero-argument callable to invoke.
        max_attempts: total number of attempts; at least one is always made.

    Returns:
        Whatever ``f`` returns on its first successful attempt.

    Raises:
        The exception raised by ``f`` on the final attempt.
    """
    attempts = max(1, max_attempts)
    for n in range(attempts):
        try:
            return f()
        except Exception as e:
            if n < attempts - 1:
                logging.warning(f"Got an error, {n+1} of {attempts} attempts: {e}")
                # Exponential backoff with jitter before retrying.
                time.sleep(2 ** n + random.random())
            else:
                # Out of attempts: surface the final failure to the caller.
                raise e
<gh_stars>0
from simclr import SimCLR
import yaml
from data_aug.dataset_wrapper import DataSetWrapper
import torch
# Accumulates every validation loss seen across epochs (shared module state).
losses = []


def get_valid_callback(save_loc: str):
    """Build a per-epoch validation callback that checkpoints to ``save_loc``.

    Returns a no-op callback when ``save_loc`` is None; otherwise the callback
    records the validation loss and saves model weights, epoch number, and the
    running loss history with ``torch.save``.
    """
    if save_loc is None:
        return lambda m, e, l: None

    def each_valid(model, epoch, valid_loss):
        losses.append(valid_loss)
        torch.save(
            {
                "model": model.state_dict(),
                "epoch": epoch,
                "valid_losses": losses,
            },
            save_loc,
        )

    return each_valid
def main(save_loc: str = None):
    """Load config.yaml, build the dataset wrapper, and run SimCLR training.

    Args:
        save_loc: optional checkpoint path handed to the validation callback;
            None disables checkpointing.
    """
    # Context manager closes the config file; the original leaked the
    # anonymous handle passed straight into yaml.load().
    with open("config.yaml", "r") as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    dataset = DataSetWrapper(config['batch_size'], **config['dataset'])
    simclr = SimCLR(dataset, config)
    callback = get_valid_callback(save_loc)
    simclr.train(callback)
# Script entry point: train without saving validation checkpoints by default.
if __name__ == "__main__":
    main()
|
<filename>src/main/java/com/jhkj/weapp/common/exception/AdminUnauthenticatedException.java
package com.jhkj.weapp.common.exception;
/**
* @author 呉真 Kuretru < <EMAIL> >
*/
/**
 * Thrown when an operation requires an authenticated administrator but the
 * caller has not been authenticated as one.
 */
public class AdminUnauthenticatedException extends Exception {

    /**
     * @param message human-readable description of the authentication failure
     */
    public AdminUnauthenticatedException(String message) {
        super(message);
    }
}
|
<filename>pkg/accountmanager/iam_test.go<gh_stars>100-1000
package accountmanager
import (
"testing"
"github.com/Optum/dce/pkg/account"
"github.com/Optum/dce/pkg/accountmanager/mocks"
"github.com/Optum/dce/pkg/arn"
awsMocks "github.com/Optum/dce/pkg/awsiface/mocks"
commonMocks "github.com/Optum/dce/pkg/common/mocks"
"github.com/Optum/dce/pkg/errors"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/iam"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// testConfig is a fully-populated ServiceConfig so the principal service
// under test operates with deterministic bucket, tag, and policy settings.
var testConfig = ServiceConfig{
	S3BucketName:                "DefaultArtifactBucket",
	S3PolicyKey:                 "DefaultPrincipalPolicyS3Key",
	PrincipalIAMDenyTags:        []string{"DefaultPrincipalIamDenyTags"},
	PrincipalMaxSessionDuration: 3600,
	AllowedRegions:              []string{"us-east-1"},
	TagEnvironment:              "DefaultTagEnvironment",
	TagContact:                  "DefaultTagContact",
	TagAppName:                  "DefaultTagAppName",
	PrincipalRoleDescription:    "Role for principal users of DCE",
	PrincipalPolicyDescription:  "Policy for principal users of DCE",
}
// TestPrincipalMergePolicyAccess exercises principalService.MergePolicy with
// mocked IAM, storage, and client services. The table covers the happy path
// (policy created and attached cleanly) and the idempotent path (AWS reports
// the policy and attachment already exist, so a new policy version is
// created instead and the call still succeeds).
func TestPrincipalMergePolicyAccess(t *testing.T) {
	// Per-call mock result bundles, one per IAM API used by MergePolicy.
	type createPolicyOutput struct {
		output *iam.CreatePolicyOutput
		err    awserr.Error
	}
	type attachRolePolicyOutput struct {
		output *iam.AttachRolePolicyOutput
		err    awserr.Error
	}
	type listPolicyVersionsOutput struct {
		output *iam.ListPolicyVersionsOutput
		err    error
	}
	type createPolicyVersionOutput struct {
		output *iam.CreatePolicyVersionOutput
		err    error
	}

	tests := []struct {
		name                      string
		exp                       error
		account                   *account.Account
		createPolicyOutput        createPolicyOutput
		attachRolePolicyOutput    attachRolePolicyOutput
		listPolicyVersionsOutput  listPolicyVersionsOutput
		createPolicyVersionOutput createPolicyVersionOutput
	}{
		{
			name: "should create role and policy and pass",
			account: &account.Account{
				ID:                 aws.String("123456789012"),
				PrincipalRoleArn:   arn.New("aws", "iam", "", "123456789012", "role/DCEPrincipal"),
				AdminRoleArn:       arn.New("aws", "iam", "", "123456789012", "role/AdminAccess"),
				PrincipalPolicyArn: arn.New("aws", "iam", "", "123456789012", "policy/DCEPrincipalDefaultPolicy"),
			},
			createPolicyOutput: createPolicyOutput{
				output: &iam.CreatePolicyOutput{},
				err:    nil,
			},
			attachRolePolicyOutput: attachRolePolicyOutput{
				output: &iam.AttachRolePolicyOutput{},
				err:    nil,
			},
			listPolicyVersionsOutput: listPolicyVersionsOutput{},
		},
		{
			// Duplicate-entity errors from CreatePolicy/AttachRolePolicy are
			// treated as success; a policy version is created instead.
			name: "should get duplicate errors and still work",
			account: &account.Account{
				ID:                 aws.String("123456789012"),
				PrincipalRoleArn:   arn.New("aws", "iam", "", "123456789012", "role/DCEPrincipal"),
				PrincipalPolicyArn: arn.New("aws", "iam", "", "123456789012", "policy/DCEPrincipalDefaultPolicy"),
				AdminRoleArn:       arn.New("aws", "iam", "", "123456789012", "role/AdminAccess"),
			},
			exp: nil,
			createPolicyOutput: createPolicyOutput{
				output: nil,
				err:    awserr.New(iam.ErrCodeEntityAlreadyExistsException, "Already Exists", nil),
			},
			listPolicyVersionsOutput: listPolicyVersionsOutput{
				output: &iam.ListPolicyVersionsOutput{},
				err:    nil,
			},
			createPolicyVersionOutput: createPolicyVersionOutput{
				output: &iam.CreatePolicyVersionOutput{},
				err:    nil,
			},
			attachRolePolicyOutput: attachRolePolicyOutput{
				output: nil,
				err:    awserr.New(iam.ErrCodeEntityAlreadyExistsException, "Already Exists", nil),
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			iamSvc := &awsMocks.IAM{}
			iamSvc.On("CreatePolicy", mock.AnythingOfType("*iam.CreatePolicyInput")).
				Return(tt.createPolicyOutput.output, tt.createPolicyOutput.err)
			iamSvc.On("ListPolicyVersions", mock.AnythingOfType("*iam.ListPolicyVersionsInput")).
				Return(tt.listPolicyVersionsOutput.output, tt.listPolicyVersionsOutput.err)
			iamSvc.On("CreatePolicyVersion", mock.AnythingOfType("*iam.CreatePolicyVersionInput")).
				Return(tt.createPolicyVersionOutput.output, tt.createPolicyVersionOutput.err)
			iamSvc.On("AttachRolePolicy", mock.AnythingOfType("*iam.AttachRolePolicyInput")).
				Return(tt.attachRolePolicyOutput.output, tt.attachRolePolicyOutput.err)

			// The storager returns the rendered policy plus hash "123",
			// which is expected to land on account.PrincipalPolicyHash.
			storagerSvc := &commonMocks.Storager{}
			storagerSvc.On(
				"GetTemplateObject", "DefaultArtifactBucket", "DefaultPrincipalPolicyS3Key",
				mock.Anything).Return("", "123", nil)

			// NOTE(review): clientSvc is wired up but principalService below
			// receives iamSvc directly — confirm whether the Clienter mock is
			// still needed here.
			clientSvc := &mocks.Clienter{}
			clientSvc.On("IAM", mock.Anything).Return(iamSvc)

			principalSvc := principalService{
				iamSvc:   iamSvc,
				storager: storagerSvc,
				account:  tt.account,
				config:   testConfig,
			}

			err := principalSvc.MergePolicy()
			assert.True(t, errors.Is(err, tt.exp), "actual error %+v doesn't match expected error %+v", err, tt.exp)
			if tt.exp == nil {
				assert.Equal(t, *tt.account.PrincipalPolicyHash, "123")
			}
		})
	}
}
|
<reponame>ethanent/discordkvs
package main
import (
"bytes"
"crypto/rand"
"flag"
"fmt"
"github.com/bwmarrin/discordgo"
"github.com/ethanent/discordkvs"
"os"
"os/signal"
"strings"
"time"
)
// UseGuild is the Discord guild (server) ID used for integrity testing.
const UseGuild = "732134812499836941"

// integrityTest toggles the one-shot benchmark mode (-i flag).
var integrityTest = flag.Bool("i", false, "should bot perform integrity testing?")
// main runs the discordkvs demo bot. With -i it performs a one-shot
// round-trip integrity benchmark against UseGuild and exits; otherwise it
// serves "kvs set/get/del" chat commands until interrupted.
func main() {
	flag.Parse()

	s, err := discordgo.New("Bot " + os.Getenv("DISCORD_BOT_TOKEN"))
	if err != nil {
		panic(err)
	}

	// discordkvs.AcceptDataFromOtherUsers will allow the application to use messages
	// from other users to get values.
	app, err := discordkvs.NewApplication(s, "DemoApp", discordkvs.AcceptDataFromOtherUsers)
	if err != nil {
		panic(err)
	}

	if err := s.Open(); err != nil {
		panic(err)
	}

	fmt.Println("OPEN")

	if *integrityTest {
		kvsChannelID, err := app.GetKVSChannelID(UseGuild)
		if err != nil {
			panic(err)
		}

		fmt.Println("KVS ID: " + kvsChannelID)

		// Test data
		fmt.Println(app.Get(UseGuild, "testKey"))

		// Benchmark round-trips at several payload sizes.
		testMessageIntegrity(s, app, 100)
		testMessageIntegrity(s, app, 100)
		testMessageIntegrity(s, app, 300)
		testMessageIntegrity(s, app, 600)
		testMessageIntegrity(s, app, 1000)
		testMessageIntegrity(s, app, 100)

		os.Exit(0)
	}

	// Run simple data storage bot
	s.AddHandler(func (_ *discordgo.Session, m *discordgo.MessageCreate) {
		d := strings.Split(m.Content, " ")

		// NOTE(review): d[1] is read below without checking len(d) > 1 — a
		// bare "kvs" message would panic this handler; consider guarding.
		if d[0] != "kvs" {
			return
		}

		if d[1] == "set" {
			if len(d) < 4 {
				s.ChannelMessageSend(m.ChannelID, "err: not enough args")
				return
			}

			start := time.Now()
			err := app.Set(m.GuildID, d[2], []byte(d[3]))
			end := time.Now()

			if err != nil {
				s.ChannelMessageSend(m.ChannelID, "err: " + err.Error())
				return
			}

			s.ChannelMessageSend(m.ChannelID, ":white_check_mark: " + end.Sub(start).String())
		} else if d[1] == "get" {
			if len(d) < 3 {
				s.ChannelMessageSend(m.ChannelID, "err: not enough args")
				return
			}

			start := time.Now()
			d, err := app.Get(m.GuildID, d[2])
			end := time.Now()

			if err != nil {
				s.ChannelMessageSend(m.ChannelID, "err: " + err.Error())
				return
			}

			s.ChannelMessageSend(m.ChannelID, ":white_check_mark: " + string(d) + " " + end.Sub(start).String())
		} else if d[1] == "del" {
			if len(d) < 3 {
				s.ChannelMessageSend(m.ChannelID, "err: not enough args")
				return
			}

			start := time.Now()
			err := app.Del(m.GuildID, d[2])
			end := time.Now()

			if err != nil {
				s.ChannelMessageSend(m.ChannelID, "err: " + err.Error())
				return
			}

			s.ChannelMessageSend(m.ChannelID, ":white_check_mark: " + end.Sub(start).String())
		}
	})

	// Block until interrupted (Ctrl-C).
	f := make(chan os.Signal)
	signal.Notify(f, os.Interrupt)
	<- f
}
// testMessageIntegrity writes dataSize random bytes under "testKey", reads
// them back, and prints timings plus whether the round-trip was lossless.
func testMessageIntegrity(s *discordgo.Session, a *discordkvs.Application, dataSize int) {
	payload := make([]byte, dataSize)
	if _, err := rand.Read(payload); err != nil {
		panic(err)
	}

	writeBegan := time.Now()
	if err := a.Set(UseGuild, "testKey", payload); err != nil {
		panic(err)
	}

	readBegan := time.Now()
	stored, err := a.Get(UseGuild, "testKey")
	if err != nil {
		panic(err)
	}
	readEnded := time.Now()

	fmt.Println("-----------------")
	fmt.Println("Size:", dataSize)
	fmt.Println("SetTime:", readBegan.Sub(writeBegan))
	fmt.Println("ReadTime:", readEnded.Sub(readBegan))
	fmt.Println("Integrity:", bytes.Equal(stored, payload))
}
|
def original(w):
    """Answer "YES" when w is even, "NO" when it is odd."""
    return "YES" if w % 2 == 0 else "NO"
# however, original(w) is wrong so we need to patch it
def original_v2(w):
    """Patched variant: always answers "NO", regardless of w."""
    return "NO"
def cond(w):
    """Dispatcher selector: 1 for the special-cased input w == 2, else 0."""
    # fix the bad case
    return 1 if w == 2 else 0
def _generate(cond, functions):
def _choose(*args, **kwargs):
return functions[cond(*args, **kwargs)](*args, **kwargs)
return _choose
# Rebind `original` to a dispatcher: cond(w) picks the old implementation
# (index 0) or the patched one (index 1) per call.
original = _generate(cond, [original, original_v2])
# original(w) is correct now
w = int(input())
print(original(w))
|
def fetch():
    """Download every Prometheus series for each metric name into data/*.json.

    Relies on a module-level ``auth`` credential pair and the ``requests``
    library; one JSON file is written per metric name.
    """
    url = 'https://prometheus-us-central1.grafana.net/api/prom/api/v1/label/__name__/values'
    all_series = requests.get(url, auth=auth).json()["data"]

    # exist_ok replaces the original try/except-FileExistsError dance.
    os.makedirs("data", exist_ok=True)

    url = 'https://prometheus-us-central1.grafana.net/api/prom/api/v1/series'
    for series in all_series:
        data = requests.get(url, {
            "match[]": series
        }, auth=auth).json()
        # NOTE(review): the series name is used verbatim as a file name;
        # names containing '/' would fail here — confirm against the metric set.
        with open(f"data/{series}.json", "w") as f:
            json.dump(data, f)
#ifndef ZMONITORS_UI_H
#define ZMONITORS_UI_H

#include <zmonitors-backend.h>

/* ui base */

struct zms_ui_base;

/* Callback table supplied by each UI element implementation. Handlers
 * marked "nonnull" must be provided; "nullable" handlers may be NULL. */
struct zms_ui_base_interface {
  void (*setup)(struct zms_ui_base* ui_base);     /* nonnull */
  void (*teardown)(struct zms_ui_base* ui_base);  /* nonnull */
  void (*reconfigure)(struct zms_ui_base* ui_base); /* nonnull */
  void (*repaint)(struct zms_ui_base* ui_base);   /* nullable */
  void (*frame)(struct zms_ui_base* ui_base, uint32_t time); /* nullable */
  bool (*ray_enter)(struct zms_ui_base* ui_base, uint32_t serial, vec3 origin,
      vec3 direction); /* nullable */
  bool (*ray_leave)(
      struct zms_ui_base* ui_base, uint32_t serial); /* nullable */
  bool (*ray_motion)(struct zms_ui_base* ui_base, uint32_t time, vec3 origin,
      vec3 direction); /* nullable */
  bool (*ray_button)(struct zms_ui_base* ui_base, uint32_t serial,
      uint32_t time, uint32_t button, uint32_t state); /* nullable */
  bool (*keyboard_enter)(struct zms_ui_base* ui_base, uint32_t serial,
      struct wl_array* keys); /* nullable */
  bool (*keyboard_leave)(
      struct zms_ui_base* ui_base, uint32_t serial); /* nullable */
  bool (*keyboard_key)(struct zms_ui_base* ui_base, uint32_t serial,
      uint32_t time, uint32_t key, uint32_t state); /* nullable */
  bool (*keyboard_modifiers)(struct zms_ui_base* ui_base, uint32_t serial,
      uint32_t mods_depressed, uint32_t mods_latched, uint32_t mods_locked,
      uint32_t group); /* nullable */
  bool (*cuboid_window_moved)(
      struct zms_ui_base* ui_base, vec3 face_direction); /* nullable */
};

/* A node in the UI element tree; elements form a parent/children hierarchy
 * rooted at a zms_ui_root. */
struct zms_ui_base {
  void* user_data;
  const struct zms_ui_base_interface* interface;

  struct zms_ui_root* root;
  struct zms_ui_base* parent;
  struct wl_list link;  // -> zms_ui_base.children
  struct wl_list children;

  vec3 position;
  vec3 half_size;

  bool setup; /* whether setup has run for this element */
};

struct zms_ui_base* zms_ui_base_create(void* user_data,
    const struct zms_ui_base_interface* interface, struct zms_ui_base* parent);

void zms_ui_base_destroy(struct zms_ui_base* ui_base);

void zms_ui_base_schedule_repaint(struct zms_ui_base* ui_base);

/* root */

struct zms_ui_root {
  struct zms_ui_base* base;
  struct zms_cuboid_window* cuboid_window;
  struct wl_list frame_callback_list;
  uint32_t frame_state;  // enum zms_ui_frame_state
};

struct zms_ui_root* zms_ui_root_create(void* user_data,
    const struct zms_ui_base_interface* interface, struct zms_backend* backend,
    vec3 half_size, versor quaternion);

void zms_ui_root_destroy(struct zms_ui_root* root);

#endif  //  ZMONITORS_UI_H
|
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.async;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;
import org.apache.commons.lang3.exception.ExceptionUtils;
/**
 * A value to be completed once upon the first request, asynchronously
 * 
 * This contains a single lazy value. It is computed only if requested. When requested, a future is
 * returned and the computation is started. If the computation succeeds, the completed future is
 * cached indefinitely. Any subsequent requests return the same future, even if the computation has
 * not yet completed. Thus, when it completes, all requests will be fulfilled by the result of the
 * first request. If the computation completes exceptionally, the result is immediately discarded.
 * Thus, a subsequent request will retry the computation.
 *
 * @param <T> the type of the value
 */
public class AsyncLazyValue<T> {
	private CompletableFuture<T> future;
	private Throwable lastExc = null;
	private Supplier<CompletableFuture<T>> supplier;

	/**
	 * Construct a lazy value for the given computation
	 * 
	 * @param supplier specifies the computation
	 */
	public AsyncLazyValue(Supplier<CompletableFuture<T>> supplier) {
		this.supplier = supplier;
	}

	/**
	 * Request the value
	 * 
	 * If this is called before {@link #provide()}, the computation given at construction is
	 * launched. The {@link CompletableFuture} it provides is returned immediately. Subsequent calls
	 * to either {@link #request()} or {@link #provide()} return the same future without starting
	 * any new computation.
	 * 
	 * @return a future, possibly already completed, for the value
	 */
	public synchronized CompletableFuture<T> request() {
		if (future == null) {
			future = supplier.get();
			// On failure, clear the cached future so the next request retries.
			future.exceptionally((exc) -> {
				synchronized (this) {
					lastExc = exc;
					future = null;
				}
				// We return the future, not the result of exceptionally
				// So no need to rethrow here
				return null;
			});
		}
		// It's possible the future completed exceptionally on this thread, so future may be null
		if (future == null) {
			return CompletableFuture.failedFuture(lastExc);
		}
		return future;
	}

	/**
	 * Provide the value out of band
	 * 
	 * If this is called before {@link #request()}, the computation given at construction is
	 * ignored. A new {@link CompletableFuture} is returned instead. The caller must see to this
	 * future's completion. Subsequent calls to either {@link #request()} or {@link #provide()}
	 * return this same future without starting any computation.
	 * 
	 * Under normal circumstances, the caller cannot determine whether or not it has "claimed" the
	 * computation. If the usual computation is already running, then the computations are
	 * essentially in a race. As such, it is essential that alternative computations result in the
	 * same value as the usual computation. In other words, the functions must not differ, but the
	 * means of computation can differ. Otherwise, race conditions may arise.
	 * 
	 * @return a promise that the caller must fulfill or arrange to have fulfilled
	 */
	public synchronized CompletableFuture<T> provide() {
		if (future == null) {
			future = new CompletableFuture<>();
			// As in request(): discard the cached future on failure so a
			// later call can retry.
			future.exceptionally((exc) -> {
				synchronized (this) {
					future = null;
				}
				return ExceptionUtils.rethrow(exc);
			});
		}
		return future;
	}

	/**
	 * Forget the value
	 * 
	 * Instead of returning a completed (or even in-progress) future, the next request will cause
	 * the value to be re-computed.
	 */
	public synchronized void forget() {
		future = null;
	}

	@Override
	public synchronized String toString() {
		if (future == null) {
			return "(lazy)";
		}
		if (!future.isDone()) {
			return "(lazy-req)";
		}
		if (future.isCompletedExceptionally()) {
			return "(lazy-err)";
		}
		return future.getNow(null).toString();
	}

	/**
	 * Check if the value has been requested, but not yet completed
	 * 
	 * <p>
	 * This will also return true if something is providing the value out of band.
	 * 
	 * @return true if {@link #request()} or {@link #provide()} has been called, but not completed
	 */
	public synchronized boolean isBusy() {
		return future != null && !future.isDone();
	}

	/**
	 * Check that the value is available immediately
	 * 
	 * @return true if {@link #request()} or {@link #provide()} has been called and completed.
	 */
	public synchronized boolean isDone() {
		return future != null && future.isDone();
	}
}
|
package manager
import (
"context"
"encoding/json"
"errors"
"fmt"
"sync"
"github.com/ipfs/go-datastore"
"github.com/ipfs/go-datastore/namespace"
logging "github.com/ipfs/go-log/v2"
"github.com/textileio/powergate/ffs"
"github.com/textileio/powergate/ffs/api"
"github.com/textileio/powergate/ffs/auth"
"github.com/textileio/powergate/ffs/scheduler"
)
var (
	// ErrAuthTokenNotFound returns when an auth-token doesn't exist.
	ErrAuthTokenNotFound = errors.New("auth token not found")

	log = logging.Logger("ffs-manager")

	// zeroConfig is a safe-initial value for a default
	// CidConfig for a manager. A newly (not re-loaded) created
	// manager will have this configuration by default. It can be
	// later changed with the Get/Set APIs. A re-loaded manager will
	// recover its last configured CidConfig from the datastore.
	zeroConfig = ffs.DefaultConfig{
		Hot: ffs.HotConfig{
			Enabled: true,
			Ipfs: ffs.IpfsConfig{
				AddTimeout: 30,
			},
		},
		Cold: ffs.ColdConfig{
			Enabled: true,
			Filecoin: ffs.FilConfig{
				RepFactor:       1,
				DealMinDuration: 1000,
			},
		},
	}

	// dsDefaultCidConfigKey is the datastore key under which the default
	// CidConfig is persisted.
	dsDefaultCidConfigKey = datastore.NewKey("defaultcidconfig")
)
// Manager creates Api instances, or loads existing ones from an auth-token.
type Manager struct {
	wm    ffs.WalletManager
	pm    ffs.PaychManager
	sched *scheduler.Scheduler

	// lock guards ds, auth, instances, defaultConfig and closed.
	lock          sync.Mutex
	ds            datastore.Datastore
	auth          *auth.Auth
	instances     map[ffs.APIID]*api.API // cache of loaded API instances
	defaultConfig ffs.DefaultConfig
	closed        bool
}
// New returns a new Manager. The default CidConfig is recovered from the
// datastore when present, otherwise zeroConfig is used.
func New(ds datastore.Datastore, wm ffs.WalletManager, pm ffs.PaychManager, sched *scheduler.Scheduler) (*Manager, error) {
	cidConfig, err := loadDefaultCidConfig(ds)
	if err != nil {
		return nil, fmt.Errorf("loading default cidconfig: %s", err)
	}
	return &Manager{
		auth:          auth.New(namespace.Wrap(ds, datastore.NewKey("auth"))),
		ds:            ds,
		wm:            wm,
		pm:            pm,
		sched:         sched,
		instances:     make(map[ffs.APIID]*api.API),
		defaultConfig: cidConfig,
	}, nil
}
// Create creates a new Api instance and an auth-token mapped to it.
// On token-generation failure the freshly created instance is closed so it
// does not leak (the original left it open and unreachable).
func (m *Manager) Create(ctx context.Context) (ffs.APIID, string, error) {
	m.lock.Lock()
	defer m.lock.Unlock()
	log.Info("creating instance")

	iid := ffs.NewAPIID()
	fapi, err := api.New(ctx, namespace.Wrap(m.ds, datastore.NewKey("api/"+iid.String())), iid, m.sched, m.wm, m.pm, m.defaultConfig)
	if err != nil {
		return ffs.EmptyInstanceID, "", fmt.Errorf("creating new instance: %s", err)
	}

	auth, err := m.auth.Generate(fapi.ID())
	if err != nil {
		// Best-effort cleanup of the instance that will never be reachable.
		if cErr := fapi.Close(); cErr != nil {
			log.Errorf("closing instance %s after failed token generation: %s", fapi.ID(), cErr)
		}
		return ffs.EmptyInstanceID, "", fmt.Errorf("generating auth token for %s: %s", fapi.ID(), err)
	}

	m.instances[iid] = fapi
	return fapi.ID(), auth, nil
}
// SetDefaultConfig sets the default CidConfig to be set as default to newly created
// FFS instances. The new value is persisted before taking effect.
func (m *Manager) SetDefaultConfig(dc ffs.DefaultConfig) error {
	m.lock.Lock()
	defer m.lock.Unlock()

	err := m.saveDefaultConfig(dc)
	if err != nil {
		return fmt.Errorf("persisting default configuration: %s", err)
	}
	return nil
}
// List returns a list of all existing API instances.
func (m *Manager) List() ([]ffs.APIID, error) {
	m.lock.Lock()
	defer m.lock.Unlock()

	ids, err := m.auth.List()
	if err != nil {
		return nil, fmt.Errorf("listing existing instances: %s", err)
	}
	return ids, nil
}
// GetByAuthToken loads an existing instance using an auth-token. If auth-token doesn't exist,
// it returns ErrAuthTokenNotFound. Any other lookup failure is returned as an error.
func (m *Manager) GetByAuthToken(token string) (*api.API, error) {
	m.lock.Lock()
	defer m.lock.Unlock()

	iid, err := m.auth.Get(token)
	if err == auth.ErrNotFound {
		return nil, ErrAuthTokenNotFound
	}
	// The original ignored unexpected lookup errors and proceeded with a
	// zero-value instance ID; surface them instead.
	if err != nil {
		return nil, fmt.Errorf("getting instance id for auth token: %s", err)
	}

	i, ok := m.instances[iid]
	if !ok {
		log.Infof("loading uncached instance %s", iid)
		i, err = api.Load(namespace.Wrap(m.ds, datastore.NewKey("api/"+iid.String())), iid, m.sched, m.wm, m.pm)
		if err != nil {
			return nil, fmt.Errorf("loading instance %s: %s", iid, err)
		}
		m.instances[iid] = i
	} else {
		log.Infof("using cached instance %s", iid)
	}
	return i, nil
}
// GetDefaultConfig returns the current default CidConfig used
// for newly created FFS instances.
func (m *Manager) GetDefaultConfig() ffs.DefaultConfig {
	m.lock.Lock()
	defer m.lock.Unlock()
	return m.defaultConfig
}
// Close closes a Manager and consequently all loaded instances.
// It is idempotent; per-instance close failures are logged, not returned.
func (m *Manager) Close() error {
	m.lock.Lock()
	defer m.lock.Unlock()
	if m.closed {
		return nil
	}
	for _, i := range m.instances {
		if err := i.Close(); err != nil {
			log.Errorf("closing instance %s: %s", i.ID(), err)
		}
	}
	m.closed = true
	return nil
}
// saveDefaultConfig persists a new default configuration and updates
// the cached value. This method must be guarded.
// The in-memory value is only updated after the datastore write succeeds.
func (m *Manager) saveDefaultConfig(dc ffs.DefaultConfig) error {
	buf, err := json.Marshal(dc)
	if err != nil {
		return fmt.Errorf("marshaling default config: %s", err)
	}
	if err := m.ds.Put(dsDefaultCidConfigKey, buf); err != nil {
		return fmt.Errorf("saving default config to datastore: %s", err)
	}
	m.defaultConfig = dc
	return nil
}
// loadDefaultCidConfig reads the persisted default CidConfig from the
// datastore, falling back to zeroConfig when none has been saved yet.
func loadDefaultCidConfig(ds datastore.Datastore) (ffs.DefaultConfig, error) {
	d, err := ds.Get(dsDefaultCidConfigKey)
	if err == datastore.ErrNotFound {
		return zeroConfig, nil
	}
	if err != nil {
		return ffs.DefaultConfig{}, fmt.Errorf("get from datastore: %s", err)
	}
	var defaultConfig ffs.DefaultConfig
	if err := json.Unmarshal(d, &defaultConfig); err != nil {
		return ffs.DefaultConfig{}, fmt.Errorf("unmarshaling default cidconfig: %s", err)
	}
	return defaultConfig, nil
}
|
<reponame>marksweissma/donatello<gh_stars>1-10
import os
from sklearn.externals import joblib
import pandas as pd
import inspect
# Prefer the stdlib's inspect.signature (Python 3); fall back to the
# `funcsigs` backport on interpreters where it is missing.
if hasattr(inspect, 'signature'):
    funcsigs = inspect
else:
    import funcsigs
def now_string(strFormat="%Y_%m_%d_%H_%M"):
    """
    Formatted string from the current time.

    Args:
        strFormat (str): strftime format for the timestamp

    Returns:
        str: current local time rendered with ``strFormat``
    """
    # Use the stdlib directly: the `pd.datetime` alias this originally
    # relied on was removed in pandas 2.0.
    from datetime import datetime
    return datetime.now().strftime(strFormat)
def nvl(*args):
    """
    SQL-like coalesce / redshift NVL: return the first truthy argument.

    Arguments with ambiguous truth values (e.g. numpy arrays, which raise
    ValueError on bool()) are returned when they are not None. Falls back
    to the final argument when nothing qualifies.
    """
    for arg in args:
        try:
            truthy = bool(arg)
        except ValueError:
            # Ambiguous truth value (array-likes): accept anything non-None.
            if arg is not None:
                return arg
        else:
            if truthy:
                return arg
    return args[-1]
# dispatch on shape make registers
def reformat_aggs(d, idx=0, sortValues=None, indexName=None, filterNulls=.95):
    """
    Combine a dict of DataFrames into one frame: for each (key, frame) pair
    take column ``idx`` as a Series named ``key``, concatenate side by side,
    then optionally sort, rename the index, and drop mostly-null columns.
    """
    pieces = []
    for key, frame in d.items():
        pieces.append(pd.Series(frame[frame.columns[idx]], name=key))
    combined = pd.concat(pieces, axis=1, sort=True)

    if sortValues:
        combined = combined.sort_values(sortValues)
    if indexName:
        combined.index.name = indexName
    if filterNulls:
        nullRates = combined.isnull().mean()
        toDrop = [col for col, rate in zip(combined, nullRates) if rate > filterNulls]
        combined = combined.drop(toDrop, axis=1)
    return combined
def has(obj, attr, slicers):
    """Membership test: ``attr in obj`` for slicer types, ``hasattr`` otherwise."""
    if slicers and isinstance(obj, slicers):
        return attr in obj
    return hasattr(obj, attr)
def _get(obj, attr, slicers):
value = obj[attr] if isinstance(obj, slicers) else getattr(obj, attr)
return value
def get(obj, attr, slicers, errors, default):
    """
    Get ``attr`` from ``obj``. When ``errors == 'raise'`` the access is
    attempted unconditionally so missing attributes raise; otherwise the
    attribute is only fetched when present and ``default`` is returned
    when it is not.
    """
    if errors == 'raise' or has(obj, attr, slicers):
        return _get(obj, attr, slicers)
    return default
def access(obj=None, attrPath=None,
           method=None, methodArgs=None, methodKwargs=None,
           cb=None, cbArgs=None, cbKwargs=None,
           slicers=(dict, list, tuple, pd.np.ndarray, pd.Series, pd.DataFrame),
           default=None, errors='raise'):
    """
    Access information from nested object

    Recurses down ``attrPath`` one element at a time; at the end of the path
    it optionally calls ``method`` on the reached object and/or applies ``cb``
    to it.

    Args:
        obj (object): object to access from
        attrPath (list): sequence of traversal
        method (str): (optional) method to call at end of path
        methodArgs (tuple): positional args for method
        methodKwargs (tuple): keyword args for method
        cb (str): (optional) cb to call at end of path
        cbArgs (tuple): positional args for cb
        cbKwargs (tuple): keyword args for cb
        slicers (tuple): object types to use ``__getitem__`` slice rather than getattr
        default (obj): option to return default (if not rasiing errors)
        errors (str): option to raise errors ('raise') or ignore ('ignore')

    Returns:
        obj: value of given prescription

    NOTE(review): ``pd.np`` was removed in pandas >= 1.0 — this default for
    ``slicers`` breaks on modern pandas; confirm and switch to ``numpy``.
    """
    # Base case: path exhausted (or first element falsey) — apply method/cb.
    if not attrPath or not attrPath[0]:
        if method and (hasattr(obj, method) or errors == 'raise'):
            obj = obj if not method else getattr(obj, method)(
                *nvl(methodArgs, ()), **nvl(methodKwargs, {}))
        else:
            # NOTE(review): this assignment is immediately overwritten by the
            # try block below — presumably dead code; confirm before removing.
            value = obj
        try:
            value = obj if not cb else cb(obj, *nvl(cbArgs, ()), **nvl(cbKwargs, {}))
        except Exception as e:
            if errors == 'ignore':
                value = default
            else:
                raise e
    else:
        # Recursive case: consume the head of the path and descend.
        head, attrPath = attrPath[0], attrPath[1:]
        obj = get(obj, head, slicers, errors, default)
        value = access(obj, attrPath=attrPath,
                       method=method, methodArgs=methodArgs, methodKwargs=methodKwargs,
                       cb=cb, cbArgs=cbArgs, cbKwargs=cbKwargs,
                       slicers=slicers, errors=errors, default=default)
    return value
def find_value(func, args, kwargs, accessKey, how='name'):
    """
    Find the value bound to parameter ``accessKey`` in a (func, args, kwargs)
    call, falling back to the parameter's declared default when it was not
    passed, and to None when it has no default.

    Args:
        func: callable whose signature is inspected
        args (tuple): positional arguments of the call
        kwargs (dict): keyword arguments of the call
        accessKey (str|int): parameter name (how='name') or positional index
        how (str): 'name' to look the key up by parameter name
    """
    sig = funcsigs.signature(func)
    parameters = sig.parameters
    keys = list(parameters.keys())
    try:
        index = keys.index(accessKey) if how == 'name' else accessKey
        # Defaults occupy the signature's trailing positions; `offset` is the
        # index of the first defaulted parameter.
        # NOTE(review): `funcsigs._empty` is a private attribute of the
        # inspect/funcsigs module — confirm it exists on all supported versions.
        defaults = [i.default for i in parameters.values() if i.default != funcsigs._empty]
        offset = len(keys) - len(nvl(defaults, []))
        default = defaults[index - offset] if index >= offset else None
        value = kwargs.get(accessKey, default) if index >= len(args) else args[index]
    except ValueError:
        # accessKey is not a declared parameter: only kwargs can hold it.
        value = kwargs.get(accessKey, None)
    return value
def replace_value(func, args, kwargs, accessKey, accessValue):
    """
    Replace the value bound to ``accessKey`` in an (args, kwargs) pair for
    ``func``: written positionally when the argument was passed positionally,
    otherwise written into ``kwargs`` (which is mutated in place).
    """
    parameterNames = list(funcsigs.signature(func).parameters.keys())
    position = parameterNames.index(accessKey)
    if position < len(args):
        replaced = list(args)
        replaced[position] = accessValue
        args = tuple(replaced)
    else:
        kwargs[accessKey] = accessValue
    return args, kwargs
def package_dap(dap):
    """
    Normalize an access prescription into keyword-argument form:
    a string or list becomes {'attrPath': [...]}; dicts (and anything
    else) pass through unchanged.
    """
    if isinstance(dap, str):
        return {'attrPath': [dap]}
    if isinstance(dap, list):
        return {'attrPath': dap}
    return dap
def persist(obj=None, dap="", root='.', name='', extension='pkl', *writeArgs, **writeKwargs):
    """
    Write an object (or attribute) to persist.

    Args:
        obj: object to write (or to pull an attribute from via ``dap``)
        dap: access prescription routed through ``package_dap``/``access``
            to select what gets written
        root (str): directory to write into
        name (str): file name; defaults to "<obj.name or class name>.<extension>"
        extension (str): extension used when ``name`` is not supplied
        *writeArgs: forwarded to ``joblib.dump``
        **writeKwargs: forwarded to ``joblib.dump``
    """
    dap = package_dap(dap)
    obj = access(obj, **dap)
    name = name if name else ".".join([getattr(obj, 'name', obj.__class__.__name__), extension])
    local = os.path.join(root, name)
    joblib.dump(obj, local, *writeArgs, **writeKwargs)
# move
def view_sk_metric(bunch):
    """
    Unnest a sklearn metric (or other single value only returning metric)

    Args:
        bunch (bunch): dict of aggregated scores

    Returns:
        pandas.DataFrame: flattened view with one 'score' column
    """
    scores = {}
    for key, value in bunch.items():
        scores[key] = value.values[0][0]
    return pd.DataFrame({'score': scores})
|
/*
 * sf_reset:
 *
 *	Perform a soft reset on the Starfire.
 */
void
sf_reset(struct sf_softc *sc)
{
	int i;

	/* Quiesce the MAC before asserting the soft-reset bit. */
	sf_funcreg_write(sc, SF_GeneralEthernetCtrl, 0);

	sf_macreset(sc);

	/* Assert soft reset, then poll (up to 1000 iterations) for the
	 * chip to clear the bit itself. NOTE(review): delay() units are
	 * presumably microseconds — confirm against the platform's delay(). */
	sf_funcreg_write(sc, SF_PciDeviceConfig, PDC_SoftReset);
	for (i = 0; i < 1000; i++) {
		delay(10);
		if ((sf_funcreg_read(sc, SF_PciDeviceConfig) &
		     PDC_SoftReset) == 0)
			break;
	}

	if (i == 1000) {
		/* Reset never self-cleared: report it and clear the bit by hand. */
		printf("%s: reset failed to complete\n", sc->sc_dev.dv_xname);
		sf_funcreg_write(sc, SF_PciDeviceConfig, 0);
	}

	/* Settle time after reset before the chip is touched again. */
	delay(1000);
}
For practical or theoretical reasons, dictators, Organization Men and certain scientists are anxious to reduce the maddening diversity of men's natures to some kind of manageable uniformity. In the first flush of his Behaviouristic fervour, J.B. Watson roundly declared that he could find "no support for hereditary patterns of behaviour, nor for special abilities (music, art, etc.) which are supposed to run in families." And even today we find a distinguished psychologist, Professor B.F. Skinner of Harvard, insisting that, "as scientific explanation becomes more and more comprehensive, the contribution which may be claimed by the individual himself appears to approach zero. Man's vaunted creative powers, his achievements in art, science and morals, his capacity to choose and our right to hold him responsible for the consequences of his choice - none of these is conspicuous in the new scientific self-portrait."
J.B. Watson's children
a.k.a.
"Sadly, although B.F. Skinner got to brag that his "baby in a box" grew up healthy and happy, Watson's application of science to child-rearing lacks that testimonial validity: William, the older of his and Rosalie's two sons, committed suicide at age 40, just four years after John Watson's death."
Mariette Hartley - Breaking the Silence
a.k.a.
"Grandfather's theories infected my mother's life, my life, and the lives of millions. How do you break a legacy? How do you keep from passing a debilitating inheritance down, generation to generation, like a genetic flaw?"
John B. Watson's background
B.F. Skinner's daughters
I think it was Eric Berne, the founder of Transactional Analysis psychotherapy, who coined the phrase "The long dark night of Behaviorism". The Behaviorist approach to experimental psychology was inaugurated by J.B. Watson in a manifesto entitled "Psychology as the behaviorist views it", published inin 1913. He was the editor of the journal at the time and head of the psychology department at Johns Hopkins University. Behaviorism became the ruling paradigm in psychology for at least fifty years - until the early 1970s - and probably its best known exponent was B.F. Skinner. To sum up what Behaviorism stood for, I'll quote a passage from Aldous Huxley's "Brave New World Revisited" (published in 1958). Huxley uses the British spelling of 'behaviour':Huxley misquotes slightly. Skinner actually wrote "hisin art, science, and morals..." The statement can be found in "Cumulative Record" by B.F. Skinner (Reprint edition: Copley Publishing Group, 1999). In other words, the Behaviorists considered that human behavior was mostly shaped by environmental conditioning and that hereditary factors counted for very little. That was the ruling paradigm in academic psychology for fifty years until the pendulum swung in the other direction. Now, 'evolutionary' psychology and the search for 'genetic predispositions' have taken its place. It seems to be very hard for most psychiatrists and university psychologists to get it into their heads that both factors are fundamentally intertwined.To most students of psychology B.F. Skinner's name is associated with conditioning experiments on rats and pigeons in 'Skinner boxes', and J.B. Watson's name with the induction of phobias into an infant child known as 'Little Albert' (see: the Little Albert experiment at Wikipedia). What happened to Little Albert later in life is unknown.Watson subjected his own children to a harsh upbringing regime - scheduled feeding and no physical affection. 
His first marriage to Mary Ickes produced a daughter, Mary (Polly), and a son John. Polly made multiple suicide attempts later in life and 'Little John' became a rootless person who often sponged off his father. Little John was plagued by stomach trouble and intolerable headaches throughout his life. He died in his early 50s from bleeding ulcers.After a scandalous affair with a graduate student young enough to be his daughter, Rosalie Rayner, Watson's wife Mary divorced him and he was fired from Johns Hopkins University. Soon after, he married Rosalie and they had two sons - Billy and Jimmy. In adulthood Billy rebelled against his father's behaviorism and established a successful career as a Freudian psychiatrist. Nevertheless, he too attempted suicide. His first attempt was stopped by younger brother Jimmy. He killed himself at his second attempt. Jimmy suffered chronic stomach problems for years (a legacy of scheduled feeding during infancy?), but managed to do well in life after intensive analysis. An article about J.B. Watson on the website of Clayton State University refers to the suicide:In 1930, when the boys were still young, Rosalie Rayner Watson wrote an article fortitled "I Am the Mother of a Behaviorist's Sons," in which she expressed the wish that her sons would grow up to appreciate poetry and the drama of life. She said: "In some respects I bow to the great wisdom in the science of behaviorism, and in others I am rebellious. ... I like being merry and gay and having the giggles. The behaviorists think giggling is a sign of maladjustment." She died five years later from pneumonia. There's an article about John B. Watson and Rosalie Rayner in, published by the university where he was professor of psychology: It's All in the Upbringing . John Broadus Watson became a recluse towards the end of his life. He burned all of his papers prior to his death in 1958. That's not the end of the family saga. 
Watson's daughter Mary had a daughter in turn, Mary Loretta Hartley (Marietta or Mariette), who later achieved success as the actress Mariette Hartley ( www.mariettehartley.com ). She is probably best known for her TV commercials for Polaroid in which she played the role of James Garner's wife. However, the circumstances of her childhood were dire. Her rage-filled, silence-prone mother was a secret drinker who repeatedly tried to commit suicide, first one way and then another. Her father, a retired advertising executive, took his own life at the age of 67 after a long period of depression. Mariette was eating breakfast with her mother when they heard the gunshot. These circumstances led Mariette herself into alcoholism and thoughts of suicide until her career hit bottom. She managed to pull through and rebuild her life with help from a friend and mentor. Later on, she wrote a memoir of her experiences, "" (Putnam Group, 1990). She has this to say about her grandfather's childrearing principles:J.B. Watson was the author of a bestselling child rearing manual: "The Psychological Care of Infant and Child." After her recovery, Marietta Hartley became honorary director of the American Foundation for Suicide Prevention, and a public speaker and campaigner. She has received humanitarian awards from numerous organizations including, in her home state of California, the California Family Studies Center, the L.A. County Psychological Association and the University of California Brain Imaging Center at Irvine.Mariette Hartley scripted and performed a solo stage show based on her autobiography. In "," which premiered in January 2006, she portrayed the main characters who shaped her life.There are lecture notes about John Broadus Watson on the website of Sonoma State University in California which begin with extracts from Mariette Hartley's "Breaking the Silence". John B. Watson's childhood is documented in K.W. Buckley's biography of his life and work, Mechanical Man . 
He was raised by a pious mother who hoped that he would become a Southern Baptist preacher. She chose the surname of the most famous Baptist minister of the period, Broadus , as his middle name. In 1894, he enrolled at Furman University, which at the time was a Southern Baptist Academy and Theological Institute. However, a philosophy professor at Furman became his mentor and inspired his interest in psychology, still a branch of the philosophical tradition.In his later career Watson became a champion of the 'scientific method' in psychology. Yet despite the trappings of science, it appears Watson's advice on child-rearing in "The Psychological Care of Infant and Child" was mostly a recapitulation of the Southern Baptist attitude to children Perhaps it is because of the atrocious consequences of J.B. Watson's child-rearing methods on his own children that an urban myth attached itself to the fate of B.F. Skinner's second daughter, Deborah. His first daughter, Julie, became an educational psychologist (see www.juliesvargas.com ). The myth that Deborah had committed suicide arose because of an invention Skinner used as an alternative to a conventional cot -- the 'babytender'. He called it the Air Crib, but it has also been referred to as the 'heir conditioner'. It was something like a large version of a hospital incubator with a plexiglass panel which could be pulled up to seal in the warmth. It provided his daughter, Deborah, with a place to sleep and remain comfortable through the severe Minnesota winters without having to be wrapped in numerous layers of clothing and blankets. Unfortunately, when Skinner wrote an article about the 'baby tender' for the Ladies' Home Journal in 1945, the article was given the title "Baby in a Box." Many people jumped to the conclusion it was a variation of the 'Skinner box' he used for animal experiments. The truth is more benign. Deborah is a successful artist and painter. 
You can read her account of the story in this 2004 article in The Guardian newspaper . It is her response to Lauren Slater's book "Opening Skinner's Box: Great Psychological Experiments of the Twentieth Century."You can see a large photo of a commercial version of Burrhus Frederic Skinner's "AirCrib" in the Apparatus Collection at the University of Akron's. Most of them were purchased for children of psychologists. |
<filename>src/com/ithxh/baseCore/baseUtils/MultipleSysDicTree.java
package com.ithxh.baseCore.baseUtils;
// NOTE(review): everything below is commented out -- an earlier revision of
// this utility kept under its previous package name. No executable code
// follows; retained for reference only. Chinese comments translated to English.
//package com.tengo.core.util;
//
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * Dictionary tree utility: flattens SysDic entries into depth-first tree
// * order, sorting siblings by their code before descending.
// */
//public class MultipleSysDicTree {
//
// // List accumulating the flattened, ordered result
// List<SysDic> resultList = new ArrayList<SysDic>();
//
// // Raw input list of all dictionary entries
// List<SysDic> dataList = new ArrayList<SysDic>();
//
// public MultipleSysDicTree(List<SysDic> dataList) {
//
// this.dataList = dataList;
// }
//
// /**
// * @category Collect the children of the given parent id, sort them, and
// *           recurse depth-first, appending each node to resultList.
// * @param dataList
// * @return
// */
// public List<SysDic> getChildSysDic(String fatherId) {
//
// // Gather direct children of fatherId
// List<SysDic> rootList = new ArrayList<SysDic>();
// for (int i = 0; i < dataList.size(); i++) {
//
// if (dataList.get(i).getSysDicFatherSelfCode().trim().equals(fatherId)) {
// rootList.add(dataList.get(i));
// }
// }
// // Sort the siblings by code
// rootList = sortSysDicByCodeMaoPao(rootList);
// for (int i = 0; i < rootList.size(); i++) {
// resultList.add(rootList.get(i));
// // System.out.println(rootList.get(i).getSysDicSelfCode()+"------------"+rootList.get(i).getSysDicValue());
// getChildSysDic(rootList.get(i).getSysDicSelfCode());
// }
//
// return resultList;
// }
//
// /**
// * @category Bubble sort by the database field sysDicByCode. Values of
// *           sysDicByCode must be unique; smaller values sort first.
// */
// private List<SysDic> sortSysDicByCodeMaoPao(List<SysDic> dataListOrder) {
//
// for (int i = 0; i < dataListOrder.size() - 1; i++) {
// for (int j = 1; j < dataListOrder.size() - i; j++) {
// SysDic sysDicTemp;
// if ((dataListOrder.get(j - 1).getSysDicByCode()).compareTo(dataListOrder.get(j).getSysDicByCode()) > 0) { // swap adjacent pair when out of order
// sysDicTemp = dataListOrder.get(j - 1);
// dataListOrder.set((j - 1), dataListOrder.get(j));
// dataListOrder.set(j, sysDicTemp);
// }
// }
// }
// // for (SysDic s : dataListOrder) {
// // System.out.println(s.getSysDicByCode());
// // }
// return dataListOrder;
// }
//
// /**
// * @category Prepend indentation (spaces plus a branch marker) to each
// *           value, proportional to the node's depth in the code hierarchy.
// */
// public static List<SysDic> addNullForValue(List<SysDic> list) {
//
// for (int i = 0; i < list.size(); i++) {
// SysDic sysDic = list.get(i);
// int leng = (int) MatheMaticsUtil.div(sysDic.getSysDicSelfCode().length(), 3);
// String newValue = " └--" + sysDic.getSysDicValue();
// String nullValue = "";
// for (int j = 0; j < leng - 1; j++) {
// nullValue += " ";
// }
// sysDic.setSysDicValue(nullValue + newValue);
// // System.out.println(nullValue+newValue);
// list.set(i, sysDic);
// }
// return list;
// }
//
//} |
import { Injectable, InjectionToken, Inject, PLATFORM_ID } from "@angular/core";
import { QuillConfig } from "./quill-editor.interfaces";
import { dynamicImportCommonJs } from "./helpers";
import { isPlatformServer } from "@angular/common";
// Injection token under which consumers provide the global QuillConfig.
export const QUILL_CONFIG_TOKEN = new InjectionToken<QuillConfig>("config");
import { Quill } from "quill";
import { defaultModules } from "./quill-defaults";
// Cache for the lazily-loaded Quill namespace once QuillPromise resolves.
let QuillNamespace: any;
// Lazily loads Quill in its own webpack chunk; calling the returned function
// yields a promise for the Quill class itself.
const QuillPromise = dynamicImportCommonJs<Quill | typeof Quill>(() =>
  import(/* webpackChunkName: "quill" */ "quill").then(m => m.Quill)
);
/**
 * Root-singleton service that lazily loads Quill and performs one-time global
 * registration of custom options and modules from the injected QuillConfig.
 * Does nothing on the server, where Quill (a DOM library) cannot run.
 */
@Injectable({ providedIn: "root" })
export class QuillEditorService {
  constructor(
    // Typed `QuillConfig | undefined` rather than an optional parameter:
    // TypeScript forbids a required parameter (platformId) after an optional one.
    @Inject(QUILL_CONFIG_TOKEN) private config: QuillConfig | undefined,
    @Inject(PLATFORM_ID) private platformId: any
  ) {
    // Fire-and-forget: registration is async, but callers never need to await it.
    void this.init();
  }

  async init(): Promise<void> {
    if (isPlatformServer(this.platformId)) return;
    // Capture into a local so the undefined-check narrowing survives the
    // arrow-function callbacks below (property narrowing does not).
    const config = this.config;
    if (config === undefined) return;
    QuillNamespace = await QuillPromise();
    // Only register custom options and modules once (this is a root singleton).
    config.customOptions?.forEach(customOption => {
      const newCustomOption = QuillNamespace.import(customOption.import);
      newCustomOption.whitelist = customOption.whitelist;
      QuillNamespace.register(
        newCustomOption,
        true,
        config.suppressGlobalRegisterWarning
      );
    });
    config.customModules?.forEach(({ implementation, path }) => {
      QuillNamespace.register(
        path,
        implementation,
        config.suppressGlobalRegisterWarning
      );
    });
    // Fall back to the default module set when no modules key was provided.
    if (!config.modules) {
      config.modules = defaultModules;
    }
  }
}
|
#include "savefile.h"
/*
But it's up to you to encode the palette as a "unsigned short *" and the image as a "unsigned char*"
Bin2c will do part 2 if you wanna store it in header files, and you could modify it to do shorts instead for the palette
Or if you wanna store it in binary, it gives you those directly
Palette is pretty easy, it's a 16 short array of rgb565 encoded colors (I think he meant argb4444)
Also these are "simple" icons. 32x32, 16 colors. But also only take 544 bytes
unsigned short is 16 bits (2 bytes)
16, 16bit colors = 32 bytes. This means there's 4 bits per channel hence arbg4444
The black and white palette is FF FF 00 F0 00 (Then the rest is zeroes)
The colours are fully white and fully black. Then means 1 hex latter = 1 channel.
So we have FF FF which is white and 00 F0 (or F0 00) which is black
*/
int save_uncompressed(uint8_t port, uint8_t unit){
vmu_pkg_t pkg;
uint8 *pkg_out, *data;
int pkg_size;
FILE *fp;
char savename[32];
maple_device_t *vmu;
int rv = 0, blocks_freed = 0;
file_t f;
//Invalid controller/port
if(port < 0 || port > 3 || unit < 1 || unit > 2){
return -2;
}
// Make sure there's a VMU in port a1.
//Change this later to check all slots or the requested slots
if(!(vmu = maple_enum_dev(port, unit))){
return -100;
}
if(!vmu->valid || !(vmu->info.functions & MAPLE_FUNC_MEMCARD)){
return -100;
}
//Only 20 chara allowed at max (21 if you include '\0')
sprintf(savename, "/vmu/%c%d/", port + 97, unit); //port gets converted to a, b, c or d. unit is unit
strcat(savename, "UNCOMPRESS.s");
int filesize = sizeof(SaveFile_t);
data = (uint8_t *) malloc(filesize);
if(data == NULL){
free(data);
return -1;
}
memcpy(data, &save, sizeof(SaveFile_t)); //Last param is number of bytes, not bits
sprintf(pkg.desc_long, "Uncompressed save file!");
strcpy(pkg.desc_short, "Uncompressed");
strcpy(pkg.app_id, "Proto_uncomp_save");
pkg.icon_cnt = 1;
pkg.icon_anim_speed = 0;
memcpy(pkg.icon_pal, palette, 32);
pkg.icon_data = icon;
pkg.eyecatch_type = VMUPKG_EC_NONE;
pkg.data_len = sizeof(SaveFile_t); //Double check this, but I think its right
pkg.data = data;
vmu_pkg_build(&pkg, &pkg_out, &pkg_size);
// See if a file exists with that name, since we'll overwrite it.
f = fs_open(savename, O_RDONLY);
if(f != FILEHND_INVALID){
blocks_freed = fs_total(f) >> 9;
fs_close(f);
}
// Make sure there's enough free space on the VMU.
if(vmufs_free_blocks(vmu) + blocks_freed < (pkg_size >> 9)){
free(pkg_out);
free(data);
return pkg_size >> 9;
}
if(!(fp = fopen(savename, "wb"))){
free(pkg_out);
free(data);
return -1;
}
if(fwrite(pkg_out, 1, pkg_size, fp) != (size_t)pkg_size){
rv = -1;
}
fclose(fp);
free(pkg_out);
free(data);
return rv;
}
int load_uncompressed(uint8_t port, uint8_t unit){
vmu_pkg_t pkg;
uint8 *pkg_out;
int pkg_size;
FILE *fp;
char savename[32];
//Invalid controller/port
if(port < 0 || port > 3 || unit < 1 || unit > 2){
return -2;
}
//Only 20 chara allowed at max (21 if you include '\0')
sprintf(savename, "/vmu/%c%d/", port + 97, unit); //port gets converted to a, b, c or d. unit is unit
strcat(savename, "UNCOMPRESS.s");
if(!(fp = fopen(savename, "rb"))){
return -1;
}
fseek(fp, 0, SEEK_SET);
fseek(fp, 0, SEEK_END);
pkg_size = ftell(fp);
fseek(fp, 0, SEEK_SET);
pkg_out = (uint8 *)malloc(pkg_size);
fread(pkg_out, pkg_size, 1, fp);
fclose(fp);
vmu_pkg_parse(pkg_out, &pkg);
//Read the pkg data into my struct
memcpy(&save, pkg.data, sizeof(SaveFile_t)); //Last param is number of bytes, not bits
free(pkg_out);
return 0;
}
int save_compressed(uint8_t port, uint8_t unit){
vmu_pkg_t pkg;
uint8 *pkg_out, *comp;
int pkg_size, err;
uint32 len;
FILE *fp;
char savename[32];
maple_device_t *vmu;
int rv = 0, blocks_freed = 0;
file_t f;
//Invalid controller/port
if(port < 0 || port > 3 || unit < 1 || unit > 2){
return -2;
}
// Make sure there's a VMU in port A1.
//Change this later to check all slots or the requested slots
if(!(vmu = maple_enum_dev(0, 1))){
return -100;
}
if(!vmu->valid || !(vmu->info.functions & MAPLE_FUNC_MEMCARD)){
return -100;
}
//Only 20 chara allowed at max (21 if you include '\0')
sprintf(savename, "/vmu/%c%d/", port + 97, unit); //port gets converted to a, b, c or d. unit is unit
strcat(savename, "COMPRESS.s");
comp = (uint8 *)malloc(0x10000);
len = 0x10000;
// uint8_t * data;
// int filesize = sizeof(SaveFile_t);
// data = (uint8_t *) malloc(filesize);
// memcpy(data, &save, filesize);
//Try to fully understand these two lines...
// uint8 sms_cart_ram[0x8000];
err = compress2(comp, &len, &save, sizeof(SaveFile_t) / 8, 9); //Warning suggests this is wrong
// err = compress2(comp, &len, sms_cart_ram, 0x8000, 9);
//dest, dest length, source(buffer), source length, level
//The level parameter has the same meaning as in deflateInit. Initializes the internal stream state for compression
//sourceLen is the byte length of the source buffer
// compress(zipdata, &zipsize, data, filesize);
if(err != Z_OK) {
free(comp);
return -1;
}
sprintf(pkg.desc_long, "Compressed save file!");
strcpy(pkg.desc_short, "Compressed");
strcpy(pkg.app_id, "Proto_comp_save");
pkg.icon_cnt = 1;
pkg.icon_anim_speed = 0;
memcpy(pkg.icon_pal, palette, 32);
pkg.icon_data = icon;
pkg.eyecatch_type = VMUPKG_EC_NONE;
pkg.data_len = len;
pkg.data = comp;
vmu_pkg_build(&pkg, &pkg_out, &pkg_size);
// See if a file exists with that name, since we'll overwrite it.
f = fs_open(savename, O_RDONLY);
if(f != FILEHND_INVALID){
blocks_freed = fs_total(f) >> 9;
fs_close(f);
}
// Make sure there's enough free space on the VMU.
if(vmufs_free_blocks(vmu) + blocks_freed < (pkg_size >> 9)){
free(pkg_out);
free(comp);
return pkg_size >> 9;
}
if(!(fp = fopen(savename, "wb"))){
free(pkg_out);
free(comp);
return -1;
}
if(fwrite(pkg_out, 1, pkg_size, fp) != (size_t)pkg_size){
rv = -1;
}
fclose(fp);
free(pkg_out);
free(comp);
return rv;
}
int load_compressed(uint8_t port, uint8_t unit){
vmu_pkg_t pkg;
uint8 *pkg_out;
int pkg_size;
// char prodcode[7];
FILE *fp;
char savename[32];
uint32_t real_size = sizeof(SaveFile_t) / 8;
//Invalid controller/port
if(port < 0 || port > 3 || unit < 1 || unit > 2){
return -2;
}
//Only 20 chara allowed at max (21 if you include '\0')
sprintf(savename, "/vmu/%c%d/", port + 97, unit); //port gets converted to a, b, c or d. unit is unit
strcat(savename, "COMPRESS.s");
if(!(fp = fopen(savename, "rb"))){
return -1;
}
fseek(fp, 0, SEEK_SET);
fseek(fp, 0, SEEK_END);
pkg_size = ftell(fp);
fseek(fp, 0, SEEK_SET);
pkg_out = (uint8 *)malloc(pkg_size);
fread(pkg_out, pkg_size, 1, fp);
fclose(fp);
vmu_pkg_parse(pkg_out, &pkg);
uncompress(&save, &real_size, pkg.data, pkg.data_len);
// ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen, const Bytef *source, uLong sourceLen));
free(pkg_out);
return 0;
}
|
package bac.api;
import bac.helper.Helper;
import bac.peers.Peers;
import bac.peers.Peer;
import bac.crypto.Crypto;
import bac.settings.Settings;
import bac.transaction.Transaction;
import bac.transaction.Transactions;
import bac.blockchain.Forge;
import bac.blockchain.ForgeBlock;
import org.json.simple.JSONObject;
import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import java.io.*;
import java.io.IOException;
import javax.servlet.http.*;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.MalformedURLException;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.ContentResponse;
import org.eclipse.jetty.client.util.StringContentProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.net.*;
import java.net.InetSocketAddress;
public final class APIServlet extends HttpServlet {
public void init() throws ServletException {
// Do required initialization
}
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter ServletOutputStream = response.getWriter();
ServletOutputStream.println("<h1>" + "The method specified in the Request Line is not allowed for the resource identified by the request." + "</h1>");
}
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
JSONObject ajaxRequest = new JSONObject();
try {
ajaxRequest = (JSONObject)new JSONParser().parse(request.getReader());
} catch (ParseException e) {
e.printStackTrace();
}
response.setHeader("Cache-Control", "no-cache, no-store, must-revalidate, private");
response.setHeader("Pragma", "no-cache");
response.setDateHeader("Expires", 0);
response.setContentType("text/plain; charset=UTF-8");
JSONObject AjaxResponse = new JSONObject();
// Helper.logMessage("API request:"+ajaxRequest.toString()+" Address:"+Helper.GetAnnouncementHost((String) ajaxRequest.get("AnnouncedAddress"))+" Remote:"+request.getRemoteAddr());
if (( ajaxRequest.get("AnnouncedAddress") == null ) ||
(Helper.GetAnnouncementHost((String) ajaxRequest.get("AnnouncedAddress")).equals(request.getRemoteAddr()) ) ) {
switch ( (String) ajaxRequest.get("requestType") ) {
case "GetAddress": {
AjaxResponse = AjaxGetAddress(ajaxRequest);
} break;
case "GetInfo": {
AjaxResponse = AjaxGetInfo(ajaxRequest);
} break;
case "GetPeers": {
AjaxResponse = AjaxGetPeers(ajaxRequest);
} break;
case "GetAllPeerDetails": {
AjaxResponse = AjaxGetAllPeerDetails(ajaxRequest);
} break;
// Hidden requests
case "ProcessTransactions": {
AjaxResponse = AjaxProcessTransactions(ajaxRequest);
} break;
case "GetUnconfirmedTransactions": {
AjaxResponse = AjaxGetUnconfirmedTransactions(ajaxRequest);
} break;
case "NewFBID": {
AjaxResponse = AjaxNewFBID(ajaxRequest);
} break;
// Test requests
case "CreateTestTransaction": {
AjaxResponse = AjaxCreateTestTransaction(ajaxRequest);
} break;
default: {
AjaxResponse.put("error","Bad requestType.");
} break;
}
} else {
AjaxResponse.put("error","Bad announced address.");
}
response.setContentType("text");
PrintWriter ServletOutputStream = response.getWriter();
// Helper.logMessage("Response:"+AjaxResponse.toString());
AjaxResponse.put("timestamp",Helper.getEpochTimestamp());
ServletOutputStream.print(AjaxResponse.toString());
}
public void destroy() {
// do nothing.
}
public synchronized JSONObject SendJsonQuery(JSONObject request) {
JSONObject JSONresponse = new JSONObject();
// Helper.logMessage("SendJsonQuery:"+request.toString());
try {
HttpClient client = new HttpClient();
request.put("timestamp",Helper.getEpochTimestamp());
client.setBindAddress(new InetSocketAddress( InetAddress.getByName(Settings.APIhost) , 0 ));
client.start();
ContentResponse response = client.POST((String)request.get("serverURL"))
.content(new StringContentProvider(request.toString()) , "application/json; charset=UTF-8")
.send();
client.stop();
if ( response.getStatus() == HttpURLConnection.HTTP_OK ) {
try {
JSONresponse.put("Data",(JSONObject)new JSONParser().parse(response.getContentAsString()));
} catch (Exception e) {
JSONresponse.put("Error","Failed parsing returned JSON object.");
}
}
} catch (Exception e) {
JSONresponse.put("Error","Communication error.");
}
return JSONresponse;
}
private JSONObject AjaxGetAddress( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
byte[] PublicKey = new byte[32];
PublicKey = Crypto.getPublicKey((String)ajaxRequest.get("secretPhrase"));
try {
response.put("PublicKey", Helper.Base58encode((byte[]) PublicKey));
response.put("BAC Address", Helper.PublicKeyToAddress((byte[]) PublicKey));
response.put("secretPhrase", ajaxRequest.get("secretPhrase"));
response.put("requestType", ajaxRequest.get("requestType"));
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxGetAddress)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxGetInfo( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
if ( ajaxRequest.get("AnnouncedAddress").toString().length() > 0 ) {
Peer peer = Peers.peers.get(Helper.GetAnnouncementHost((String)ajaxRequest.get("AnnouncedAddress")));
if (peer == null) {
Peer.AddPeer((String)ajaxRequest.get("AnnouncedAddress"));
} else {
if (peer.PeerState == Peers.PEER_STATE_OFFLINE) {
peer.PeerState = Peers.PEER_STATE_DISCONNECTED;
}
}
}
try {
response.put("AnnouncedAddress", Peers.MyAnnouncedAddress);
response.put("Version", Settings.VERSION);
response.put("requestType", ajaxRequest.get("requestType"));
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxGetAddress)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxGetPeers( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
JSONArray PeersList = new JSONArray();
Set<String> PeersAnnouncements;
try {
synchronized (Peers.peers) {
PeersAnnouncements = ((HashMap<String, Peer>)Peers.peers.clone()).keySet();
}
for (Map.Entry<String, Peer> PeerEntry : Peers.peers.entrySet()) {
Peer peer = PeerEntry.getValue();
PeersList.add(peer.PeerAnnouncedAddress);
}
response.put("PeersList", PeersList);
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxGetPeers)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxGetAllPeerDetails( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
JSONArray PeersList = new JSONArray();
try {
synchronized (Peers.peers) {
for (Map.Entry<String, Peer> PeerEntry : Peers.peers.entrySet()) {
Peer peer = PeerEntry.getValue();
PeersList.add( "ID:"+peer.PeerID+" Announce Address:"+peer.PeerAnnouncedAddress+" State:"+peer.PeerState );
}
}
response.put("PeersList", PeersList);
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxGetAllPeerDetails)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxProcessTransactions( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
try {
Transactions.getInstance().processTransactions((JSONArray)ajaxRequest.get("ValidatedTransactions"), false );
response.put("Accepted", true);
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxProcessTransactions)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxGetUnconfirmedTransactions( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
JSONArray UnconfirmedTransactions = new JSONArray();
try {
synchronized (Transactions.UnconfirmedTransactions) {
for (Map.Entry<String, Transaction> UnconfirmedTransactionsEntry : Transactions.UnconfirmedTransactions.entrySet()) {
UnconfirmedTransactions.add(UnconfirmedTransactionsEntry.getValue().GetTransaction());
}
}
response.put("UnconfirmedTransactions", UnconfirmedTransactions);
response.put("Accepted", true);
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxProcessTransactions)");
response.put("error",1);
}
return response;
}
private JSONObject AjaxNewFBID( JSONObject ajaxRequest ) {
JSONObject response = new JSONObject();
try {
ForgeBlock forgeblock = Forge.getInstance().GetForgeBlock(ForgeBlock.FORGEBLOCK_FORGING);
if (forgeblock != null) {
if (forgeblock.NewFBID(
(String)ajaxRequest.get("ForgeFBID"),
(String)ajaxRequest.get("ForgeFBSign"),
(String)ajaxRequest.get("NodePubKey"),
(String)ajaxRequest.get("AnnouncedAddress") )) {
response.put("Accepted", true);
} else {
response.put("Accepted", false );
}
} else {
response.put("Accepted", false );
}
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxNewFBID)"+response.toString());
response.put("error",1);
}
return response;
}
private JSONObject AjaxCreateTestTransaction( JSONObject ajaxRequest ) {
Helper.logMessage("Test Transaction Created");
JSONObject response = new JSONObject();
try {
Helper.logMessage("Recipient address: "+Helper.PublicKeyToAddress(Crypto.getPublicKey("RecipientSecretPhrase")));
// B3yFMMk6zsw7ZsEhzbA<KEY>
Transaction transaction = new Transaction(
Transaction.TYPE_ORDINARY_PAYMENT, ( 5 * 60 ),
"7z1pmi6XifvGMhV7T1AxJsP8UsSVE5mP3SHKuDqd83xw",
"B3yFMMk6zsw7ZsEhzbA9nwSb7cxZw2fu", 1033, 101, null);
transaction.sign("secretPhrase");
Helper.logMessage("Transaction verify "+transaction.verify());
JSONObject peerRequest = new JSONObject();
peerRequest.put("requestType", "ProcessTransactions");
JSONArray transactionsData = new JSONArray();
transactionsData.add(transaction.GetTransaction());
peerRequest.put("ValidatedTransactions", transactionsData);
Peers peers = new Peers();
peers.SendToAllPeers(peerRequest);
response.put("Test transaction sent.", 1 );
} catch (Exception e) {
Helper.logMessage("Response error. (AjaxCreateTestTransaction)");
response.put("error",1);
}
return response;
}
} |
The LG V30 is a phone with soul.
And funk and punk. And rap and rock. And jazz and blues. This is the best-sounding smartphone that’s ever been made, and it also has LG’s finest industrial design to date. The V30 is distinctive, modern, and stacked with desirable features, a phone seemingly destined for inevitable stardom.
But the V30 is also imperfect, compromised in significant ways that don’t show up on a spec sheet or a feature checklist. I like this phone, and a big part of me really wanted to love it. However, after a month with it, I am coming away with the same conclusion I usually have about LG phones.
The $800 V30 was better as a promise on paper than a phone in real use.
Let’s start with the good stuff, because there’s plenty of it. From among the minimal-bezel phones that have proliferated in 2017, the LG V30 has my favorite design. It is a refinement and a streamlining of LG’s G6 flagship from earlier this year, and it makes the 6-inch OLED screen on the V30 feel incredibly compact. I’ve reviewed 6-inch phones in the past, such as 2013’s HTC One Max, and until recently I was confident that that size signaled an unwieldy two-handed behemoth. Instead, the V30 is a perfectly sculpted smartphone that’s easy, even pleasurable, to operate with one hand.
I don’t love the glass back, but there’s no arguing with the efficiency and ergonomics of the V30 design
Compared to the 6.2-inch Galaxy S8 Plus from Samsung, LG’s V30 is shorter, wider, and a little thinner, and those differences add up to make LG’s phone more pleasant to handle. It also helps that LG has positioned its fingerprint reader on the back of the phone in a sanely central position, unlike Samsung’s off-center calamity. The V30’s ergonomics are so good, in fact, that I expect most people will find it as comfortable to use as smaller devices like the Google Pixel and Galaxy S8. It certainly makes bezel-laden phones like HTC’s U11 and Sony’s Xperia series feel desperately unoptimized in their design. Even the freshly introduced Google Pixel 2 XL, which has the same 6-inch screen, is tangibly larger than the radically minimalist LG V30.
With glass on the front and back, the V30 joins this year’s trend of flagship phones moving beyond plastic and metal to an all-glass finish. That makes for a striking look and allows for the V30’s wireless charging, however I’m not a huge fan of the move. Previous experience with Sony’s glass-back Xperia phones has shown me how easy it is to shatter any glass surface on a phone, front or back, and the everyday scratches accumulating on my regular phone are testament to the softness of the material. LG has waterproofed the V30 to an IP68 rating, and it put the device through a battery of drop tests, but glass is still glass. It will not age handsomely like, say, full-grain leather, and the scratches it picks up will make it structurally weaker rather than giving it the gritty, lived-in feel of a pair of well-worn jeans that you might get from an aluminum phone.
All things considered, I’m happy to crown this as LG’s best industrial design so far. The bezels are superbly minimal (unlike Apple and Essential notch lovers, I appreciate the symmetry of LG’s device), the ergonomic compromises are nonexistent, and the sheer amount of goodness inside this extra-thin phone almost defies logic.
You want specs and features? LG has you covered
There’s no set list of must-have features for a 2017 flagship phone, but here are some of the most desired ones: waterproofing, wireless charging, high-resolution display (preferably OLED), dual-camera system, a long-lasting battery, the latest processor, and a generous serving of storage. The V30 ticks all those boxes. LG ships it with 64GB of expandable storage, a 3,300mAh battery, the Snapdragon 835 processor that’s practically standard-issue at the high end of Android, and one of the most mature dual-camera systems on the market. The V30 is a spec sheet champion, just like practically every flagship phone from LG ever.
The thing that differentiates the V30 from its predecessor V10 and V20 devices from LG is its design refinement. The V10 was subtly the best cameraphone of its time, and the V20 was the undisputed headphone audio champion, but both were giant, clumsy things. Up until this point, the V series was defined by beastly power in a beastly body, but the V30 marks a turning point where LG truly has made its best phone its most beautiful one as well. Everything good about the predecessors, minus the obviously technological design.
The cherry atop the LG V30 spec cake is undeniably the phone’s Hi-Fi Quad-DAC audio system. It is insanely good. I’m talking “angels descended from heaven and plucked harps inside my ears” kind of good. It’s the sort of sound that makes me extend my walks and wish for my train to be delayed, just so I could listen for a little while longer. Listening with the 1More Quad Driver earphones, I find the V30’s bass so tight and pure that I just gorge on it. Everything sounds phenomenal coming out of this phone, whether it’s classical orchestra music, Ramin Djawadi movie soundtracks, 2Pac’s pathos-laden lyrics, or the latest electronic productions from the likes of Nightmares on Wax. The only thing I don’t like are the B&O Play buds that LG bundles with the V30 in most markets: they’re mediocre.
Apple and Google may be pushing the entire mobile industry toward wireless audio, but LG is proving that the headphone jack can still be a source of great delight on a smartphone. I rate the purity, faithfulness, and power of LG’s V30 output right up there with dedicated music players such as the Fiio X5 and X7. Given how my favorite music player is the $1,000 Astell & Kern Kann, it’s not entirely crazy to think of the V30 as a cheaper high-fidelity media player with a much more modern Android interface and a lot of other good things thrown in. It just depends on how important audio is to you and your daily life.
The V30’s headphone audio competes with the best dedicated music players, but it’s still a niche benefit
Over the past month, I’ve found that, as much as I adore the V30’s sound, it just isn’t that critical to my everyday activities. Bringing the V30 and a Google Pixel on my recent trip to Bulgaria, I spent a lot more time using the Pixel’s superior camera than I did the V30’s vastly superior audio. LG is the absolute best in one category of mobile performance, but that niche is narrow. It pains me to say it, but the V30’s greatest strength is a non-essential feature — just a (very) nice to have one.
As to that all-important camera on the V30, I find it good, sometimes great, but never exceptional. LG billed the V30 as a major leap forward in imaging, what with its f/1.6 aperture and real glass lens for enhanced image clarity (on the main camera), however the actual results don’t distinguish this phone from a class of very high performers near the V30’s lofty price point. A head-to-head comparison I did between the LG V30 and HTC U11 ended up evenly split. Both phones had a tendency to slightly overexpose images, and LG had the sharper shot more often than HTC, but that was due to the former company’s continued over-reliance on artificial sharpening. This is much more restrained than it was on the LG G6, but it still leaves pictures looking less photographic. LG’s camera designers told me last month that the V30 has to do less post-processing work to improve an image simply because the new camera has better optics.
Judging by the similarities in image output from LG and Samsung phones, I’d argue there’s an identifiable Korean style of mobile photography, which is defined by crisp edges and contours of objects and aggressive noise suppression in areas of consistent color. That leads to a precise, pristine, and clinical look. It might be awesome if you’re filing expenses or engaging in visual note-taking, but it kills intricate detail in photos and kind of sucks for capturing memories you’ll treasure for a long time. I’ve been put off by the digital, inorganic appearance of many of the photos I’ve captured with the LG V30, and I much prefer the HTC U11, which turns out richer, more nuanced colors. The V30’s images often feel muted and conservative in their saturation and contrast.
Grid View Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
Jaguar E-type Zero Photo by Vlad Savov / The Verge
LG’s camera produces better videos than stills
(All photos of the Jaguar E-type Zero were shot with the LG V30)
I give credit to LG for building out a comprehensive suite of options in its camera software. The big additions this year are cinematic video filters that were developed with the help of pro videographers. They are not bad! I especially like the Summer Blockbuster filter, which makes everything look blue-orange, just like all the action flicks clogging up your local cinema. It’s one of a number of well-executed video filters that really contribute a different vibe to the stuff I film. And LG backs this up with good video performance from the V30, which has impressively steady stabilization, records crystal-clear audio, and retains a nice amount of detail. LG’s gone the extra step of adding a point-zoom function: every other phone can only zoom toward the center of the screen, but with the V30 you can select any point in the frame and have the camera smoothly zoom in on it. It’s the sort of cool extra feature that leaves an impression.
My lack of enthusiasm for LG’s wide-angle second camera on the back can be intuited from the fact I’m mentioning it last. Even with the newest optimizations and improvements — which include reducing the fisheye distortion effect by a third relative to the V20 — LG’s dual camera just doesn’t excite me. The highest-quality shots still come from the main f/1.6 camera, and I rarely find myself needing the all-encompassing field of view of the wide-angle lens. I guess you can count me in the group of people that prefer the second lens to be either monochrome, as with Huawei and the Essential Phone, or a zoomed-in telephoto as with the iPhone, OnePlus 5, Galaxy Note 8, and a bunch of others.
Almost all the core elements of the LG V30 user experience are strong. Performance is consistently fast and fluid, and battery life is at the upper end of the spectrum for the V30’s class. I can comfortably get through an intensive day’s use, even while deploying the power-hungry quad-DAC audio system. Like the HTC U11, which has also impressed me in recent times, the V30 provides great peace of mind that no matter what I’m doing, I won’t suddenly be left with a powerless phone. Having Qi wireless charging also helps in that regard, and I’ve been gleefully charging the V30 using Samsung’s wireless charging pad for the Galaxy S8 (albeit at slower speeds than Samsung phones can wirelessly charge at). Interoperable standards are a beautiful thing!
LG’s Android software is alright, nothing more than that
LG’s Android build on the V30 is stable and pretty close to the default Android user experience, but that’s about as much good as I can say about it. The settings menu is still unnecessarily convoluted and fragmented into tabs, I can’t access the apps list with a swipe up from the bottom of the home screen (a surprisingly big annoyance compared to the Google Pixel and many other Android alternatives), and LG seriously weighs its phones down with carrier bloatware. The Korean version of the V30 that I tested came with no fewer than 54 preloaded carrier apps, and the US alternatives are no better. $800 and a ton of crap I have to disable and remove? What is this, a Windows laptop from 2007?
A cautionary note on the LG software front: the company has indicated that its Android 8.0 Oreo update for the V30 (which ships with the older Android 7.1 Nougat on board) isn’t due until the end of this year. When you think about the long-term investment for an expensive phone, it’s essential to consider the likelihood of facing similar delays with future Android updates down the line. Unfortunately, LG doesn’t provide the same timely update reassurance that Google’s Pixel phones or more adroit rivals like Samsung do.
Looking on the bright side, I do enjoy the always-on display that LG has implemented with the V30. It lets me glance at the time and basic notifications as well as control my music without activating the phone. And it doesn’t seem to consume a meaningful amount of extra power, so I have no worries about it draining my battery.
You may be wondering why I have left discussion of the LG V30’s display for last. Well, that’s because I wanted you to understand the essential strengths and weaknesses of this phone before I told you the ultimate deal-breaker for me. The OLED screen of the V30 is just bad. There’s no dodging this issue, and there’s no making excuses for it. This isn’t a good display, and if your phone doesn’t have a good display it might as well be a Nokia 5110.
LG’s OLED display should have been the beautiful highlight of the V30, but instead it’s an ugly disappointment
Areas of the same color on the V30 appear blotchy: when I open up a Google Keep note, I don’t get a flat white canvas as I should, but instead I see streaks of gray, looking as if there’s an inconsistent backlight. This being an OLED display, there’s no backlight to speak of, so it’s just poor brightness uniformity across those light-emitting diodes. The same unhappy effect is even more pronounced with darker grays and colors like navy blue, and it’s amplified by the V30’s apparent inability to render color gradations smoothly. Gradients appear grainy and I see unpleasant color banding, exactly the same issues that Ars Technica encountered with a preproduction V30 device last month.
Every time I switched between the V30 and HTC’s U11 in my testing, the U11’s screen felt like a luxurious escape. It’s painfully apparent that LG’s so-called plastic OLED screens are multiple generations behind Samsung’s alternative — which graces the Galaxy S8, Galaxy Note 8, and the upcoming iPhone X — as well as significantly behind the best LCDs, as exhibited by the U11. If you’d asked me, before getting ahold of the V30, what most excited me about this phone, the bezel-deprived OLED screen would have been one of my top two or three features. I really do think LG nailed the dimensions of this phone, and I’m confident the screen technology contributes to the V30’s thinness (as well as making it compatible with Google’s budding Daydream VR ecosystem). But the V30’s desaturated, lifeless screen, addled with all of the foregoing issues, makes this a phone I find impossible to love, or even get along with on a daily basis.
Like its predecessors, the LG V30 is a phone designed to appeal to spec lovers first, though with its pretty aesthetic and great ergonomics it could have stood a chance of attracting a more mainstream audience as well. But the story of LG smartphones has always been a matter of “could have” and “should have.” It’s not that LG phones aren’t improving every year — they are, and they continue to offer cutting-edge spec sheets with each new iteration — but the improvements the company is making seem to always be accompanied by self-inflicted wounds. The LG G5 was a nice step up in design over the G4, but LG hamstrung it with a poorly conceived and quickly aborted modular accessory system. The G6 got better again, but it too lacked the final polishing touch to outshine Samsung’s more accomplished designs.
The V30 arrives half a year after the G6 and, at first blush, appears to rectify everything that ailed LG’s devices of the past. But LG rushed to put an imperfect OLED screen in its flagship phone, preferring to have the highlight spec over the superior user experience, and I can’t condone either the choice or the eventual product. This is a phone that has given me goosebumps with the astonishing quality of its headphone audio, and if I was reviewing it on the strength of music playback alone, I would say it’s one of the best media players on the market. But this is supposed to be a smartphone. And as a smartphone, the LG V30 fails to validate its high price and flagship status. |
#include "CoverageBlock.h"
// using namespace std;
// Construct a coverage block spanning [start, end).
// Baseline depths start at zero; the per-position extent vectors stay
// unallocated until the first RecordCover() call that needs them.
CoverageBlock::CoverageBlock(uint start, uint end) {
    blockStart = start;
    blockEnd = end;
    firstDepth[0] = firstDepth[1] = 0;
    // Lazily-allocated sparse delta storage (char-sized and promoted int-sized).
    blockExtents = nullptr;
    blockExtentsL = nullptr;
}
//direction -- 0=False/Neg, 1=True/Pos.
//
// Record one read covering [readStart, readEnd) against this block.
// Depth changes are stored sparsely: firstDepth[dir] counts reads already
// open at blockStart; reads starting/ending strictly inside the block bump
// per-position start/end counters in a lazily allocated vector. A compact
// unsigned-char vector (blockExtents) is used first and is promoted to an
// int vector (blockExtentsL) the first time a counter would pass 254, so
// the char counters can never overflow.
void CoverageBlock::RecordCover(uint readStart, uint readEnd, bool dir) {
    if (readStart <= blockStart && readEnd > blockStart) {
        // Read is already open at the block boundary: bump the baseline depth.
        firstDepth[dir]++;
    } else if (readStart < blockEnd) {
        // Need to increment the starts vector.
        // Position offset within the extent vector (first vector slot is
        // blockStart + 1; blockStart itself is covered by firstDepth).
        uint inc_index = readStart - blockStart - 1;
        if (blockExtentsL) { //already an int vector
            blockExtentsL->at(inc_index).start[dir]++;
        } else if (!blockExtents) { //don't have a char vector either - create first.
            blockExtents = new std::vector<start_stops>(vectorLen());
            blockExtents->at(inc_index).start[dir]++;
        } else {
            if (blockExtents->at(inc_index).start[dir] == 254) {
                // Char counter about to saturate: promote the whole vector to
                // ints, then apply this increment to the promoted copy.
                blockExtentsL = new std::vector<start_stopsL>(
                        blockExtents->begin(), blockExtents->end());
                delete blockExtents;
                blockExtents = NULL;
                blockExtentsL->at(inc_index).start[dir]++;
            } else {
                blockExtents->at(inc_index).start[dir]++;
            }
        }
    } else {
        // Read lies entirely past this block: nothing to record here.
        return;
    }
    if (readEnd >= blockEnd) {
        // Read runs to or beyond the block end: the matching "end" event
        // belongs to a later block, so record nothing more here.
        return;
    } else {
        // Need to increment the ends vector.
        // Mirrors the start-counter logic above, including promotion.
        uint inc_index = readEnd - blockStart - 1;
        if (blockExtentsL) { //already an int vector
            blockExtentsL->at(inc_index).end[dir]++;
        } else if (!blockExtents) { //don't have a char vector either - create first.
            blockExtents = new std::vector<start_stops>(vectorLen());
            blockExtents->at(inc_index).end[dir]++;
        } else {
            if (blockExtents->at(inc_index).end[dir] == 254) {
                blockExtentsL = new std::vector<start_stopsL>(
                        blockExtents->begin(), blockExtents->end());
                delete blockExtents;
                blockExtents = NULL;
                blockExtentsL->at(inc_index).end[dir]++;
            } else {
                blockExtents->at(inc_index).end[dir]++;
            }
        }
    }
    // Can Throw: Out of range exception.
}
// Accumulate a histogram (depth -> number of bases at that depth) over the
// intersection of this block with [start, end), summing both directions.
void CoverageBlock::updateCoverageHist(std::map<uint, uint> &hist, uint start,
        uint end) const {
    if (!blockExtentsL && !blockExtents) {
        // No read starts/ends inside the block: depth is constant throughout.
        // how many bases in this block?
        hist[firstDepth[0] + firstDepth[1]] += std::min(blockEnd, end)
                - std::max(blockStart, start);
    } else {
        // There are read starts and ends -- need to walk the positions from the start of this block
        // even if not in the region of interest.

        //special handling for the first base -- the one before the vector starts.
        uint depth = firstDepth[0] + firstDepth[1];
        if (start <= blockStart) {
            // use the first depth, before commencing in the vector.
            hist[depth]++;
        }
        // Index range within the extent vectors that overlaps [start, end).
        uint startindex = std::max(blockStart + 1, start) - blockStart - 1;
        uint endindex = std::min(blockEnd, end) - blockStart - 1;
        if (blockExtents) {
            // Compact (unsigned char) counters.
            for (uint i = 0; i < endindex; i++) {
                // Apply this position's start/end deltas to the running depth.
                depth += -(*blockExtents)[i].end[0] - (*blockExtents)[i].end[1]
                        + (*blockExtents)[i].start[0]
                        + (*blockExtents)[i].start[1];
                if (i >= startindex) {
                    hist[depth]++;
                }
            }
        } else {
            // Promoted (int) counters.
            for (uint i = 0; i < endindex; i++) {
                depth += -(*blockExtentsL)[i].end[0]
                        - (*blockExtentsL)[i].end[1]
                        + (*blockExtentsL)[i].start[0]
                        + (*blockExtentsL)[i].start[1];
                if (i >= startindex) {
                    hist[depth]++;
                }
            }
        }
        // When in the region of interest, update the hist each step.
    }
}
// Directional variant of updateCoverageHist(): histogram of per-base depth
// for one direction only (0 = False/Neg, 1 = True/Pos) over the
// intersection of this block with [start, end).
void CoverageBlock::updateCoverageHist(std::map<uint, uint> &hist, uint start,
        uint end, bool dir) const {
    if (!blockExtentsL && !blockExtents) {
        // No read starts/ends inside the block: depth is constant throughout.
        // how many bases in this block?
        hist[firstDepth[dir]] += std::min(blockEnd, end)
                - std::max(blockStart, start);
    } else {
        //special handling for the first base -- the one before the vector starts.
        uint depth = firstDepth[dir];
        if (start <= blockStart) {
            // use the first depth, before commencing in the vector.
            hist[depth]++;
        }
        // Index range within the extent vectors that overlaps [start, end).
        uint startindex = std::max(blockStart + 1, start) - blockStart - 1;
        uint endindex = std::min(blockEnd, end) - blockStart - 1;
        if (blockExtents) {
            // Compact (unsigned char) counters.
            for (uint i = 0; i < endindex; i++) {
                depth += -(*blockExtents)[i].end[dir]
                        + (*blockExtents)[i].start[dir];
                if (i >= startindex) {
                    hist[depth]++;
                }
            }
        } else {
            // Promoted (int) counters.
            for (uint i = 0; i < endindex; i++) {
                depth += -(*blockExtentsL)[i].end[dir]
                        + (*blockExtentsL)[i].start[dir];
                if (i >= startindex) {
                    hist[depth]++;
                }
            }
        }
    }
}
// Add per-base total depth (both directions) into arr over the intersection
// of this block with [start, end); arr[i]/covered[i] map to position start+i.
// NOTE(review): `start-1` underflows when start == 0 (uint arithmetic), so
// callers appear to use positions >= 1 -- confirm against call sites.
void CoverageBlock::updateCoverageArray(std::vector<uint> &arr,
        std::vector<bool> &covered, uint start, uint end) const {
    // Running depth entering the block, index range into the extent vectors
    // (startindex/endindex) and into the output array (startarray/endarray).
    uint depth = firstDepth[0] + firstDepth[1],
            startindex = std::max( blockStart, start-1) - blockStart,
            endindex = std::min(blockEnd, end) - blockStart,
            startarray = std::max(blockStart+1, start ) - start ,
            endarray = std::min(blockEnd, end) - start ;
    if (!blockExtentsL && !blockExtents) {
        // No read starts/ends inside the block: depth is constant throughout.
        for (uint i = startindex; i < endindex && startarray < endarray;
                i++, startarray++) {
            arr[startarray] += depth;
            covered[startarray] = true;
        }
    } else {
        // There are read starts and ends -- need to walk the positions from the start of this block
        // even if not in the region of interest.
        if (blockExtents) {
            // Compact (unsigned char) counters.
            for (uint i = 0; i < endindex && startarray < endarray; i++) {
                depth += -(*blockExtents)[i].end[0] - (*blockExtents)[i].end[1]
                        + (*blockExtents)[i].start[0]
                        + (*blockExtents)[i].start[1];
                if (i >= startindex) {
                    arr[startarray] += depth;
                    covered[startarray] = true;
                    startarray++;
                }
            }
        } else {
            // Promoted (int) counters.
            for (uint i = 0; i < endindex && startarray < endarray; i++) {
                depth += -(*blockExtentsL)[i].end[0]
                        - (*blockExtentsL)[i].end[1]
                        + (*blockExtentsL)[i].start[0]
                        + (*blockExtentsL)[i].start[1];
                if (i >= startindex) {
                    arr[startarray] += depth;
                    covered[startarray] = true;
                    startarray++;
                }
            }
        }
        // When in the region of interest, update the hist each step.
    }
}
// Directional variant of updateCoverageArray(): adds per-base depth for one
// direction only (0 = False/Neg, 1 = True/Pos) over the intersection of this
// block with [start, end); arr[i]/covered[i] map to position start+i.
// NOTE(review): `start-1` underflows when start == 0 (uint arithmetic), so
// callers appear to use positions >= 1 -- confirm against call sites.
void CoverageBlock::updateCoverageArray(std::vector<uint> &arr,
        std::vector<bool> &covered, uint start, uint end, bool dir) const {
    // BUG FIX: seed the running depth with the directional baseline
    // firstDepth[dir] only. This previously used firstDepth[0] + firstDepth[1]
    // (both directions), inflating every reported per-direction depth by the
    // opposite direction's baseline; the histogram variant
    // updateCoverageHist(..., dir) correctly uses firstDepth[dir].
    uint depth = firstDepth[dir],
            startindex = std::max( blockStart, start-1) - blockStart,
            endindex = std::min(blockEnd, end) - blockStart,
            startarray = std::max(blockStart+1, start ) - start ,
            endarray = std::min(blockEnd, end) - start ;
    if (!blockExtentsL && !blockExtents) {
        // No read starts/ends inside the block: depth is constant throughout.
        for (uint i = startindex; i < endindex && startarray < endarray;
                i++, startarray++) {
            arr[startarray] += depth;
            covered[startarray] = true;
        }
    } else {
        // There are read starts and ends -- need to walk the positions from
        // the start of this block even if not in the region of interest.
        if (blockExtents) {
            // Compact (unsigned char) counters.
            for (uint i = 0; i < endindex && startarray < endarray; i++) {
                depth += -(*blockExtents)[i].end[dir]
                        + (*blockExtents)[i].start[dir];
                if (i >= startindex) {
                    arr[startarray] += depth;
                    covered[startarray] = true;
                    startarray++;
                }
            }
        } else {
            // Promoted (int) counters.
            for (uint i = 0; i < endindex && startarray < endarray; i++) {
                depth += -(*blockExtentsL)[i].end[dir]
                        + (*blockExtentsL)[i].start[dir];
                if (i >= startindex) {
                    arr[startarray] += depth;
                    covered[startarray] = true;
                    startarray++;
                }
            }
        }
    }
}
// Dump the block's internal state (range, baseline depths, and any
// per-position start/end counters) to the given stream, for debugging.
void CoverageBlock::print(std::ostream &os) const {
    os << "Coverage block " << blockStart << " - " << blockEnd << "\n";
    os << "First depth 0 : " << firstDepth[0] << "\n";
    // BUG FIX: this line previously printed firstDepth[0] again, so the
    // direction-1 baseline was never shown.
    os << "First depth 1 : " << firstDepth[1] << "\n";
    uint i=0;
    if (blockExtents) {
        os << "BlockExtents: \n";
        // Cast the unsigned-char counters so they print as numbers, not chars.
        for (auto &a : (*blockExtents)) {
            os << i+blockStart << " " << (uint) a.start[0] << ":" << (uint) a.start[1] << " - " << (uint) a.end[0] << ":"<< (uint) a.end[1] << "\n";
            i++;
        }
    }
    if (blockExtentsL) {
        os << "BlockExtentsL: \n";
        for (auto &a : (*blockExtentsL)) {
            os << i+blockStart << " " << (uint) a.start[0] << ":" << (uint) a.start[1] << " - " << (uint) a.end[0] << ":"<< (uint) a.end[1] << "\n";
            i++;
        }
    }
}
// Stream-insertion operator: delegates to CoverageBlock::print().
std::ostream& operator<<(std::ostream &os, const CoverageBlock &cb) {
    cb.print(os);
    return os;
}
|
import { EventEmitter2 } from 'eventemitter2'
import { deleteGlobalRef, setGlobalRef } from '../client/globalUnit'
import { $_ } from '../interface/$_'
import { PO } from '../interface/PO'
import { S } from '../interface/S'
import { System } from '../system'
import { Unlisten } from '../Unlisten'
import { Dict } from './../types/Dict'
/**
 * Base class for globally addressable units. Every instance registers itself
 * in the global ref table at construction and can be attached to / detached
 * from a System and Pod. `listen()` adds reference-counted subscriptions and
 * emits `listen`/`unlisten` meta-events on the first add / last remove.
 */
export class $ extends EventEmitter2 implements $_ {
  static id: string

  /** System this unit is currently attached to (null while detached). */
  public $system: System | null = null
  /** Pod this unit is currently attached to (null while detached). */
  public $pod: PO | null = null

  /** Globally unique reference id, assigned by setGlobalRef at construction. */
  public globalId: string

  /** Interface names exposed by this unit. */
  public _: string[] = []

  /** Active `listen()` subscription count per event name. */
  protected _listener_count: Dict<number> = {}

  constructor() {
    super()

    this.globalId = setGlobalRef(this)
  }

  getGlobalId(): string {
    return this.globalId
  }

  getInterface(): string[] {
    return this._
  }

  refSystem(): S {
    return this.$system
  }

  refPod(): PO {
    return this.$pod
  }

  attach($system: System, $pod: PO): void {
    // console.log('$', 'attach', $system, $pod)

    this.$system = $system
    this.$pod = $pod

    this.emit('_attach', $system, $pod)
  }

  dettach(): void {
    this.$system = null
    this.$pod = null

    this.emit('_dettach')
  }

  /** Like prependListener, but returns an Unlisten callback to remove it. */
  _prependListener(
    event: string,
    listener: (...data: any[]) => void
  ): Unlisten {
    this.prependListener(event, listener)
    return () => {
      this.removeListener(event, listener)
    }
  }

  /** Like addListener, but returns an Unlisten callback to remove it. */
  _addListener(event: string, listener: (...data: any[]) => void): Unlisten {
    this.addListener(event, listener)
    return () => {
      this.removeListener(event, listener)
    }
  }

  /** Event names that currently have at least one `listen()` subscription. */
  getListeners(): string[] {
    return Object.keys(this._listener_count)
  }

  destroy() {
    deleteGlobalRef(this.globalId)

    this.emit('destroy')
  }

  /**
   * Reference-counted subscription: emits `listen` when an event gains its
   * first listener and `unlisten` when the returned callback removes the last.
   */
  listen(event: string, listener: (data: any) => void): Unlisten {
    this._listener_count[event] = this._listener_count[event] ?? 0
    this._listener_count[event]++

    this.addListener(event, listener)

    if (this._listener_count[event] === 1) {
      this.emit('listen', { event })
    }

    return () => {
      this._listener_count[event]--

      if (this._listener_count[event] === 0) {
        delete this._listener_count[event]

        this.emit('unlisten', { event })
      }

      this.removeListener(event, listener)
    }
  }

  listenerCount(name: string) {
    // BUG FIX: this previously returned `this.listenerCount(name)`, calling
    // itself and recursing infinitely (stack overflow on first use).
    // Delegate to EventEmitter2's implementation instead.
    return super.listenerCount(name)
  }
}
|
#ifndef VECTOR_H
#define VECTOR_H

/*
 * Generic dynamic-array ("vector") macros for any element type, based on
 * kvec.h. VECTOR_INIT(name, type) generates a struct `name` with fields
 * n (used), m (capacity), a (storage) plus a family of static inline helpers
 * (name##_new, name##_push, name##_extend, name##_destroy, ...).
 */

#include <stddef.h>   /* size_t */
#include <string.h>   /* BUG FIX: memcpy is used by _extend/_copy below but
                         was never included, relying on the includer. */

#define DEFAULT_VECTOR_SIZE 8

#if defined(_MSC_VER) || defined(__MINGW32__) || defined(__MINGW64__)
#include <malloc.h>
#else
#include <stdlib.h>
/* Portability shim: emulate MSVC's _aligned_malloc/_aligned_free on POSIX. */
static inline void *_aligned_malloc(size_t size, size_t alignment)
{
    void *p;
    int ret = posix_memalign(&p, alignment, size);
    return (ret == 0) ? p : 0;
}

static inline void _aligned_free(void *p)
{
    free(p);
}
#endif

#ifdef _MSC_VER
#define MIE_ALIGN(x) __declspec(align(x))
#else
#define MIE_ALIGN(x) __attribute__((aligned(x)))
#endif

/* 16-byte-aligned pair of identical doubles (SSE-style constant). */
#define CONST_128D(var, val) \
MIE_ALIGN(16) static const double var[2] = {(val), (val)}

// Based kvec.h, dynamic vectors of any type
#define __VECTOR_BASE(name, type) typedef struct { size_t n, m; type *a; } name; \
    /* Allocate a vector with initial capacity `size`; NULL on failure. */   \
    static inline name *name##_new_size(size_t size) {                       \
        name *array = malloc(sizeof(name));                                  \
        if (array == NULL) return NULL;                                      \
        array->n = array->m = 0;                                             \
        array->a = malloc(size * sizeof(type));                              \
        if (array->a == NULL) {                                              \
            free(array); /* BUG FIX: don't leak the header on failure */     \
            return NULL;                                                     \
        }                                                                    \
        array->m = size;                                                     \
        return array;                                                        \
    }                                                                        \
    static inline name *name##_new(void) {                                   \
        return name##_new_size(DEFAULT_VECTOR_SIZE);                         \
    }                                                                        \
    /* Like _new_size, but storage aligned to `alignment` bytes.             \
       NOTE(review): _destroy releases with free(); on MSVC, memory from     \
       _aligned_malloc must be released with _aligned_free -- confirm how    \
       aligned vectors are destroyed by callers. */                          \
    static inline name *name##_new_aligned(size_t size, size_t alignment) {  \
        name *array = malloc(sizeof(name));                                  \
        if (array == NULL) return NULL;                                      \
        array->n = array->m = 0;                                             \
        array->a = _aligned_malloc(size * sizeof(type), alignment);          \
        if (array->a == NULL) {                                              \
            free(array); /* BUG FIX: don't leak the header on failure */     \
            return NULL;                                                     \
        }                                                                    \
        array->m = size;                                                     \
        return array;                                                        \
    }                                                                        \
    /* Grow capacity to at least `size`; no-op on shrink or realloc failure. */ \
    static inline void name##_resize(name *array, size_t size) {             \
        if (size <= array->m) return;                                        \
        type *ptr = realloc(array->a, sizeof(type) * size);                  \
        if (ptr == NULL) return;                                             \
        array->a = ptr;                                                      \
        array->m = size;                                                     \
    }                                                                        \
    /* Append one element, doubling capacity when full.                      \
       NOTE: the element is silently dropped if realloc fails. */            \
    static inline void name##_push(name *array, type value) {                \
        if (array->n == array->m) {                                          \
            size_t size = array->m ? array->m << 1 : 2;                      \
            type *ptr = realloc(array->a, sizeof(type) * size);              \
            if (ptr == NULL) return;                                         \
            array->a = ptr;                                                  \
            array->m = size;                                                 \
        }                                                                    \
        array->a[array->n++] = value;                                        \
    }                                                                        \
    /* Append all of `other` to `array`. */                                  \
    static inline void name##_extend(name *array, name *other) {             \
        size_t new_size = array->n + other->n;                               \
        if (new_size > array->m) name##_resize(array, new_size);             \
        memcpy(array->a + array->n, other->a, other->n * sizeof(type));      \
        array->n = new_size;                                                 \
    }                                                                        \
    /* Remove the last element (no-op when empty). */                        \
    static inline void name##_pop(name *array) {                             \
        if (array->n > 0) array->n--;                                        \
    }                                                                        \
    /* Logically empty the vector; capacity is retained. */                  \
    static inline void name##_clear(name *array) {                           \
        array->n = 0;                                                        \
    }                                                                        \
    /* Overwrite dst with the first n elements of src. */                    \
    static inline void name##_copy(name *dst, name *src, size_t n) {         \
        if (dst->m < n) name##_resize(dst, n);                               \
        memcpy(dst->a, src->a, n * sizeof(type));                            \
        dst->n = n;                                                          \
    }                                                                        \
    /* Allocate a new vector holding the first n elements of `vector`. */    \
    static inline name *name##_new_copy(name *vector, size_t n) {            \
        name *cpy = name##_new_size(n);                                      \
        if (cpy == NULL) return NULL; /* BUG FIX: avoid NULL dereference */   \
        name##_copy(cpy, vector, n);                                         \
        return cpy;                                                          \
    }

#define __VECTOR_DESTROY(name, type)                                         \
    /* Free storage and header; safe on NULL. */                             \
    static inline void name##_destroy(name *array) {                         \
        if (array == NULL) return;                                           \
        if (array->a != NULL) free(array->a);                                \
        free(array);                                                         \
    }

#define __VECTOR_DESTROY_FREE_DATA(name, type, free_func)                    \
    /* Free each element with free_func, then storage and header. */         \
    static inline void name##_destroy(name *array) {                         \
        if (array == NULL) return;                                           \
        if (array->a != NULL) {                                              \
            /* BUG FIX: index is size_t (n is size_t); the old `int i`       \
               caused a signed/unsigned comparison. */                       \
            for (size_t i = 0; i < array->n; i++) {                          \
                free_func(array->a[i]);                                      \
            }                                                                \
        }                                                                    \
        free(array->a);                                                      \
        free(array);                                                         \
    }

#define VECTOR_INIT(name, type)    \
    __VECTOR_BASE(name, type)      \
    __VECTOR_DESTROY(name, type)

#define VECTOR_INIT_FREE_DATA(name, type, free_func)  \
    __VECTOR_BASE(name, type)                         \
    __VECTOR_DESTROY_FREE_DATA(name, type, free_func)

#endif
|
<filename>notebooks/print_distributed_state.py
import sys
sys.path.insert(0, '../build/lib')
import intelqs_py as iqs
import numpy as np
def run_circuit(num_qubits):
    """Build a register and apply Hadamard then Rz(pi/3) to every qubit."""
    reg = iqs.QubitRegister(num_qubits, 'base', 0, 0)
    for qubit in range(num_qubits):
        reg.ApplyHadamard(qubit)
        reg.ApplyRotationZ(qubit, np.pi / 3)
    return reg
if __name__ == '__main__':
    # Require the qubit count as the single command-line argument.
    if len(sys.argv) <= 1:
        print('Usage: {} <num_qubits>'.format(sys.argv[0]))
        sys.exit(0)
    num_qubits = int(sys.argv[1])

    # Initialize the (possibly MPI-distributed) IQS environment.
    iqs.EnvInit()
    # Ranks that take no part in the state simulation finalize and exit early.
    if iqs.MPIEnvironment.IsUsefulRank()==False:
        iqs.EnvFinalize()
        exit()
    # The simulation of a N-qubit system cannot be divided in more than 2^(N-1) ranks.
    if iqs.MPIEnvironment.GetStateSize()>2**(num_qubits-1) or num_qubits<1:
        if iqs.MPIEnvironment.GetRank()==0:
            print("No more than 2^(N-1) useful ranks for a N-qubit state.")
        iqs.EnvFinalize()
        exit()

    reg = run_circuit(num_qubits)
    # copy=False gives a view onto this rank's local slice of the state vector.
    state_vector = np.array(reg, copy=False)
    rank = iqs.MPIEnvironment.GetRank()
    # Each rank prints only its local amplitudes; skip printing for large states.
    if num_qubits<7:
        print('\nFinal state at rank {}: {}'.format(rank, state_vector))
    elif rank==0:
        print('\nState too large to be printed, try 4 qubits :-)\n')
    # Always finalize the environment before exiting.
    iqs.EnvFinalize()
# SAMPLE RUNS:
## Single-Process:
'''
python python_mpi.py 4
[|s0>: 0] world_rank: 0 , state_rank: 0 (state 0 of 1) my_node_id: 0 , num_nodes: 1 , ranks/node: 1 , threads/rank: 72 --useful
Final state at rank 0: [0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j
0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j
0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j
0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j]
'''
## Multi-Process:
'''
mpiexec.hydra -n 2 python python_mpi.py 4
[|s0>: 0] world_rank: 0 , state_rank: 0 (state 0 of 1) my_node_id: 0 , num_nodes: 1 , ranks/node: 2 , threads/rank: 36 --useful
[|s0>: 1] world_rank: 1 , state_rank: 1 (state 0 of 1) my_node_id: 0 , num_nodes: 1 , ranks/node: 2 , threads/rank: 36 --useful
Final state at rank 0: [0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j
0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j 0.21650635-0.125j]
Final state at rank 1: [0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j
0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j 0.21650635+0.125j]
'''
|
<gh_stars>0
"""
This is RT source plugin for deoplete. It completes RequestTracker numbers from
a cache file.
# Install:
1. Copy the file to $HOME/.vim/bundle/deoplete.nvim/rplugin/python3/deoplete/sources/
2. pip install regex (https://pypi.python.org/pypi/regex supports cool fuzzy matching)
"""
from .base import Base
from jira_rt_completion_server.deoplete_rt import DeopleteSourceRT
class Source(Base):
    """Deoplete completion source for RequestTracker ticket numbers."""

    def __init__(self, vim):
        Base.__init__(self, vim)

        self._source = DeopleteSourceRT()

        self.debug_enabled = True
        self.name = 'request_tracker'
        self.mark = '[RT]'
        #self.kind = 'keyword'
        #self.min_pattern_length = 2

        # This source filters and sorts candidates itself.
        self.is_volatile = True
        self.matchers = []
        self.sorters = []
        # Alternative: let deoplete do the matching/sorting instead:
        #self.matchers = ['matcher_fuzzy', 'matcher_full_fuzzy']
        #self.sorters = ['sorter_rank']
        #self.converters = []

        self.max_menu_width = 120
        self.max_abbr_width = 120

        self.input_pattern = self._source.input_pattern

    def get_complete_position(self, context):
        # Delegate to the shared completion backend.
        return self._source.get_complete_position(context)

    def gather_candidates(self, context):
        # Asynchronous gathering: flag the context as async until ready.
        is_ready, candidates = self._source.async_gather_candidates(context)
        context['is_async'] = not is_ready
        return candidates

    def old_gather_candidates(self, context):
        # Legacy synchronous path, kept for reference.
        return self._source.gather_candidates(context)

    # def on_post_filter(self, context):
    #     return self._source.on_post_filter(context)
|
def sign_file(file_obj):
    """Sign `file_obj` via the signing service and update its database row.

    Returns the file object (unchanged for files that are skipped: legacy
    search plugins and already-Mozilla-signed extensions). Raises
    SigningError when signing is disabled, the file is missing on disk, or
    the version does not target a supported Firefox.
    """
    # Local import to avoid a circular import with olympia.versions.
    from olympia.versions.tasks import extract_version_to_git

    # Legacy (non-webextension) search plugins are never signed.
    if (file_obj.version.addon.type == amo.ADDON_SEARCH and
            file_obj.version.is_webextension is False):
        return file_obj

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))

    # We only sign files that are compatible with Firefox.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError(u'File {0} doesn\'t exist on disk'.format(
            file_obj.current_file_path))

    # Don't sign hotfixes.
    if file_obj.is_mozilla_signed_extension:
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return file_obj

    if not supports_firefox(file_obj):
        raise SigningError(
            u'Not signing version {0}: not for a Firefox version we support'
            .format(file_obj.version.pk))

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = str(call_signing(file_obj))

    # The file on disk was rewritten by signing, so refresh size and hash.
    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))

    # Optionally mirror the signed version into git storage after the
    # surrounding DB transaction commits.
    if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
        transaction.on_commit(
            lambda: extract_version_to_git.delay(
                version_id=file_obj.version.pk,
                note='after successful signing'))

    return file_obj
<filename>src/app/_services/group.service.ts<gh_stars>1-10
import { Injectable } from '@angular/core';
import { Observable, Observer } from 'rxjs';
import { HttpManagerService } from './http-manager.service';
import { BasicGroup } from '../_models/group/basic-group';
import * as _ from 'underscore';
@Injectable({
	providedIn: 'root'
})
export class GroupService {
	// Only stores basic information of groups
	// NOTE groups are only stored in same tab, if groups should be stored across tabs, use local storage, but then, garbage collection / cache expiration is necessary
	public groups: BasicGroup[] = [];

	constructor(
		private httpManagerService: HttpManagerService
	) { }

	/**
	 * @desc: Gets basic group from cache (synchronous; undefined if not cached)
	 */
	public getBasicGroupFromCache(groupId: string): BasicGroup {
		return this.groups.find((group) => group.groupId == groupId);
	}

	/**
	 * @desc: Gets basic information about group from either server (first load or expired cache) or cache (second+ load)
	 */
	public getBasicGroupAsync(groupId: string): Observable<BasicGroup> {
		// Create Observable, such that subscribe can be used, after this function was called.
		// Uses `new Observable(...)`: `Observable.create(...)` is deprecated in RxJS 6+.
		return new Observable((observer: Observer<BasicGroup>) => {
			// Try to get group from cache
			const cachedGroupIndex = this.groups.findIndex(group => group.groupId == groupId);

			// If group was found in cache and was not expired inbetween, return it
			// Note: Groups in finished or rejected topics are always taken from the database (expiration equals last deadline)
			// We assume that these groups are not opened very often
			if (cachedGroupIndex != -1 && this.groups[cachedGroupIndex].expiration > Date.now()) {
				// Hand over to next subscription
				observer.next(this.groups[cachedGroupIndex]);
				observer.complete();
			// If group was not cached, get it from server, store it in cache and return it
			} else {
				return this.httpManagerService.get('/json/group/basic/' + groupId).subscribe((group) => {
					const groupFromDb = new BasicGroup(group);

					// If group was not cached before, add group to cache
					if (cachedGroupIndex == -1) {
						this.groups.push(groupFromDb);
					// If group was cached, update cache
					} else {
						this.groups[cachedGroupIndex] = groupFromDb;
					}

					// Hand over to next subscription
					observer.next(groupFromDb);
					observer.complete();
				});
			}
		});
	}

	/**
	 * @desc: Gets the ratings of all group members, used for members tab in group
	 */
	public getMembersRatings(groupId: string): Observable<any> {
		return this.httpManagerService.get('/json/group/ratings/' + groupId);
	}
}
|
<filename>blades/xbmc/tools/EventClients/Clients/WiiRemote/wiiuse_v0.12/example/example.c
/*
* wiiuse
*
* Written By:
* <NAME> < para >
* Email: < thepara (--AT--) g m a i l [--DOT--] com >
*
* Copyright 2006-2007
*
* This file is part of wiiuse.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* $Header$
*
*/
/**
* @file
*
* @brief Example using the wiiuse API.
*
* This file is an example of how to use the wiiuse library.
*/
#include <stdio.h>
#include <stdlib.h>
#ifndef WIN32
#include <unistd.h>
#endif
#include "wiiuse.h"
#define MAX_WIIMOTES 4
/**
* @brief Callback that handles an event.
*
* @param wm Pointer to a wiimote_t structure.
*
* This function is called automatically by the wiiuse library when an
* event occurs on the specified wiimote.
*/
void handle_event(struct wiimote_t* wm) {
	printf("\n\n--- EVENT [id %i] ---\n", wm->unid);

	/* if a button is pressed, report it */
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_A)) printf("A pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_B)) printf("B pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_UP)) printf("UP pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_DOWN)) printf("DOWN pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_LEFT)) printf("LEFT pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_RIGHT)) printf("RIGHT pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_MINUS)) printf("MINUS pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_PLUS)) printf("PLUS pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_ONE)) printf("ONE pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_TWO)) printf("TWO pressed\n");
	if (IS_PRESSED(wm, WIIMOTE_BUTTON_HOME)) printf("HOME pressed\n");

	/*
	 * Pressing minus will tell the wiimote we are no longer interested in movement.
	 * This is useful because it saves battery power.
	 */
	if (IS_JUST_PRESSED(wm, WIIMOTE_BUTTON_MINUS))
		wiiuse_motion_sensing(wm, 0);

	/*
	 * Pressing plus will tell the wiimote we are interested in movement.
	 */
	if (IS_JUST_PRESSED(wm, WIIMOTE_BUTTON_PLUS))
		wiiuse_motion_sensing(wm, 1);

	/*
	 * Pressing B will toggle the rumble
	 *
	 * if B is pressed but is not held, toggle the rumble
	 */
	if (IS_JUST_PRESSED(wm, WIIMOTE_BUTTON_B))
		wiiuse_toggle_rumble(wm);

	/* UP enables IR tracking, DOWN disables it */
	if (IS_JUST_PRESSED(wm, WIIMOTE_BUTTON_UP))
		wiiuse_set_ir(wm, 1);
	if (IS_JUST_PRESSED(wm, WIIMOTE_BUTTON_DOWN))
		wiiuse_set_ir(wm, 0);

	/* if the accelerometer is turned on then print angles */
	/* NOTE(review): a_roll/a_pitch look like the accelerometer-only estimates
	   printed next to the smoothed roll/pitch — confirm against wiiuse.h */
	if (WIIUSE_USING_ACC(wm)) {
		printf("wiimote roll  = %f [%f]\n", wm->orient.roll, wm->orient.a_roll);
		printf("wiimote pitch = %f [%f]\n", wm->orient.pitch, wm->orient.a_pitch);
		printf("wiimote yaw   = %f\n", wm->orient.yaw);
	}

	/*
	 * If IR tracking is enabled then print the coordinates
	 * on the virtual screen that the wiimote is pointing to.
	 *
	 * Also make sure that we see at least 1 dot.
	 */
	if (WIIUSE_USING_IR(wm)) {
		int i = 0;

		/* go through each of the 4 possible IR sources */
		for (; i < 4; ++i) {
			/* check if the source is visible */
			if (wm->ir.dot[i].visible)
				printf("IR source %i: (%u, %u)\n", i, wm->ir.dot[i].x, wm->ir.dot[i].y);
		}

		printf("IR cursor: (%u, %u)\n", wm->ir.x, wm->ir.y);
		printf("IR z distance: %f\n", wm->ir.z);
	}

	/* show events specific to supported expansions */
	if (wm->exp.type == EXP_NUNCHUK) {
		/* nunchuk */
		struct nunchuk_t* nc = (nunchuk_t*)&wm->exp.nunchuk;

		if (IS_PRESSED(nc, NUNCHUK_BUTTON_C)) printf("Nunchuk: C pressed\n");
		if (IS_PRESSED(nc, NUNCHUK_BUTTON_Z)) printf("Nunchuk: Z pressed\n");

		printf("nunchuk roll  = %f\n", nc->orient.roll);
		printf("nunchuk pitch = %f\n", nc->orient.pitch);
		printf("nunchuk yaw   = %f\n", nc->orient.yaw);
		printf("nunchuk joystick angle:     %f\n", nc->js.ang);
		printf("nunchuk joystick magnitude: %f\n", nc->js.mag);
	} else if (wm->exp.type == EXP_CLASSIC) {
		/* classic controller */
		struct classic_ctrl_t* cc = (classic_ctrl_t*)&wm->exp.classic;

		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_ZL)) printf("Classic: ZL pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_B)) printf("Classic: B pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_Y)) printf("Classic: Y pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_A)) printf("Classic: A pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_X)) printf("Classic: X pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_ZR)) printf("Classic: ZR pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_LEFT)) printf("Classic: LEFT pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_UP)) printf("Classic: UP pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_RIGHT)) printf("Classic: RIGHT pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_DOWN)) printf("Classic: DOWN pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_FULL_L)) printf("Classic: FULL L pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_MINUS)) printf("Classic: MINUS pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_HOME)) printf("Classic: HOME pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_PLUS)) printf("Classic: PLUS pressed\n");
		if (IS_PRESSED(cc, CLASSIC_CTRL_BUTTON_FULL_R)) printf("Classic: FULL R pressed\n");

		/* analog shoulder buttons and both joysticks */
		printf("classic L button pressed:         %f\n", cc->l_shoulder);
		printf("classic R button pressed:         %f\n", cc->r_shoulder);
		printf("classic left joystick angle:      %f\n", cc->ljs.ang);
		printf("classic left joystick magnitude:  %f\n", cc->ljs.mag);
		printf("classic right joystick angle:     %f\n", cc->rjs.ang);
		printf("classic right joystick magnitude: %f\n", cc->rjs.mag);
	} else if (wm->exp.type == EXP_GUITAR_HERO_3) {
		/* guitar hero 3 guitar */
		struct guitar_hero_3_t* gh3 = (guitar_hero_3_t*)&wm->exp.gh3;

		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_STRUM_UP)) printf("Guitar: Strum Up pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_STRUM_DOWN)) printf("Guitar: Strum Down pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_YELLOW)) printf("Guitar: Yellow pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_GREEN)) printf("Guitar: Green pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_BLUE)) printf("Guitar: Blue pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_RED)) printf("Guitar: Red pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_ORANGE)) printf("Guitar: Orange pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_PLUS)) printf("Guitar: Plus pressed\n");
		if (IS_PRESSED(gh3, GUITAR_HERO_3_BUTTON_MINUS)) printf("Guitar: Minus pressed\n");

		printf("Guitar whammy bar:          %f\n", gh3->whammy_bar);
		printf("Guitar joystick angle:      %f\n", gh3->js.ang);
		printf("Guitar joystick magnitude:  %f\n", gh3->js.mag);
	}
}
/**
* @brief Callback that handles a read event.
*
* @param wm Pointer to a wiimote_t structure.
* @param data Pointer to the filled data block.
* @param len Length in bytes of the data block.
*
* This function is called automatically by the wiiuse library when
* the wiimote has returned the full data requested by a previous
* call to wiiuse_read_data().
*
* You can read data on the wiimote, such as Mii data, if
* you know the offset address and the length.
*
* The \a data pointer was specified on the call to wiiuse_read_data().
* At the time of this function being called, it is not safe to deallocate
* this buffer.
*/
/*
 * Hex-dump a completed wiiuse read: print a header with the wiimote id and
 * the byte count, then the buffer contents, 16 bytes per row.
 */
void handle_read(struct wiimote_t* wm, byte* data, unsigned short len) {
	int offset;

	printf("\n\n--- DATA READ [wiimote id %i] ---\n", wm->unid);
	printf("finished read of size %i\n", len);

	for (offset = 0; offset < len; ++offset) {
		/* break to a new row every 16 bytes */
		if ((offset % 16) == 0)
			printf("\n");
		printf("%x ", data[offset]);
	}
	printf("\n\n");
}
/**
 * @brief Callback that handles a controller status event.
 *
 * @param wm Pointer to a wiimote_t structure.
 *
 * This occurs when either the controller status changed
 * or the controller status was requested explicitly by
 * wiiuse_status().
 *
 * The reported status covers the attached expansion (if any), whether the
 * speaker and IR support are enabled, which LEDs are lit, and the battery
 * level (between 0.0 for 0% and 1.0 for 100%) — all read from the wiimote_t
 * structure.
 *
 * One reason the status can change is if the nunchuk was
 * inserted or removed from the expansion port.
 */
void handle_ctrl_status(struct wiimote_t* wm) {
	printf("\n\n--- CONTROLLER STATUS [wiimote id %i] ---\n", wm->unid);

	/* attached expansion type (wm->exp.type, an EXP_* value) */
	printf("attachment: %i\n", wm->exp.type);
	printf("speaker: %i\n", WIIUSE_USING_SPEAKER(wm));
	printf("ir: %i\n", WIIUSE_USING_IR(wm));
	/* one flag per LED slot 1..4 */
	printf("leds: %i %i %i %i\n", WIIUSE_IS_LED_SET(wm, 1), WIIUSE_IS_LED_SET(wm, 2), WIIUSE_IS_LED_SET(wm, 3), WIIUSE_IS_LED_SET(wm, 4));
	/* battery_level is a fraction (0.0 - 1.0), see the doc comment above */
	printf("battery: %f %%\n", wm->battery_level);
}
/**
* @brief Callback that handles a disconnection event.
*
* @param wm Pointer to a wiimote_t structure.
*
* This can happen if the POWER button is pressed, or
* if the connection is interrupted.
*/
void handle_disconnect(wiimote* wm) {
	/* wm is still dereferenceable at this point; only report the id */
	printf("\n\n--- DISCONNECTED [wiimote id %i] ---\n", wm->unid);
}
/**
 * @brief Debug callback that inspects a raw data read.
 *
 * @param wm   Pointer to a wiimote_t structure (unused).
 * @param data Pointer to the returned data block.
 * @param len  Length in bytes of the data block.
 *
 * Prints the length and the first four bytes of the buffer.
 * Guards against buffers shorter than four bytes, which the original
 * version would read past the end of.
 */
void test(struct wiimote_t* wm, byte* data, unsigned short len) {
	if (len < 4) {
		printf("test: %i (short read)\n", len);
		return;
	}
	printf("test: %i [%x %x %x %x]\n", len, data[0], data[1], data[2], data[3]);
}
/**
 * @brief main()
 *
 * Connect to up to MAX_WIIMOTES (4) wiimotes and print any events
 * that occur on any connected device.
 */
int main(int argc, char** argv) {
	wiimote** wiimotes;
	int found, connected;

	/*
	 * Initialize an array of wiimote objects.
	 *
	 * The parameter is the number of wiimotes I want to create.
	 */
	wiimotes = wiiuse_init(MAX_WIIMOTES);

	/*
	 * Find wiimote devices
	 *
	 * Now we need to find some wiimotes.
	 * Give the function the wiimote array we created, and tell it there
	 * are MAX_WIIMOTES wiimotes we are interested in.
	 *
	 * Set the timeout to be 5 seconds.
	 *
	 * This will return the number of actual wiimotes that are in discovery mode.
	 */
	found = wiiuse_find(wiimotes, MAX_WIIMOTES, 5);
	if (!found) {
		printf("No wiimotes found.");
		return 0;
	}

	/*
	 * Connect to the wiimotes
	 *
	 * Now that we found some wiimotes, connect to them.
	 * Give the function the wiimote array and the number
	 * of wiimote devices we found.
	 *
	 * This will return the number of established connections to the found wiimotes.
	 */
	connected = wiiuse_connect(wiimotes, MAX_WIIMOTES);
	if (connected)
		printf("Connected to %i wiimotes (of %i found).\n", connected, found);
	else {
		printf("Failed to connect to any wiimote.\n");
		return 0;
	}

	/*
	 * Now set the LEDs and rumble for a second so it's easy
	 * to tell which wiimotes are connected (just like the wii does).
	 *
	 * NOTE(review): LEDs are assigned to all four slots, but rumble is
	 * only toggled on the first two below.
	 */
	wiiuse_set_leds(wiimotes[0], WIIMOTE_LED_1);
	wiiuse_set_leds(wiimotes[1], WIIMOTE_LED_2);
	wiiuse_set_leds(wiimotes[2], WIIMOTE_LED_3);
	wiiuse_set_leds(wiimotes[3], WIIMOTE_LED_4);
	wiiuse_rumble(wiimotes[0], 1);
	wiiuse_rumble(wiimotes[1], 1);

	#ifndef WIN32
		usleep(200000);
	#else
		Sleep(200);
	#endif

	wiiuse_rumble(wiimotes[0], 0);
	wiiuse_rumble(wiimotes[1], 0);

	/*
	 * Maybe I'm interested in the battery power of the 0th
	 * wiimote. This should be WIIMOTE_ID_1 but to be sure
	 * you can get the wiimote assoicated with WIIMOTE_ID_1
	 * using the wiiuse_get_by_id() function.
	 *
	 * A status request will return other things too, like
	 * if any expansions are plugged into the wiimote or
	 * what LEDs are lit.
	 */
	//wiiuse_status(wiimotes[0]);

	/*
	 * This is the main loop
	 *
	 * wiiuse_poll() needs to be called with the wiimote array
	 * and the number of wiimote structures in that array
	 * (it doesn't matter if some of those wiimotes are not used
	 * or are not connected).
	 *
	 * This function will set the event flag for each wiimote
	 * when the wiimote has things to report.
	 *
	 * NOTE(review): this loop never exits, so the wiiuse_cleanup()
	 * call below is only reached if a break is added here.
	 */
	while (1) {
		if (wiiuse_poll(wiimotes, MAX_WIIMOTES)) {
			/*
			 * This happens if something happened on any wiimote.
			 * So go through each one and check if anything happened.
			 */
			int i = 0;
			for (; i < MAX_WIIMOTES; ++i) {
				switch (wiimotes[i]->event) {
					case WIIUSE_EVENT:
						/* a generic event occured */
						handle_event(wiimotes[i]);
						break;

					case WIIUSE_STATUS:
						/* a status event occured */
						handle_ctrl_status(wiimotes[i]);
						break;

					case WIIUSE_DISCONNECT:
					case WIIUSE_UNEXPECTED_DISCONNECT:
						/* the wiimote disconnected */
						handle_disconnect(wiimotes[i]);
						break;

					case WIIUSE_READ_DATA:
						/*
						 * Data we requested to read was returned.
						 * Take a look at wiimotes[i]->read_req
						 * for the data.
						 */
						break;

					case WIIUSE_NUNCHUK_INSERTED:
						/*
						 * a nunchuk was inserted
						 * This is a good place to set any nunchuk specific
						 * threshold values. By default they are the same
						 * as the wiimote.
						 */
						//wiiuse_set_nunchuk_orient_threshold((struct nunchuk_t*)&wiimotes[i]->exp.nunchuk, 90.0f);
						//wiiuse_set_nunchuk_accel_threshold((struct nunchuk_t*)&wiimotes[i]->exp.nunchuk, 100);
						printf("Nunchuk inserted.\n");
						break;

					case WIIUSE_CLASSIC_CTRL_INSERTED:
						printf("Classic controller inserted.\n");
						break;

					case WIIUSE_GUITAR_HERO_3_CTRL_INSERTED:
						/* some expansion was inserted */
						handle_ctrl_status(wiimotes[i]);
						printf("Guitar Hero 3 controller inserted.\n");
						break;

					case WIIUSE_NUNCHUK_REMOVED:
					case WIIUSE_CLASSIC_CTRL_REMOVED:
					case WIIUSE_GUITAR_HERO_3_CTRL_REMOVED:
						/* some expansion was removed */
						handle_ctrl_status(wiimotes[i]);
						printf("An expansion was removed.\n");
						break;

					default:
						break;
				}
			}
		}
	}

	/*
	 * Disconnect the wiimotes
	 */
	wiiuse_cleanup(wiimotes, MAX_WIIMOTES);

	return 0;
}
|
Psychological and psychophysiological factors in prevention and treatment of cold injuries.
Cold injured patients in Alaska come from many sources. Although sport and work continue to provide large numbers of cold injured, most severe repeat injuries tend to reflect other biopsychosocial consequences. Certain behaviors can increase the probability of injury; however, all persons living in cold climates are potential candidates. One can decrease risk by education, knowledge and intelligent behavior. Proper respect for adequate protection and hydration seems to be a critical factor. Understanding the psychological, physiological and psychophysiological aspects of the cold environment performer helps refine the prevention and treatment strategies for cold injury. Skill training with bio-behavioral methods, such as thermal biofeedback, and the value of medical psychotherapy appear to offer continued promise by facilitating physiologic recovery from injury, as well as assisting in long term rehabilitation. Both approaches increase the likelihood of a favorable healing response by soliciting active patient participation. Medical psychotherapy for traumatic injuries can also help identify and manage cognitive-emotional issues for families and patients faced with the permanent consequences of severe thermal injuries. Thermal biofeedback therapy has the potential benefit of encouraging greater self-reliance and responsibility for self-regulating overall health by integrating self-management skills regarding physiology, diet and lifestyle. Inpatient and outpatient biofeedback training offers specific influence over vascular responses for healing, as well as providing an effective tool for pain management. Interest in cold region habitation has continued to expand our study of human tolerance to harsh, extreme environments. Biological, psychological, sociological, and anthropological views on adaptation, habituation, acclimatization, and injury in cold environments acknowledge the role of development, learning and educated responses to cold environments. 
The study of health, performance, and injury prevention in extreme isolated cold environments has important strategic and scientific implications. What is learned from behavioral studies of cold survival provides an opportunity to increase our scientific knowledge and understanding. These cold research findings can assist in our future exploration of cold, underwater farming at great depths, and far-distance space travel to cold planets. The relatively new research frontier "Polar Psychology" has evolved to study how interactions with cold environments can have both positive and/or negative consequences. This research simulates the psychological factors likely to be encountered while exploring isolated cold regions of distant galaxies. The psychological and psychophysiological correlates of cold experience appear to be a function of four interactive issues: the environment, genetic predisposition, learning or experience, and finally perception or cognition. Individual cold tolerance seems to depend heavily on sensation, perception and behavior. (ABSTRACT TRUNCATED AT 400 WORDS)
// High configures a GPIO signal as high.
func (gpio *GPIO) High() {
register := PeripheralAddress(GPSET0 + 4*uint32(gpio.num/32))
shift := uint32(gpio.num % 32)
reg.Write(register, 1<<shift)
} |
<gh_stars>0
#ifndef THORS_ANVIL_SERIALIZE_BINARY_PARSER_TPP
#define THORS_ANVIL_SERIALIZE_BINARY_PARSER_TPP
#include "SerializeConfig.h"
#ifdef NETWORK_BYTE_ORDER
#include "Traits.h"
namespace ThorsAnvil
{
namespace Serialize
{
// Specialization used when the type R being added is a plain Map:
// simply register R's declared members on the parser object.
template<typename T>
template<typename R>
class BinaryParserMapParentCommon<T>::MemberAdder<R, TraitType::Map>
{
    public:
        void operator()(BinaryParserMapParentCommon<T>& obj)
        {
            using Traits = ThorsAnvil::Serialize::Traits<typename std::remove_reference<R>::type>;
            obj.fill(Traits::getMembers());
        }
};
// Specialization used when the type R being added itself declares a parent:
// first add the members inherited from R's parent (recursively), then R's
// own members.
template<typename T>
template<typename R>
class BinaryParserMapParentCommon<T>::MemberAdder<R, TraitType::Parent>
{
    public:
        void operator()(BinaryParserMapParentCommon<T>& obj)
        {
            using RawR = typename std::remove_reference<R>::type;
            // Recurse on R's parent, not T's. The previous code used
            // Traits<T>::Parent here, which re-selects T's immediate
            // parent at every recursion level, so a hierarchy deeper
            // than one parent never terminates.
            MemberAdder<typename ThorsAnvil::Serialize::Traits<RawR>::Parent> addParent;
            addParent(obj);
            using Traits = ThorsAnvil::Serialize::Traits<RawR>;
            obj.fill(Traits::getMembers());
        }
};
// Build the parser state for T: record the start/end/next tokens in the
// base class, then pre-compute the key list and per-member sub-parsers
// via MemberAdder (dispatched on Traits<T>::type).
template<typename T>
BinaryParserMapParentCommon<T>::BinaryParserMapParentCommon(bool root, ParserToken first, ParserToken last, ParserToken nextValue)
    : BinaryParserUtilBase( root, first, last, nextValue)
{
    MemberAdder<T> addMembers;
    addMembers(*this);
}
// Register a single (name, member-pointer) pair:
//  * remember the key name, and
//  * if the member's declared type needs structured parsing
//    (Map/Array/Parent), create a dedicated sub-parser for it;
//    otherwise store nullptr so the value is read directly.
template<typename T>
template<typename P>
void BinaryParserMapParentCommon<T>::addMember(std::pair<char const*, P> const& token)
{
    keys.emplace_back(token.first);

    // Deduce the declared type of the member the pointer-to-member refers to.
    using DestTypeBase = decltype(((T*)nullptr)->*(token.second));
    using DestType = typename std::remove_reference<DestTypeBase>::type;

    if ( ThorsAnvil::Serialize::Traits<DestType>::type == TraitType::Map
      || ThorsAnvil::Serialize::Traits<DestType>::type == TraitType::Array
      || ThorsAnvil::Serialize::Traits<DestType>::type == TraitType::Parent)
    {
        utils.emplace_back(new BinaryParserUtil<DestType>(false));
    }
    else
    {
        utils.emplace_back(nullptr);
    }
}
// Expand the members tuple: call addMember() once per element, using the
// braced-init-list trick to force left-to-right evaluation order.
template<typename T>
template<typename Tuple, std::size_t... Seq>
void BinaryParserMapParentCommon<T>::fillMembers(Tuple const& members, std::index_sequence<Seq...> const&)
{
    auto discard = {(addMember(std::get<Seq>(members)),1)...};
    (void)discard;
}
// Entry point used by MemberAdder: forward the members tuple together
// with a matching index sequence to fillMembers().
template<typename T>
template<typename... M>
void BinaryParserMapParentCommon<T>::fill(std::tuple<M...> const& members)
{
    fillMembers(members, std::make_index_sequence<sizeof...(M)>());
}
// If the member at 'position' has a dedicated sub-parser, transfer it onto
// the parser state stack and delegate token generation to it; otherwise
// return the default token 'norm'.
// NOTE(review): the sub-parser is moved out of 'utils', leaving that slot
// empty — so each member's structured parser is pushed at most once.
template<typename T>
ParserToken BinaryParserMapParentCommon<T>
    ::pushNextState( std::size_t position,
                     ParserInterface& parser,
                     ParserState& state,
                     ParserToken norm)
{
    if (utils[position].get())
    {
        state.push_back(std::move(utils[position]));
        return state.back()->getNextToken(parser, state);
    }
    return norm;
}
// A Map type parses as: MapStart, then Key/value pairs, then MapEnd.
template<typename T>
BinaryParserUtil<T, TraitType::Map>::BinaryParserUtil(bool root)
    : BinaryParserMapParentCommon<T>(root,
                                     ParserToken::MapStart,
                                     ParserToken::MapEnd,
                                     ParserToken::Key)
{}
// A type with a parent parses exactly like a Map (the common base merges
// the parent's members into the key list).
template<typename T>
BinaryParserUtil<T, TraitType::Parent>::BinaryParserUtil(bool root)
    : BinaryParserMapParentCommon<T>(root,
                                     ParserToken::MapStart,
                                     ParserToken::MapEnd,
                                     ParserToken::Key)
{}
// An Array parses as: ArrayStart, then Value tokens, then ArrayEnd.
template<typename T>
BinaryParserUtil<T, TraitType::Array>::BinaryParserUtil(bool root)
    : BinaryParserUtilBase( root,
                            ParserToken::ArrayStart,
                            ParserToken::ArrayEnd,
                            ParserToken::Value)
{}
// A plain Value parses as a single Value token followed by DocEnd;
// a request for anything further is an Error.
template<typename T>
BinaryParserUtil<T, TraitType::Value>::BinaryParserUtil(bool root)
    : BinaryParserUtilBase( root,
                            ParserToken::Value,
                            ParserToken::DocEnd,
                            ParserToken::Error)
{}
// Read the element count that prefixes a serialized array.
// NOTE(review): the count is deserialized as unsigned int and widened to
// std::size_t, so the wire format is fixed at the width of unsigned int.
template<typename T>
std::size_t BinaryParserUtil<T, TraitType::Array>::readSize(ParserInterface& parent)
{
    unsigned int result;
    parent.getValue(result);
    return result;
}
// For container elements that are themselves structured (Map/Array/Parent),
// push a fresh element parser onto the state stack and delegate to it;
// plain-value elements just yield the default token 'norm'.
template<typename T>
ParserToken BinaryParserUtil<T, TraitType::Array>
    ::pushNextState(std::size_t, ParserInterface& parser, ParserState& state, ParserToken norm)
{
    using ChildType = typename T::value_type;
    if ( ThorsAnvil::Serialize::Traits<ChildType>::type == TraitType::Map
      || ThorsAnvil::Serialize::Traits<ChildType>::type == TraitType::Array
      || ThorsAnvil::Serialize::Traits<ChildType>::type == TraitType::Parent)
    {
        state.emplace_back(new BinaryParserUtil<ChildType>(false));
        return state.back()->getNextToken(parser, state);
    }
    return norm;
}
}
}
#endif
#endif
|
£50m spent, 10 new arrivals in the summer, a whole new coaching team, a disjointed and unbalanced side with no identity or real potential. That is how to best summarise Garry Monk’s brief but disappointing reign with Middlesbrough Football Club.
There are a number of reasons that will have led to Steve Gibson calling time on the 38-year-old’s tenure at the club after just 197 days - the shortest managerial reign in the club’s history.
However, there is one stat that best summarises why Monk now finds himself out of work, after taking charge of 23 league games, Boro won just 10 under the former Swansea and Leeds boss.
On paper, it looks harsh. The club had just recorded a hard-fought victory over Sheffield Wednesday which has left the club just three points away from the play-off places with home games against Bolton Wanderers and Aston Villa to come.
But ask any Boro fan that has watched the club this season and they’ll understand Steve Gibson’s decision, even if they believe that Monk should have been afforded more time.
Boro have never looked like a side that deserved to be challenging at the top end of the Championship and even during Monk’s best period as boss - a three-game winning streak over Reading, Hull and Sunderland - they were far from convincing.
In fact, in all of Boro’s games this season, fans have never really walked away from the ground feeling 100% confident that the side they were watching were going to mount a promotion push. There was always an element of doubt somewhere in the display.
Whether that was the lack of pace in the final third, arguably the most creative player at the club being consistently left out of the matchday squad, constant alterations to the formation and personnel or - the most common one - defensive errors to the point where it felt like Boro would concede a goal a game through an error at the back.
Boro’s back four has gone from the sturdy and solid red wall under Aitor Karanka to a self-imploding quintet that became the club’s obvious weakness.
He’ll also be criticised for his signings. Of the ten that came through the Rockcliffe door this summer, only really Cyrus Christie and Darren Randolph have consistently impressed from the off while Britt Assombalonga has already scored 11 goals in the Championship this season.
Jonny Howson has improved in recent months and opened his account against Wednesday, but the others have flattered to deceive.
Connor Roberts, Lewis Baker, Marvin Johnson and Ryan Shotton have largely found themselves out of the fold. Martin Braithwaite has been frustratingly inconsistent and Ashley Fletcher has done very little to justify his hefty price tag.
And then there’s the freezing out of personnel. Stewart Downing, Daniel Ayala, Patrick Bamford, Grant Leadbitter and more recently Adam Clayton have found themselves out of the first-team fold with all but Clayton being brought back in after poor displays, not exactly a trait that shows a strong grasp of the playing staff.
Ultimately, Garry Monk’s sacking has been inevitable. His team has looked poor since day one with no immediate signs of improvement, he had a poor to near non-existent relationship with the fans - which counts for a lot in the chairman’s decision-making and he looked like he struggled with the pressure of being pre-season promotion favourites.
Craig Liddle has taken over the first team until the club find a replacement and he already has one major - and slightly sad - advantage in his favour, he’s not Garry Monk. Some fans have wanted Monk gone for some time so seeing someone - anyone - else in the dug out on Boxing Day will be a relief.
The last six months have been hopelessly frustrating and desperately disappointing, but with the manager gone and games coming thick and fast, it's time to move on and support the side under his successor.
<filename>core/src/main/java/com/liang/p2p/business/service/IBidRequestService.java
package com.liang.p2p.business.service;
import com.liang.p2p.base.query.PageResult;
import com.liang.p2p.business.domain.BidRequest;
import com.liang.p2p.business.domain.BidRequestAuditHistory;
import com.liang.p2p.business.query.BidRequestQueryObject;
import java.util.List;
/**
 * Loan (bid request) operations: applying, auditing and querying.
 * Created by liang on 2018/5/17.
 */
public interface IBidRequestService {

    /**
     * Persists changes to an existing bid request.
     *
     * @param bidRequest the request to update
     */
    void update(BidRequest bidRequest);

    /**
     * Loads a bid request by its primary key.
     *
     * @param id bid request id
     * @return the matching bid request
     */
    BidRequest get(Long id);

    /**
     * Checks whether the given user currently has the right to apply for a loan.
     *
     * @param logininfoId id of the logged-in user
     * @return true when the user may submit a new bid request
     */
    boolean canApplyBidRequest(Long logininfoId);

    /**
     * Submits a loan application.
     *
     * @param bidRequest the application to create
     */
    void apply(BidRequest bidRequest);

    /**
     * Lists the audit history of a single bid request.
     *
     * @param id bid request id
     * @return the audit history entries recorded for that request
     */
    List<BidRequestAuditHistory> listAuditHistoryByBidRequest(Long id);

    /**
     * Pages through bid requests matching the given query object.
     *
     * @param qo query/filter parameters
     * @return one page of matching requests
     */
    PageResult query(BidRequestQueryObject qo);

    /**
     * Audits a bid request before it is published.
     *
     * @param id     bid request id
     * @param remark auditor's remark
     * @param state  resulting audit state
     */
    void publishAudit(Long id, String remark, int state);

    /**
     * Lists bid requests for the landing (index) page.
     *
     * @param size maximum number of entries to return
     * @return up to {@code size} bid requests
     */
    List<BidRequest> listIndex(int size);
}
|
package com.example.concurrent.limit.distribute;
import com.example.util.Constants;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.redisson.Redisson;
import org.redisson.api.RScript;
import org.redisson.api.RedissonClient;
import org.redisson.client.codec.StringCodec;
import org.redisson.config.Config;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scripting.support.ResourceScriptSource;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.CountDownLatch;
/**
 * Distributed rate-limiting demo: Redis + Lua.
 *
 * A Lua script is evaluated atomically on the Redis cluster; the bucket key
 * changes every second, giving a fixed-window, per-second limit.
 */
public class RedisLuaLimiter {

    private static RedissonClient redissonClient = buildRedissonClient();

    /**
     * Builds the Redisson cluster client.
     * The original address list repeated each node twice; listing each
     * node once is equivalent for the cluster client.
     */
    private static RedissonClient buildRedissonClient() {
        Config config = new Config();
        config.useClusterServers().addNodeAddress(
                new String[]{"redis://172.16.31.10:6390", "redis://172.16.31.10:6391", "redis://172.16.31.10:6392"})
                .setTimeout(10000)
                .setConnectTimeout(20000)
                .setPassword(Constants.REDIS_CLUSTER_PWD);
        return Redisson.create(config);
    }

    /**
     * Attempts to take one permit for the current one-second window.
     *
     * @return true when the request is allowed, false when the limit was hit
     * @throws IOException if the Lua script file cannot be read
     */
    public static boolean acquire() throws IOException {
        // NOTE(review): absolute developer path, kept for this demo; the
        // commented-out classpath loading below is the portable variant.
        String luaScript = FileUtils.readFileToString(new File("D:\\workspace\\opensource\\java-demo\\main-demo\\src\\main\\java\\com\\example\\concurrent\\limit\\distribute\\limit.lua"), StandardCharsets.UTF_8);
        // Note: limit.lua must be copied under build/classes for the classpath
        // variant to find it, e.g.
        // main-demo/build/classes/java/main/com/example/concurrent/limit/distribute/limit.lua
        // (or adjust the build script to copy it automatically).
        /*ClassPathResource resource = new ClassPathResource("/com/example/concurrent/limit/distribute/limit.lua");
        ResourceScriptSource source = new ResourceScriptSource(resource);
        String luaScript = source.getScriptAsString();*/

        // Bucket key: one counter per wall-clock second.
        String key = "ip:" + System.currentTimeMillis() / 1000;
        String limit = "5";
        Long result = redissonClient.getScript(StringCodec.INSTANCE).eval(RScript.Mode.READ_WRITE, luaScript, RScript.ReturnType.INTEGER,
                Lists.newArrayList(key), new Object[]{limit});
        // Guard against a null reply before unboxing (the original
        // 'result == 1' would NPE on a null script result).
        return result != null && result == 1;
    }

    /**
     * Fires THREAD_NUM concurrent requests and reports which were admitted.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        final int THREAD_NUM = 10;
        CountDownLatch latch = new CountDownLatch(THREAD_NUM);
        for (int i = 0; i < THREAD_NUM; i++) {
            final int index = i;
            new Thread(() -> {
                try {
                    if (!acquire()) {
                        System.out.println("refused: " + index);
                    } else {
                        System.out.println("processed: " + index);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
                latch.countDown();
            }).start();
        }
        latch.await();
        redissonClient.shutdown();
    }
}
|
//---------------------------------------------------------------------------------------------
// assumption is this is mutually exclusive per FlowIndex
//
// Insert/update a flow record for one packet: look up (or create) the flow
// identified by SHA1 in FlowIndex, then accumulate packet/byte counters,
// first/last timestamps and — for TCP — per-flag counters and window extrema.
//
// Fix: the stray notes "2018/12/04: ..." and "first packet" were left as
// bare (uncommented) text in the original and did not compile; they are
// now comments.
static void FlowInsert(u32 CPUID, FlowIndex_t* FlowIndex, FlowRecord_t* FlowPkt, u32* SHA1, u32 Length, u64 TS)
{
	FlowRecord_t* F = FlowAdd(FlowIndex, FlowPkt, SHA1);
	assert(F != NULL);

	F->TotalPkt  += 1;
	F->TotalByte += Length;

	// keep the first-seen timestamp, always refresh the last-seen one
	F->FirstTS = (F->FirstTS == 0) ? TS : F->FirstTS;
	F->LastTS  = TS;

	if (F->IPProto == IPv4_PROTO_TCP)
	{
		TCPHeader_t* TCP = &FlowPkt->TCPHeader;
		u16 TCPFlags = swap16(TCP->Flags);

		// each expression is 0 or 1, so this counts packets carrying the flag
		F->TCPFINCnt += (TCP_FLAG_FIN(TCPFlags) != 0);
		F->TCPSYNCnt += (TCP_FLAG_SYN(TCPFlags) != 0);
		F->TCPRSTCnt += (TCP_FLAG_RST(TCPFlags) != 0);
		F->TCPPSHCnt += (TCP_FLAG_PSH(TCPFlags) != 0);
		F->TCPACKCnt += (TCP_FLAG_ACK(TCPFlags) != 0);

		// 2018/12/04: SACK traffic messes this up — duplicate-ACK counting
		// disabled below.
		/*
		if (TCP_FLAG_ACK(TCPFlags))
		{
			u32 TCPAckNo = swap32(TCP->AckNo);
			if ((FlowPkt->TCPLength == 0) && (F->TCPAckNo == TCPAckNo))
			{
				// if its not a SACK
				if (!FlowPkt->TCPIsSACK)
				{
					F->TCPSACKCnt += 1;
				}
				else
				{
					F->TCPACKDupCnt += 1;
				}
			}
			F->TCPAckNo = TCPAckNo;
		}
		*/

		// first packet: seed the window min/max before clamping
		u32 TCPWindow = swap16(TCP->Window);
		if (F->TotalPkt == 1)
		{
			F->TCPWindowMin = TCPWindow;
			F->TCPWindowMax = TCPWindow;
		}
		F->TCPWindowMin = min32(F->TCPWindowMin, TCPWindow);
		F->TCPWindowMax = max32(F->TCPWindowMax, TCPWindow);
	}
}
// SetWill sets the Will into the packet if the topic is non-nil, or unsets it if nil.
//
// Only the Will flag in the connect header tracks whether topic is nil;
// the properties, topic and message payload fields are assigned
// unconditionally, so a nil topic clears the flag while still overwriting
// the payload fields with the supplied values.
func (p *ConnectPacket) SetWill(properties Properties, topic, message []byte) {
	p.ConnectHeader.SetWill(topic != nil)
	p.ConnectPayload.WillProperties = properties
	p.ConnectPayload.WillTopic = topic
	p.ConnectPayload.WillMessage = message
}
package gotpl
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"regexp"
"strings"
"text/template"
"github.com/kenshaw/snaker"
xo "github.com/xo/xo/types"
)
// Funcs is a set of template funcs.
type Funcs struct {
	driver  string           // driver name (from xo.DriverSchemaNthParam)
	schema  string           // schema name, joined onto table names by schemafn
	nth     func(int) string // placeholder renderer for the nth SQL parameter
	first   *bool            // shared flag; true until the first template is generated (see firstfn)
	pkg     string           // target package name (from Pkg)
	tags    []string         // build tags (from Tags)
	imports []string         // extra imports, each "path" or "alias path" (from Imports)
	conflict string          // conflict value (from Conflict)
	custom   string          // custom value (from Custom)
	escSchema bool           // whether to escape the schema name (from Esc(ctx, "schema"))
	escTable  bool           // whether to escape table names (from Esc(ctx, "table"))
	escColumn bool           // whether to escape column names (from Esc(ctx, "column"))
	fieldtag  *template.Template // parsed field tag template (from FieldTag)
	context   string         // context mode: "both", "only" or "disable" (from Context)
	inject    string         // content injected into output (from Inject/InjectFile)
	// knownTypes is the collection of known Go types.
	knownTypes map[string]bool
	// shorts is the collection of Go style short names for types, mainly
	// used for use with declaring a func receiver on a type.
	shorts map[string]string
}
// NewFuncs returns a set of template funcs.
//
// All configuration is read from ctx. knownTypes and shorts are shared maps
// of known Go types and their short receiver names; first is a shared flag
// consumed by firstfn to detect the first generated template.
func NewFuncs(ctx context.Context, knownTypes map[string]bool, shorts map[string]string, first *bool) (*Funcs, error) {
	// force not first
	if NotFirst(ctx) {
		b := false
		first = &b
	}
	// parse field tag template
	fieldtag, err := template.New("fieldtag").Parse(FieldTag(ctx))
	if err != nil {
		return nil, err
	}
	// load inject: inline value, optionally replaced by file contents
	inject := Inject(ctx)
	if s := InjectFile(ctx); s != "" {
		// NOTE(review): ioutil.ReadFile is deprecated in modern Go;
		// os.ReadFile is the drop-in replacement (left unchanged here to
		// avoid touching this file's imports).
		buf, err := ioutil.ReadFile(s)
		if err != nil {
			return nil, fmt.Errorf("unable to read file: %v", err)
		}
		inject = string(buf)
	}
	driver, schema, nthParam := xo.DriverSchemaNthParam(ctx)
	return &Funcs{
		driver:     driver,
		schema:     schema,
		nth:        nthParam,
		first:      first,
		pkg:        Pkg(ctx),
		tags:       Tags(ctx),
		imports:    Imports(ctx),
		conflict:   Conflict(ctx),
		custom:     Custom(ctx),
		escSchema:  Esc(ctx, "schema"),
		escTable:   Esc(ctx, "table"),
		escColumn:  Esc(ctx, "column"),
		fieldtag:   fieldtag,
		context:    Context(ctx),
		inject:     inject,
		knownTypes: knownTypes,
		shorts:     shorts,
	}, nil
}
// AddKnownType adds a known type.
//
// Registered names are treated as known Go types by the template funcs.
func (f *Funcs) AddKnownType(name string) {
	f.knownTypes[name] = true
}
// FuncMap returns the func map exposed to the Go templates; map keys are
// the names usable inside templates, values are the bound methods below.
// The trailing commented-out block lists funcs from a previous version
// that are not currently wired up.
func (f *Funcs) FuncMap() template.FuncMap {
	return template.FuncMap{
		// general
		"driver":  f.driverfn,
		"schema":  f.schemafn,
		"first":   f.firstfn,
		"pkg":     f.pkgfn,
		"tags":    f.tagsfn,
		"imports": f.importsfn,
		"inject":  f.injectfn,
		"eval":    f.eval,
		// context
		"context":         f.contextfn,
		"context_both":    f.context_both,
		"context_disable": f.context_disable,
		// func and query
		"func_name_context":   f.func_name_context,
		"func_name":           f.func_name_none,
		"func_context":        f.func_context,
		"func":                f.func_none,
		"recv_context":        f.recv_context,
		"recv":                f.recv_none,
		"foreign_key_context": f.foreign_key_context,
		"foreign_key":         f.foreign_key_none,
		"db":                  f.db,
		"db_prefix":           f.db_prefix,
		"db_update":           f.db_update,
		"db_named":            f.db_named,
		"named":               f.named,
		"logf":                f.logf,
		"logf_pkeys":          f.logf_pkeys,
		"logf_update":         f.logf_update,
		// type
		"names":        f.names,
		"names_all":    f.names_all,
		"names_ignore": f.names_ignore,
		"params":       f.params,
		"zero":         f.zero,
		"type":         f.typefn,
		"field":        f.field,
		"short":        f.short,
		"check_name":   f.checkName,
		// sqlstr funcs
		"querystr": f.querystr,
		"sqlstr":   f.sqlstr,
		/*
			"nthparam": f.nthparam,
			// general
			"colcount":           f.colcount,
			"colname":            f.colname,
			"colnames":           f.colnames,
			"colnamesmulti":      f.colnamesmulti,
			"colnamesquery":      f.colnamesquery,
			"colnamesquerymulti": f.colnamesquerymulti,
			"colprefixnames":     f.colprefixnames,
			"colvals":            f.colvals,
			"colvalsmulti":       f.colvalsmulti,
			"fieldnames":         f.fieldnames,
			"fieldnamesmulti":    f.fieldnamesmulti,
			"startcount":         f.startcount,
			"hascolumn":          f.hascolumn,
			"hasfield":           f.hasfield,
			"paramlist":          f.paramlist,
			"retype":             f.retype,
		*/
	}
}
// driverfn reports whether the configured driver matches any of the
// passed driver names.
func (f *Funcs) driverfn(drivers ...string) bool {
	for _, name := range drivers {
		if name == f.driver {
			return true
		}
	}
	return false
}
// schemafn takes a series of names and joins them with the schema name,
// escaping each component as configured. sqlite3 has no schema concept, so
// the schema is only used there as a fallback when no names are given.
func (f *Funcs) schemafn(names ...string) string {
	s := f.schema
	// escape table names
	if f.escTable {
		for i, name := range names {
			names[i] = f.escfn(name)
		}
	}
	n := strings.Join(names, ".")
	switch {
	case s == "" && n == "":
		return ""
	case f.driver == "sqlite3" && n == "":
		// no names given: fall back to the raw schema
		return f.schema
	case f.driver == "sqlite3":
		// sqlite3 does not prefix with a schema
		return n
	case s != "" && n != "":
		if f.escSchema {
			s = f.escfn(s)
		}
		s += "."
	}
	return s + n
}
// firstfn reports whether the current template is the first template
// generated; it flips the shared flag so subsequent calls return false.
func (f *Funcs) firstfn() bool {
	b := *f.first
	*f.first = false
	return b
}

// pkgfn returns the package name.
func (f *Funcs) pkgfn() string {
	return f.pkg
}

// tagsfn returns the build tags.
func (f *Funcs) tagsfn() []string {
	return f.tags
}
// importsfn converts the configured import strings into PackageImport
// values; an entry of the form "alias path" is split on its first space.
func (f *Funcs) importsfn() []PackageImport {
	var imports []PackageImport
	for _, entry := range f.imports {
		alias, pkg := "", entry
		if idx := strings.Index(entry, " "); idx != -1 {
			alias, pkg = entry[:idx], strings.TrimSpace(entry[idx:])
		}
		imports = append(imports, PackageImport{Alias: alias, Pkg: pkg})
	}
	return imports
}
// contextfn returns true when the context mode is both or only.
func (f *Funcs) contextfn() bool {
	return f.context == "both" || f.context == "only"
}

// context_both returns true when the context mode is both.
func (f *Funcs) context_both() bool {
	return f.context == "both"
}

// context_disable returns true when the context mode is disable.
func (f *Funcs) context_disable() bool {
	return f.context == "disable"
}

// injectfn returns the injected content provided from args.
func (f *Funcs) injectfn() string {
	return f.inject
}
// eval evaluates the template source s against the data v, returning the
// rendered output.
func (f *Funcs) eval(v interface{}, s string) (string, error) {
	tpl, err := template.New(fmt.Sprintf("[EVAL %q]", s)).Parse(s)
	if err != nil {
		return "", err
	}
	buf := new(bytes.Buffer)
	if err := tpl.Execute(buf, v); err != nil {
		return "", err
	}
	return buf.String(), nil
}
// func_name_none builds a func name for v without a Context suffix.
func (f *Funcs) func_name_none(v interface{}) string {
	switch x := v.(type) {
	case string:
		return x
	case Query:
		return x.Name
	case Table:
		return x.GoName
	case ForeignKey:
		return x.GoName
	case Proc:
		// overloaded procs use their disambiguated name
		n := x.GoName
		if x.Overloaded {
			n = x.OverloadedName
		}
		return n
	case Index:
		return x.FuncName
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
}
// func_name_context generates a name for the func, appending the Context
// suffix when the context mode is both.
func (f *Funcs) func_name_context(v interface{}) string {
	var name string
	switch x := v.(type) {
	case string:
		return nameContext(f.context_both(), x)
	case Query:
		name = nameContext(f.context_both(), x.Name)
	case Table:
		name = nameContext(f.context_both(), x.GoName)
	case ForeignKey:
		name = nameContext(f.context_both(), x.GoName)
	case Proc:
		// overloaded procs use their disambiguated name
		n := x.GoName
		if x.Overloaded {
			n = x.OverloadedName
		}
		name = nameContext(f.context_both(), n)
	case Index:
		name = nameContext(f.context_both(), x.FuncName)
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	return name
}
// funcfn builds a func definition (signature) for v, optionally adding a
// leading context.Context parameter. The return list always ends in error.
func (f *Funcs) funcfn(name string, context bool, v interface{}) string {
	var p, r []string
	if context {
		p = append(p, "ctx context.Context")
	}
	p = append(p, "db DB")
	switch x := v.(type) {
	case Query:
		// params
		for _, z := range x.Params {
			p = append(p, fmt.Sprintf("%s %s", z.Name, z.Type))
		}
		// returns depend on the query kind
		switch {
		case x.Exec:
			r = append(r, "sql.Result")
		case x.Flat:
			// flat queries return the individual field values
			for _, z := range x.Type.Fields {
				r = append(r, f.typefn(z.Type))
			}
		case x.One:
			r = append(r, "*"+x.Type.GoName)
		default:
			r = append(r, "[]*"+x.Type.GoName)
		}
	case Proc:
		// params
		p = append(p, f.params(x.Params, true))
		// returns
		if !x.Void {
			for _, ret := range x.Returns {
				r = append(r, f.typefn(ret.Type))
			}
		}
	case Index:
		// params
		p = append(p, f.params(x.Fields, true))
		// unique indexes return a single row, others a slice
		rt := "*" + x.Table.GoName
		if !x.IsUnique {
			rt = "[]" + rt
		}
		r = append(r, rt)
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	r = append(r, "error")
	return fmt.Sprintf("func %s(%s) (%s)", name, strings.Join(p, ", "), strings.Join(r, ", "))
}
// func_context generates a func signature for v with context determined by
// the context mode.
func (f *Funcs) func_context(v interface{}) string {
	return f.funcfn(f.func_name_context(v), f.contextfn(), v)
}

// func_none generates a func signature for v without context.
func (f *Funcs) func_none(v interface{}) string {
	return f.funcfn(f.func_name_none(v), false, v)
}
// recv builds a receiver func definition on the table type typ, optionally
// adding a leading context.Context parameter.
func (f *Funcs) recv(name string, context bool, typ interface{}, v interface{}) string {
	short := f.short(typ)
	var tableName string
	switch x := typ.(type) {
	case Table:
		tableName = x.GoName
	default:
		return fmt.Sprintf("[[ UNSUPPORTED RECEIVER TYPE: %T ]]", typ)
	}
	var p, r []string
	// determine params and return type
	if context {
		p = append(p, "ctx context.Context")
	}
	p = append(p, "db DB")
	switch x := v.(type) {
	case ForeignKey:
		// foreign key lookups return the referenced row
		r = append(r, "*"+x.RefTable)
	}
	r = append(r, "error")
	return fmt.Sprintf("func (%s *%s) %s(%s) (%s)", short, tableName, name, strings.Join(p, ", "), strings.Join(r, ", "))
}
// recv_context builds a receiver func definition with context determined by
// the context mode.
func (f *Funcs) recv_context(typ interface{}, v interface{}) string {
	return f.recv(f.func_name_context(v), f.contextfn(), typ, v)
}

// recv_none builds a receiver func definition without context.
func (f *Funcs) recv_none(typ interface{}, v interface{}) string {
	return f.recv(f.func_name_none(v), false, typ, v)
}
// foreign_key_context builds the call expression for a foreign key lookup,
// passing ctx when the context mode enables it.
func (f *Funcs) foreign_key_context(v interface{}) string {
	var name string
	var p []string
	if f.contextfn() {
		p = append(p, "ctx")
	}
	switch x := v.(type) {
	case ForeignKey:
		name = x.RefFuncName
		// in "both" mode the context variant carries a Context suffix
		if f.context_both() {
			name += "Context"
		}
		// add params
		p = append(p, "db", f.convertTypes(x))
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE %T ]]", v)
	}
	return fmt.Sprintf("%s(%s)", name, strings.Join(p, ", "))
}

// foreign_key_none builds the call expression for a foreign key lookup
// without a caller-supplied context, using context.Background() instead.
func (f *Funcs) foreign_key_none(v interface{}) string {
	var name string
	var p []string
	switch x := v.(type) {
	case ForeignKey:
		name = x.RefFuncName
		p = append(p, "context.Background()", "db", f.convertTypes(x))
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE %T ]]", v)
	}
	return fmt.Sprintf("%s(%s)", name, strings.Join(p, ", "))
}
// db generates a db.<name>Context(ctx, sqlstr, ...) call expression; the
// Context suffix and ctx argument are added per the context mode.
func (f *Funcs) db(name string, v ...interface{}) string {
	// params
	var p []interface{}
	if f.contextfn() {
		name, p = name+"Context", append(p, "ctx")
	}
	p = append(p, "sqlstr")
	return fmt.Sprintf("db.%s(%s)", name, f.names("", append(p, v...)...))
}
// db_prefix generates a db.<name>Context(ctx, sqlstr, <prefix>.param, ...).
//
// Will skip the specific parameters based on the type provided.
func (f *Funcs) db_prefix(name string, skip bool, vs ...interface{}) string {
	var prefix string
	var params []interface{}
	for i, v := range vs {
		var ignore []string
		switch x := v.(type) {
		case string:
			params = append(params, x)
		case Table:
			prefix = f.short(x.GoName) + "."
			// skip sequence (auto generated) primary keys
			if skip {
				for _, field := range x.Fields {
					if field.IsSequence {
						ignore = append(ignore, field.GoName)
					}
				}
			}
			p := f.names_ignore(prefix, v, ignore...)
			// p is "" when no columns are present except for primary key
			// params
			if p != "" {
				params = append(params, p)
			}
		default:
			return fmt.Sprintf("[[ UNSUPPORTED TYPE %d: %T ]]", i, v)
		}
	}
	return f.db(name, params...)
}
// db_update generates a db.<name>Context(ctx, sqlstr, regularparams,
// primaryparams) call: non-key columns first, then the primary keys to
// match the WHERE clause placeholders.
func (f *Funcs) db_update(name string, v interface{}) string {
	var ignore, p []string
	switch x := v.(type) {
	case Table:
		prefix := f.short(x.GoName) + "."
		for _, pk := range x.PrimaryKeys {
			ignore = append(ignore, pk.GoName)
		}
		p = append(p, f.names_ignore(prefix, x, ignore...), f.names(prefix, x.PrimaryKeys))
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	return f.db(name, strings.Join(p, ", "))
}
// db_named generates a db.<name>Context(ctx, sql.Named(name, res)...) call
// for a stored proc: in-params first, then out-params bound via sql.Out.
func (f *Funcs) db_named(name string, v interface{}) string {
	var p []string
	switch x := v.(type) {
	case Proc:
		for _, z := range x.Params {
			p = append(p, f.named(z.SQLName, z.GoName, false))
		}
		for _, z := range x.Returns {
			p = append(p, f.named(z.SQLName, "&"+z.GoName, true))
		}
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	return f.db(name, strings.Join(p, ", "))
}

// named builds a sql.Named(...) expression; out-params are wrapped in
// sql.Out so the driver writes the result back into value.
func (f *Funcs) named(name, value string, out bool) string {
	if out {
		return fmt.Sprintf("sql.Named(%q, sql.Out{Dest: %s})", name, value)
	}
	return fmt.Sprintf("sql.Named(%q, %s)", name, value)
}
// logf_pkeys builds a logf(sqlstr, <pkeys>...) call logging the query and
// the primary key values.
func (f *Funcs) logf_pkeys(v interface{}) string {
	p := []string{"sqlstr"}
	switch x := v.(type) {
	case Table:
		p = append(p, f.names(f.short(x.GoName)+".", x.PrimaryKeys))
	}
	return fmt.Sprintf("logf(%s)", strings.Join(p, ", "))
}

// logf builds a logf(sqlstr, <fields>...) call logging the query and all
// field values of v, except those listed in ignore.
func (f *Funcs) logf(v interface{}, ignore ...interface{}) string {
	var ignoreNames []string
	p := []string{"sqlstr"}
	// build ignore list from the supported forms
	for i, x := range ignore {
		switch z := x.(type) {
		case string:
			ignoreNames = append(ignoreNames, z)
		case Field:
			ignoreNames = append(ignoreNames, z.GoName)
		case []Field:
			for _, f := range z {
				ignoreNames = append(ignoreNames, f.GoName)
			}
		default:
			return fmt.Sprintf("[[ UNSUPPORTED TYPE %d: %T ]]", i, x)
		}
	}
	// add fields
	switch x := v.(type) {
	case Table:
		p = append(p, f.names_ignore(f.short(x.GoName)+".", x, ignoreNames...))
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	return fmt.Sprintf("logf(%s)", strings.Join(p, ", "))
}

// logf_update builds a logf call for UPDATE statements: non-key values
// first, then the primary keys, matching db_update's parameter order.
func (f *Funcs) logf_update(v interface{}) string {
	var ignore []string
	p := []string{"sqlstr"}
	switch x := v.(type) {
	case Table:
		prefix := f.short(x.GoName) + "."
		for _, pk := range x.PrimaryKeys {
			ignore = append(ignore, pk.GoName)
		}
		p = append(p, f.names_ignore(prefix, x, ignore...), f.names(prefix, x.PrimaryKeys))
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)
	}
	return fmt.Sprintf("logf(%s)", strings.Join(p, ", "))
}
// namesfn generates a comma separated list of names for the passed values.
// When all is false, interpolated query params are excluded.
func (f *Funcs) namesfn(all bool, prefix string, z ...interface{}) string {
	var names []string
	for i, v := range z {
		switch x := v.(type) {
		case string:
			names = append(names, x)
		case Query:
			for _, p := range x.Params {
				// interpolated params are baked into the query string
				if !all && p.Interpolate {
					continue
				}
				names = append(names, prefix+p.Name)
			}
		case Table:
			for _, p := range x.Fields {
				names = append(names, prefix+f.checkName(p.GoName))
			}
		case []Field:
			for _, p := range x {
				names = append(names, prefix+f.checkName(p.GoName))
			}
		case Proc:
			if params := f.params(x.Params, false); params != "" {
				names = append(names, params)
			}
		case Index:
			names = append(names, f.params(x.Fields, false))
		default:
			names = append(names, fmt.Sprintf("/* UNSUPPORTED TYPE %d %T */", i, v))
		}
	}
	return strings.Join(names, ", ")
}

// names generates a list of names (excluding certain ones such as interpolated
// names).
func (f *Funcs) names(prefix string, z ...interface{}) string {
	return f.namesfn(false, prefix, z...)
}

// names_all generates a list of all names.
func (f *Funcs) names_all(prefix string, z ...interface{}) string {
	return f.namesfn(true, prefix, z...)
}
// names_ignore generates a list of all names, ignoring fields whose GoName
// matches a value in ignore.
func (f *Funcs) names_ignore(prefix string, v interface{}, ignore ...string) string {
	m := map[string]bool{}
	for _, n := range ignore {
		m[n] = true
	}
	var vals []Field
	switch x := v.(type) {
	case Table:
		for _, p := range x.Fields {
			if m[p.GoName] {
				continue
			}
			vals = append(vals, p)
		}
	case []Field:
		for _, p := range x {
			if m[p.GoName] {
				continue
			}
			vals = append(vals, p)
		}
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE %T ]]", v)
	}
	return f.namesfn(true, prefix, vals)
}
// querystr generates a sqlstr declaration for the specified query and any
// accompanying comments. Interpolated queries are declared var (mutable),
// others const.
func (f *Funcs) querystr(v interface{}) string {
	var interpolate bool
	var query, comments []string
	switch x := v.(type) {
	case Query:
		interpolate, query, comments = x.Interpolate, x.Query, x.Comments
	default:
		return fmt.Sprintf("const sqlstr = [[ NOT IMPLEMENTED FOR %T ]]", v)
	}
	typ := "const"
	if interpolate {
		typ = "var"
	}
	var lines []string
	for i := 0; i < len(query); i++ {
		line := "`" + query[i] + "`"
		// concatenate all but the last line
		if i != len(query)-1 {
			line += " + "
		}
		if s := strings.TrimSpace(comments[i]); s != "" {
			line += "// " + s
		}
		lines = append(lines, line)
	}
	sqlstr := stripRE.ReplaceAllString(strings.Join(lines, "\n"), " ")
	return fmt.Sprintf("%s sqlstr = %s", typ, sqlstr)
}

// stripRE matches a concatenation with an empty raw string literal
// (" + ``"), which querystr collapses to a single space.
var stripRE = regexp.MustCompile(`\s+\+\s+` + "``")
// sqlstr builds a const sqlstr declaration for the requested query type,
// dispatching to the matching sqlstr_* builder.
func (f *Funcs) sqlstr(typ string, v interface{}) string {
	var lines []string
	switch typ {
	case "insert_manual":
		lines = f.sqlstr_insert_manual(v)
	case "insert":
		lines = f.sqlstr_insert(v)
	case "update":
		lines = f.sqlstr_update(v)
	case "upsert":
		lines = f.sqlstr_upsert(v)
	case "delete":
		lines = f.sqlstr_delete(v)
	case "proc":
		lines = f.sqlstr_proc(v)
	case "index":
		lines = f.sqlstr_index(v)
	default:
		return fmt.Sprintf("const sqlstr = `UNKNOWN QUERY TYPE: %s`", typ)
	}
	return fmt.Sprintf("const sqlstr = `%s`", strings.Join(lines, "` +\n\t`"))
}
// sqlstr_insert_base builds the base INSERT query for a table.
// If not all, sequence columns are skipped.
func (f *Funcs) sqlstr_insert_base(all bool, v interface{}) []string {
	switch x := v.(type) {
	case Table:
		// build names and values
		var fields, vals []string
		var i int
		for _, z := range x.Fields {
			if z.IsSequence && !all {
				continue
			}
			fields, vals = append(fields, f.colname(z)), append(vals, f.nth(i))
			i++
		}
		return []string{
			"INSERT INTO " + f.schemafn(x.SQLName) + " (",
			strings.Join(fields, ", "),
			") VALUES (",
			strings.Join(vals, ", "),
			")",
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}

// sqlstr_insert_manual builds an INSERT query that inserts all fields.
func (f *Funcs) sqlstr_insert_manual(v interface{}) []string {
	return f.sqlstr_insert_base(true, v)
}
// sqlstr_insert builds an INSERT query, skipping the sequence field with
// applicable RETURNING clause for generated primary key fields.
func (f *Funcs) sqlstr_insert(v interface{}) []string {
	switch x := v.(type) {
	case Table:
		// find the sequence field (if any) for the RETURNING clause
		var seq Field
		for _, field := range x.Fields {
			if field.IsSequence {
				seq = field
			}
		}
		lines := f.sqlstr_insert_base(false, v)
		// add driver-specific clause retrieving the generated key
		switch f.driver {
		case "oracle":
			lines[len(lines)-1] += ` RETURNING ` + f.colname(seq) + ` /*LASTINSERTID*/ INTO :pk`
		case "postgres":
			lines[len(lines)-1] += ` RETURNING ` + f.colname(seq)
		case "sqlserver":
			lines[len(lines)-1] += "; SELECT ID = CONVERT(BIGINT, SCOPE_IDENTITY())"
		}
		return lines
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_update_base builds an UPDATE query, using primary key fields as the WHERE
// clause, adding prefix.
//
// When prefix is empty, the WHERE clause will be in the form of name = $1.
// When prefix is non-empty, the WHERE clause will be in the form of name = <PREFIX>name.
//
// Similarly, when prefix is empty, the table's name is added after UPDATE,
// otherwise it is omitted.
//
// Returns the count of SET placeholders so callers can continue numbering.
func (f *Funcs) sqlstr_update_base(prefix string, v interface{}) (int, []string) {
	switch x := v.(type) {
	case Table:
		// build names and values, skipping primary keys
		var i int
		var list []string
		for _, z := range x.Fields {
			if z.IsPrimary {
				continue
			}
			name, param := f.colname(z), f.nth(i)
			if prefix != "" {
				param = prefix + name
			}
			list = append(list, fmt.Sprintf("%s = %s", name, param))
			i++
		}
		name := ""
		if prefix == "" {
			name = f.schemafn(x.SQLName) + " "
		}
		return i, []string{
			"UPDATE " + name + "SET ",
			strings.Join(list, ", ") + " ",
		}
	}
	return 0, []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_update builds an UPDATE query, using primary key fields as the WHERE
// clause.
func (f *Funcs) sqlstr_update(v interface{}) []string {
	// build pkey vals
	switch x := v.(type) {
	case Table:
		var list []string
		n, lines := f.sqlstr_update_base("", v)
		// WHERE placeholders continue numbering after the SET placeholders
		for i, z := range x.PrimaryKeys {
			list = append(list, fmt.Sprintf("%s = %s", f.colname(z), f.nth(n+i)))
		}
		return append(lines, "WHERE "+strings.Join(list, ", "))
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_upsert builds an upsert query, dispatching to the driver-specific
// builder. postgres/sqlite3/mysql extend a full INSERT; sqlserver/oracle
// use a standalone MERGE statement.
func (f *Funcs) sqlstr_upsert(v interface{}) []string {
	switch v.(type) {
	case Table:
		// build insert
		lines := f.sqlstr_insert_base(true, v)
		switch f.driver {
		case "postgres", "sqlite3":
			return append(lines, f.sqlstr_upsert_postgres_sqlite(v)...)
		case "mysql":
			return append(lines, f.sqlstr_upsert_mysql(v)...)
		case "sqlserver", "oracle":
			return f.sqlstr_upsert_sqlserver_oracle(v)
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_upsert_postgres_sqlite builds an upsert query for postgres and sqlite
//
// INSERT (..) VALUES (..) ON CONFLICT DO UPDATE SET ...
func (f *Funcs) sqlstr_upsert_postgres_sqlite(v interface{}) []string {
	switch x := v.(type) {
	case Table:
		// conflict target is the set of primary key columns
		var conflicts []string
		for _, f := range x.PrimaryKeys {
			conflicts = append(conflicts, f.SQLName)
		}
		lines := []string{` ON CONFLICT (` + strings.Join(conflicts, ", ") + `) DO `}
		// the update clause references the proposed row via EXCLUDED.<col>
		_, update := f.sqlstr_update_base("EXCLUDED.", v)
		return append(lines, update...)
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_upsert_mysql builds an upsert query for mysql
//
// INSERT (..) VALUES (..) ON DUPLICATE KEY UPDATE SET ...
func (f *Funcs) sqlstr_upsert_mysql(v interface{}) []string {
	switch x := v.(type) {
	case Table:
		lines := []string{` ON DUPLICATE KEY UPDATE `}
		var list []string
		for _, z := range x.Fields {
			// sequence columns are managed by the database
			if z.IsSequence {
				continue
			}
			name := f.colname(z)
			list = append(list, fmt.Sprintf("%s = VALUES(%s)", name, name))
		}
		return append(lines, strings.Join(list, ", "))
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_upsert_sqlserver_oracle builds an upsert query for sqlserver and
// oracle
//
// MERGE [table] AS target USING (SELECT [pkeys]) AS source ...
func (f *Funcs) sqlstr_upsert_sqlserver_oracle(v interface{}) []string {
	switch x := v.(type) {
	case Table:
		var lines []string
		// merge [table]...
		switch f.driver {
		case "sqlserver":
			lines = []string{`MERGE ` + f.schemafn(x.SQLName) + ` AS t `}
		case "oracle":
			// NOTE(review): appends `t ` directly after the table name with
			// no separating space/AS keyword — confirm this is intended.
			lines = []string{`MERGE ` + f.schemafn(x.SQLName) + `t `}
		}
		// using (select ..): bind every column, match on primary keys
		var fields, predicate []string
		for i, field := range x.Fields {
			fields = append(fields, fmt.Sprintf("%s %s", f.nth(i), field.SQLName))
		}
		for _, field := range x.PrimaryKeys {
			predicate = append(predicate, fmt.Sprintf("s.%s = t.%s", field.SQLName, field.SQLName))
		}
		// closing part for select
		var closing string
		switch f.driver {
		case "sqlserver":
			closing = `) AS s `
		case "oracle":
			closing = `FROM DUAL ) s `
		}
		lines = append(lines, `USING (`,
			`SELECT `+strings.Join(fields, ", ")+" ",
			closing,
			`ON `+strings.Join(predicate, " AND ")+" ")
		// build param lists
		var updateParams, insertParams, insertVals []string
		for _, field := range x.Fields {
			// sequences are always managed by db
			if field.IsSequence {
				continue
			}
			// primary keys are matched, not updated
			if !field.IsPrimary {
				updateParams = append(updateParams, fmt.Sprintf("t.%s = s.%s", field.SQLName, field.SQLName))
			}
			insertParams = append(insertParams, field.SQLName)
			insertVals = append(insertVals, "s."+field.SQLName)
		}
		// when matched then update...
		lines = append(lines,
			`WHEN MATCHED THEN `, `UPDATE SET `,
			strings.Join(updateParams, ", ")+" ",
			`WHEN NOT MATCHED THEN `,
			`INSERT (`,
			strings.Join(insertParams, ", "),
			`) VALUES (`,
			strings.Join(insertVals, ", "),
			`);`,
		)
		return lines
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_delete builds a DELETE query for the primary keys.
func (f *Funcs) sqlstr_delete(v interface{}) []string {
	switch x := v.(type) {
	case Table:
		// build WHERE clause from the primary key columns
		var list []string
		for i, z := range x.PrimaryKeys {
			list = append(list, fmt.Sprintf("%s = %s", f.colname(z), f.nth(i)))
		}
		return []string{
			"DELETE FROM " + f.schemafn(x.SQLName) + " ",
			"WHERE " + strings.Join(list, " AND "),
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_index builds a SELECT query returning all table columns, filtered
// by the index fields.
func (f *Funcs) sqlstr_index(v interface{}) []string {
	switch x := v.(type) {
	case Index:
		// build table fieldnames
		var fields []string
		for _, z := range x.Table.Fields {
			fields = append(fields, f.colname(z))
		}
		// index fields form the WHERE clause
		var list []string
		for i, z := range x.Fields {
			list = append(list, fmt.Sprintf("%s = %s", f.colname(z), f.nth(i)))
		}
		return []string{
			"SELECT ",
			strings.Join(fields, ", ") + " ",
			"FROM " + f.schemafn(x.Table.SQLName) + " ",
			"WHERE " + strings.Join(list, " AND "),
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_proc builds a stored procedure call; functions are delegated to
// sqlstr_func.
func (f *Funcs) sqlstr_proc(v interface{}) []string {
	switch x := v.(type) {
	case Proc:
		if x.Type == "function" {
			return f.sqlstr_func(v)
		}
		// sql string format per driver
		var format string
		switch f.driver {
		case "postgres", "mysql":
			format = "CALL %s(%s)"
		case "sqlserver":
			format = "%[1]s"
		case "oracle":
			format = "BEGIN %s(%s); END;"
		}
		// build params list; add return fields for oracle
		l := x.Params
		if f.driver == "oracle" {
			l = append(l, x.Returns...)
		}
		var list []string
		for i, field := range l {
			s := f.nth(i)
			// oracle uses named bind parameters
			if f.driver == "oracle" {
				s = ":" + field.SQLName
			}
			list = append(list, s)
		}
		// don't prefix with schema for oracle
		name := f.schemafn(x.SQLName)
		if f.driver == "oracle" {
			name = x.SQLName
		}
		return []string{
			fmt.Sprintf(format, name, strings.Join(list, ", ")),
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// sqlstr_func builds a SQL function call with driver-specific SELECT syntax.
func (f *Funcs) sqlstr_func(v interface{}) []string {
	switch x := v.(type) {
	case Proc:
		var format string
		switch f.driver {
		case "postgres":
			format = "SELECT * FROM %s(%s)"
		case "mysql":
			format = "SELECT %s(%s)"
		case "sqlserver":
			format = "SELECT %s(%s) AS OUT"
		case "oracle":
			format = "SELECT %s(%s) FROM dual"
		}
		// positional placeholders for each param
		var list []string
		l := x.Params
		for i := range l {
			list = append(list, f.nth(i))
		}
		return []string{
			fmt.Sprintf(format, f.schemafn(x.SQLName), strings.Join(list, ", ")),
		}
	}
	return []string{fmt.Sprintf("[[ UNSUPPORTED TYPE: %T ]]", v)}
}
// convertTypes generates the conversions to convert the foreign key field
// types to their respective referenced field types.
func (f *Funcs) convertTypes(fkey ForeignKey) string {
	var p []string
	for i := range fkey.Fields {
		field := fkey.Fields[i]
		refField := fkey.RefFields[i]
		expr := f.short(fkey.Table) + "." + field.GoName
		// types match, can match
		if field.Type == refField.Type {
			p = append(p, expr)
			continue
		}
		// convert types
		typ, refType := field.Type, refField.Type
		// unwrap sql.NullXxx by accessing its value field (e.g. .Int64)
		if strings.HasPrefix(typ, "sql.Null") {
			expr = expr + "." + typ[8:]
			typ = strings.ToLower(typ[8:])
		}
		// add an explicit cast when the unwrapped type still differs
		if strings.ToLower(refType) != typ {
			expr = refType + "(" + expr + ")"
		}
		p = append(p, expr)
	}
	return strings.Join(p, ", ")
}
// params converts a list of fields into their named Go parameters, skipping
// any Field with Name contained in ignore. addType will cause the go Type to
// be added after each variable name. addPrefix will cause the returned string
// to be prefixed with ", " if the generated string is not empty.
//
// Any field name encountered will be checked against goReservedNames, and will
// have its name substituted by its corresponding looked up value.
//
// Used to present a comma separated list of Go variable names for use with as
// either a Go func parameter list, or in a call to another Go func.
// (ie, ", a, b, c, ..." or ", a T1, b T2, c T3, ...").
func (f *Funcs) params(fields []Field, addType bool) string {
	var vals []string
	for _, field := range fields {
		vals = append(vals, f.param(field, addType))
	}
	return strings.Join(vals, ", ")
}

// param builds a single Go parameter for field: the GoName lower-cased on
// its first word, substituted if it collides with a Go reserved name, and
// optionally followed by its Go type.
func (f *Funcs) param(field Field, addType bool) string {
	n := strings.Split(snaker.CamelToSnake(field.GoName), "_")
	s := strings.ToLower(n[0]) + field.GoName[len(n[0]):]
	// check go reserved names
	if r, ok := goReservedNames[strings.ToLower(s)]; ok {
		s = r
	}
	// add the go type
	if addType {
		s += " " + f.typefn(field.Type)
	}
	// add to vals
	return s
}
// zero generates a comma separated list of zero values for the passed
// values (strings pass through; tables and field slices use each field's
// declared zero value).
func (f *Funcs) zero(z ...interface{}) string {
	var zeroes []string
	for i, v := range z {
		switch x := v.(type) {
		case string:
			zeroes = append(zeroes, x)
		case Table:
			for _, p := range x.Fields {
				zeroes = append(zeroes, p.Zero)
			}
		case []Field:
			for _, p := range x {
				zeroes = append(zeroes, p.Zero)
			}
		default:
			zeroes = append(zeroes, fmt.Sprintf("/* UNSUPPORTED TYPE %d %T */", i, v))
		}
	}
	return strings.Join(zeroes, ", ")
}
// typefn generates the Go type for typ, prefixing the custom package name
// when the base type is not a registered known type. Slice markers and
// already-qualified types pass through unchanged.
func (f *Funcs) typefn(typ string) string {
	// qualified types (containing a '.') are used as-is
	if strings.Contains(typ, ".") {
		return typ
	}
	// peel off slice markers, preserving them as a prefix
	var prefix string
	for strings.HasPrefix(typ, "[]") {
		prefix += "[]"
		typ = strings.TrimPrefix(typ, "[]")
	}
	if _, ok := f.knownTypes[typ]; ok {
		return prefix + typ
	}
	// unknown types are qualified with the custom package, when set
	pkg := f.custom
	if pkg != "" {
		pkg += "."
	}
	return prefix + pkg + typ
}
// field generates a struct field definition for field, rendering the
// configured field tag template and appending the SQL column name as a
// trailing comment.
func (f *Funcs) field(field Field) (string, error) {
	tag := ""
	buf := new(bytes.Buffer)
	if err := f.fieldtag.Funcs(f.FuncMap()).Execute(buf, field); err != nil {
		return "", err
	}
	if s := buf.String(); s != "" {
		tag = " " + s
	}
	s := fmt.Sprintf("%s %s%s %s", field.GoName, field.Type, tag, "// "+field.SQLName)
	return s, nil
}
// short generates a safe Go identifier for typ. typ is first checked
// against shorts, and if not found, then the value is calculated and
// stored in the shorts for future use.
//
// A short is the concatenation of the lowercase of the first character in
// the words comprising the name. For example, "MyCustomName" will have have
// the short of "mcn".
//
// If a generated short conflicts with a Go reserved name or a name used in
// the templates, then the corresponding value in goReservedNames map will be
// used.
//
// Generated shorts that have conflicts with any scopeConflicts member will
// have nameConflictSuffix appended.
func (f *Funcs) short(v interface{}) string {
	var n string
	switch x := v.(type) {
	case string:
		n = x
	case Table:
		n = x.GoName
	default:
		return fmt.Sprintf("[[ UNSUPPORTED TYPE %T ]]", v)
	}
	// check short name map
	name, ok := f.shorts[n]
	if !ok {
		// calc the short name: first letter of each word, skipping "id"
		var u []string
		for _, s := range strings.Split(strings.ToLower(snaker.CamelToSnake(n)), "_") {
			if len(s) > 0 && s != "id" {
				u = append(u, s[:1])
			}
		}
		// ensure no name conflict
		name = f.checkName(strings.Join(u, ""))
		// store back to short name map
		f.shorts[n] = name
	}
	// append suffix if conflict exists
	if _, ok := templateReservedNames[name]; ok {
		name += f.conflict
	}
	return name
}

// checkName substitutes name with its safe alternative when it collides
// with a Go reserved name.
func (f *Funcs) checkName(name string) string {
	if n, ok := goReservedNames[name]; ok {
		return n
	}
	return name
}
// colname returns the ColumnName of a field escaped if needed.
func (f *Funcs) colname(z Field) string {
	if f.escColumn {
		return f.escfn(z.SQLName)
	}
	return z.SQLName
}

// escfn escapes s by wrapping it in double quotes.
func (f *Funcs) escfn(s string) string {
	return `"` + s + `"`
}
// PackageImport holds information about a Go package import.
type PackageImport struct {
	Alias string // optional import alias; empty when none
	Pkg   string // package import path
}
// String satisfies the fmt.Stringer interface, rendering the import as it
// would appear in a Go import block ("alias \"path\"" or just "\"path\"").
func (v PackageImport) String() string {
	if v.Alias == "" {
		return fmt.Sprintf("%q", v.Pkg)
	}
	return fmt.Sprintf("%s %q", v.Alias, v.Pkg)
}
// templateReservedNames are identifiers already used by the generated code;
// generated shorts colliding with these get the conflict suffix appended.
var templateReservedNames = map[string]bool{
	// variables
	"ctx":  true,
	"db":   true,
	"err":  true,
	"log":  true,
	"logf": true,
	"res":  true,
	"rows": true,
	// packages
	"context": true,
	"csv":     true,
	"driver":  true,
	"errors":  true,
	"fmt":     true,
	"hstore":  true,
	"regexp":  true,
	"sql":     true,
	"strings": true,
	"time":    true,
	"uuid":    true,
}
// goReservedNames is a map of go reserved names to "safe" names.
var goReservedNames = map[string]string{
	"break":       "brk",
	"case":        "cs",
	"chan":        "chn",
	"const":       "cnst",
	"continue":    "cnt",
	"default":     "def",
	"defer":       "dfr",
	"else":        "els",
	"fallthrough": "flthrough",
	"for":         "fr",
	"func":        "fn",
	"go":          "goVal",
	"goto":        "gt",
	"if":          "ifVal",
	"import":      "imp",
	"interface":   "iface",
	"map":         "mp",
	"package":     "pkg",
	"range":       "rnge",
	"return":      "ret",
	"select":      "slct",
	"struct":      "strct",
	"switch":      "swtch",
	"type":        "typ",
	"var":         "vr",
	// go types
	"error":      "e",
	"bool":       "b",
	"string":     "str",
	"byte":       "byt",
	"rune":       "r",
	"uintptr":    "uptr",
	"int":        "i",
	"int8":       "i8",
	"int16":      "i16",
	"int32":      "i32",
	"int64":      "i64",
	"uint":       "u",
	"uint8":      "u8",
	"uint16":     "u16",
	"uint32":     "u32",
	"uint64":     "u64",
	"float32":    "z",
	"float64":    "f",
	"complex64":  "c",
	"complex128": "c128",
}
// nameContext appends the "Context" suffix to name when context is true;
// otherwise name is returned unchanged.
func nameContext(context bool, name string) string {
	if !context {
		return name
	}
	return name + "Context"
}
|
package action
import (
"context"
"errors"
"fmt"
"github.com/sirupsen/logrus"
"github.com/operator-framework/operator-registry/internal/declcfg"
"github.com/operator-framework/operator-registry/internal/model"
"github.com/operator-framework/operator-registry/pkg/image"
)
// Diff renders two sets of catalog refs and computes the declarative
// config difference between them.
type Diff struct {
	Registry image.Registry // registry used to resolve and render refs
	OldRefs  []string       // old refs; may be empty (heads-only mode)
	NewRefs  []string       // new refs; must be non-empty
	Logger   *logrus.Entry  // logger for the action
}
// Run renders the old and new refs, converts them to models, and returns
// the declarative config holding their difference. Bundle refs are not
// permitted as inputs.
func (a Diff) Run(ctx context.Context) (*declcfg.DeclarativeConfig, error) {
	if err := a.validate(); err != nil {
		return nil, err
	}
	// Disallow bundle refs.
	mask := RefDCDir | RefDCImage | RefSqliteFile | RefSqliteImage
	// Heads-only mode does not require an old ref, so there may be nothing to render.
	var oldModel model.Model
	if len(a.OldRefs) != 0 {
		oldRender := Render{Refs: a.OldRefs, Registry: a.Registry, AllowedRefMask: mask}
		oldCfg, err := oldRender.Run(ctx)
		if err != nil {
			if errors.Is(err, ErrNotAllowed) {
				return nil, fmt.Errorf("%w (diff does not permit direct bundle references)", err)
			}
			return nil, fmt.Errorf("error rendering old refs: %v", err)
		}
		oldModel, err = declcfg.ConvertToModel(*oldCfg)
		if err != nil {
			return nil, fmt.Errorf("error converting old declarative config to model: %v", err)
		}
	}
	newRender := Render{Refs: a.NewRefs, Registry: a.Registry, AllowedRefMask: mask}
	newCfg, err := newRender.Run(ctx)
	if err != nil {
		if errors.Is(err, ErrNotAllowed) {
			return nil, fmt.Errorf("%w (diff does not permit direct bundle references)", err)
		}
		return nil, fmt.Errorf("error rendering new refs: %v", err)
	}
	newModel, err := declcfg.ConvertToModel(*newCfg)
	if err != nil {
		return nil, fmt.Errorf("error converting new declarative config to model: %v", err)
	}
	// diff of an empty old model against new yields the full new content
	diffModel, err := declcfg.Diff(oldModel, newModel)
	if err != nil {
		return nil, fmt.Errorf("error generating diff: %v", err)
	}
	cfg := declcfg.ConvertFromModel(diffModel)
	return &cfg, nil
}
// validate checks that at least one new ref was provided; old refs are
// optional (heads-only mode). Receiver renamed to a for consistency with Run.
func (a Diff) validate() error {
	if len(a.NewRefs) == 0 {
		return errors.New("no new refs to diff")
	}
	return nil
}
|
/**
 * Add user.
 *
 * <p>Creates a fresh root directory for the user in Redis, stores a root
 * node pointing at that directory, then stores the user's own hash.</p>
 *
 * @param user the user to add
 * @return always 1
 * @throws JsonProcessingException if the root node cannot be serialized to JSON
 */
public int addUser(User user) throws JsonProcessingException {
    // allocate a new directory id from the global "dircount" counter
    DirectoryInfo dirInfo = new DirectoryInfo();
    Long newDirId = redisTemplate.opsForValue().increment("dircount",1);
    dirInfo.setId(String.valueOf(newDirId));
    dirInfo.setName(AuthConstants.DEFAULT_ROOT_DIR_NAME);
    dirInfo.setCreateTime(String.valueOf(System.currentTimeMillis()));
    // persist the directory as a Redis hash under dirs:<id>
    Map<String, String> dirMap = new HashMap<>();
    dirMap.put("id", dirInfo.getId());
    dirMap.put("name", dirInfo.getName());
    dirMap.put("createTime", dirInfo.getCreateTime());
    redisTemplate.opsForHash().putAll("dirs:"+newDirId, dirMap);
    // root node for the user points at the new directory (stored as JSON)
    Node node = new Node();
    node.setDir(true);
    node.setValue(String.valueOf(newDirId));
    redisTemplate.opsForValue().set("nodes:"+user.getId(), new ObjectMapper().writeValueAsString(node));
    // persist the user as a Redis hash under users:<id>
    // NOTE(review): password is stored as received — confirm it is hashed upstream
    Map<String, String> userMap = new HashMap<>();
    userMap.put("id", user.getId());
    userMap.put("password", user.getPassword());
    userMap.put("createTime", user.getCreateTime());
    redisTemplate.opsForHash().putAll("users:"+user.getId(), userMap);
    return 1;
}
/* glonass ephemeris to satellite clock bias -----------------------------------
* compute satellite clock bias with glonass ephemeris
* args   : gtime_t time     I   time by satellite clock (gpst)
*          geph_t *geph     I   glonass ephemeris
* return : satellite clock bias (s)
* notes  : see ref [2]; the bias -taun+gamn*dt depends on dt, so dt is
*          refined by a short fixed-point iteration before evaluation
*-----------------------------------------------------------------------------*/
extern double geph2clk(gtime_t time, const geph_t *geph)
{
    double dt;
    int iter;

    trace(4,"geph2clk: time=%s sat=%2d\n",time_str(time,3),geph->sat);

    dt=timediff(time,geph->toe);

    /* two fixed-point iterations to remove the clock bias from dt */
    for (iter=0;iter<2;iter++) {
        dt-=-geph->taun+geph->gamn*dt;
    }
    return -geph->taun+geph->gamn*dt;
}
def load_weights(path, actor, encoder=None, planner=None, cnn=None, gripper_cnn=None, step=""):
    """Restore network weights from ``path``.

    Prefers a TF checkpoint (when a ``checkpoint`` file exists in the
    directory); otherwise falls back to per-network ``.h5`` files, restoring
    each optional network only when it was supplied.

    Args:
        path: directory containing either a TF checkpoint or ``.h5`` files.
        actor: policy network (always restored).
        encoder, planner, cnn, gripper_cnn: optional networks to restore.
        step: optional suffix selecting a specific ``.h5`` snapshot.
    """
    if 'checkpoint' in os.listdir(path):
        # Build the Checkpoint with exactly the objects provided so restore()
        # can match the saved object graph.
        if gripper_cnn is not None and cnn is not None:
            ckpt = tf.train.Checkpoint(step=tf.Variable(1), actor=actor, encoder=encoder, planner=planner, cnn=cnn, gripper_cnn=gripper_cnn)
        elif cnn is not None:
            ckpt = tf.train.Checkpoint(step=tf.Variable(1), actor=actor, encoder=encoder, planner=planner, cnn=cnn)
        else:
            ckpt = tf.train.Checkpoint(step=tf.Variable(1), actor=actor, encoder=encoder, planner=planner)
        ckpt.restore(tf.train.latest_checkpoint(path)).expect_partial()
        print('Checkpoint restored')
    else:
        actor.load_weights(f'{path}/model' + step + '.h5')
        if planner is not None: planner.load_weights(f'{path}/planner' + step + '.h5')
        if encoder is not None: encoder.load_weights(f'{path}/encoder' + step + '.h5')
        if cnn is not None: cnn.load_weights(f'{path}/cnn' + step + '.h5')
        # BUG FIX: gripper_cnn was accepted but never restored on the .h5
        # path, silently leaving it at its initial weights.
        # NOTE(review): assumes the snapshot was saved as
        # 'gripper_cnn{step}.h5' — confirm against the save routine.
        if gripper_cnn is not None: gripper_cnn.load_weights(f'{path}/gripper_cnn' + step + '.h5')
from libpython.spam.db import Conexao
from libpython.spam.models import Usuario
def test_salvar_usuario():
    """Persisting a user should assign it an integer id."""
    connection = Conexao()
    session = connection.gerar_sessao()
    user = Usuario(nome='Eskurinho Dev')
    session.salvar(user)
    assert isinstance(user.id, int)
    session.roll_back()
    session.fechar()
    connection.fechar()
def test_listar_usuarios():
    """Every saved user should come back from a subsequent listing."""
    connection = Conexao()
    session = connection.gerar_sessao()
    saved_users = [Usuario(nome='Eskurinho Dev'), Usuario(nome='<NAME>')]
    for user in saved_users:
        session.salvar(user)
    assert saved_users == session.listar()
    session.roll_back()
    session.fechar()
    connection.fechar()
|
use super::{
acc_value::{cal_acc_pk, AccValue},
keys::AccPublicKey,
poly::{poly_a, poly_b, poly_variable_minus_one, Poly, Variable, R, S},
set::Set,
};
use anyhow::{ensure, Context as _, Result};
use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve};
use ark_ff::{PrimeField, Zero};
use core::marker::PhantomData;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
/// Raw-pointer wrapper asserting `Send` so rayon workers can fill disjoint
/// slots of a pre-allocated buffer in parallel.
#[derive(Copy, Clone)]
struct SendPtr<T>(*mut T);
// SAFETY: sound only while concurrent users write to distinct offsets; the
// `coeff_par_iter_with_index` loops in this module write one unique `idx`
// slot per task — NOTE(review): confirm indices are in fact distinct.
unsafe impl<T> Send for SendPtr<T> {}
/// Set operation
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum Op {
    /// Elements common to both operands.
    Intersection,
    /// Elements in either operand.
    Union,
    /// Elements of the left operand that are not in the right operand.
    Difference,
}
/// Pairing-based proof that a committed value accumulates the intersection
/// of two sets. See `verify` for the checked equations.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
struct IntersectionProof<E: PairingEngine> {
    /// Accumulator of the intersection set under powers of `x`.
    #[serde(with = "super::serde_impl")]
    g_x: E::G1Affine,
    /// Beta-shifted copy of `g_x` (knowledge check).
    #[serde(with = "super::serde_impl")]
    g_x_beta: E::G1Affine,
    /// Commitment to the quotient polynomial q(x, y).
    #[serde(with = "super::serde_impl")]
    q_x_y: E::G1Affine,
    /// Delta-shifted copy of `q_x_y` (knowledge check).
    #[serde(with = "super::serde_impl")]
    q_x_y_delta: E::G1Affine,
    /// `g_x` shifted down one power of `x`; `verify` checks
    /// e(g_x, h) == e(l_x, h^x), i.e. the accumulator polynomial is
    /// divisible by `x`.
    #[serde(with = "super::serde_impl")]
    l_x: E::G1Affine,
    #[serde(bound = "E: PairingEngine")]
    _marker: PhantomData<E>,
}
impl<E: PairingEngine> IntersectionProof<E> {
    /// Builds the proof from the intersection `set` and the quotient
    /// polynomial `q_poly` in variables `x` and `y`.
    ///
    /// The `get_*` closures fetch precomputed public-key group elements for
    /// the corresponding monomials.
    #[allow(clippy::many_single_char_names)]
    #[allow(clippy::too_many_arguments)]
    fn new(
        set: &Set,
        q_poly: &Poly<E::Fr>,
        x: Variable,
        y: Variable,
        g: E::G1Affine,
        get_g_x_i: impl Fn(u64) -> E::G1Affine + Sync + Send,
        get_g_beta_x_i: impl Fn(u64) -> E::G1Affine + Sync + Send,
        get_g_x_i_y_j: impl Fn(u64, u64) -> E::G1Affine + Sync + Send,
        get_g_delta_x_i_y_j: impl Fn(u64, u64) -> E::G1Affine + Sync + Send,
    ) -> Self {
        // Accumulate the set under x^i and beta*x^i in parallel.
        let (g_x, g_x_beta) = rayon::join(
            || cal_acc_pk(set, &get_g_x_i),
            || cal_acc_pk(set, &get_g_beta_x_i),
        );
        // Same accumulation shifted down one power of x (power 1 maps to g).
        let l_x = cal_acc_pk(set, |i| if i == 1 { g } else { get_g_x_i(i - 1) });
        // Fill MSM bases/scalars for q_poly in parallel. Buffers are
        // pre-allocated; each rayon task writes its own `idx` slot through
        // SendPtr, so committing the lengths with set_len afterwards is
        // sound.
        let q_poly_num_terms = q_poly.num_terms();
        let mut bases: Vec<E::G1Affine> = Vec::with_capacity(q_poly_num_terms);
        let mut delta_bases: Vec<E::G1Affine> = Vec::with_capacity(q_poly_num_terms);
        let mut scalars: Vec<<E::Fr as PrimeField>::BigInt> = Vec::with_capacity(q_poly_num_terms);
        let bases_ptr = bases.as_mut_ptr();
        let delta_bases_ptr = delta_bases.as_mut_ptr();
        let scalars_ptr = scalars.as_mut_ptr();
        q_poly.coeff_par_iter_with_index().for_each_with(
            (
                SendPtr(bases_ptr),
                SendPtr(delta_bases_ptr),
                SendPtr(scalars_ptr),
            ),
            |(bases_ptr, delta_bases_ptr, scalars_ptr), (idx, (term, coeff))| {
                let i = term.get_power(x);
                let j = term.get_power(y);
                unsafe {
                    *bases_ptr.0.add(idx) = get_g_x_i_y_j(i, j);
                    *delta_bases_ptr.0.add(idx) = get_g_delta_x_i_y_j(i, j);
                    *scalars_ptr.0.add(idx) = coeff.into_repr();
                }
            },
        );
        // SAFETY: every slot in 0..q_poly_num_terms was initialized above.
        unsafe {
            bases.set_len(q_poly_num_terms);
            delta_bases.set_len(q_poly_num_terms);
            scalars.set_len(q_poly_num_terms);
        }
        // Commit to q(x, y) and its delta-shifted copy via two MSMs.
        let (q_x_y, q_x_y_delta) = rayon::join(
            || VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine(),
            || VariableBaseMSM::multi_scalar_mul(&delta_bases[..], &scalars[..]).into_affine(),
        );
        Self {
            g_x,
            g_x_beta,
            q_x_y,
            q_x_y_delta,
            l_x,
            _marker: PhantomData,
        }
    }
    /// Verifies the proof against the operand accumulators.
    ///
    /// `lhs_acc`/`rhs_acc` are the G1/G2 accumulators of the two operand
    /// sets; the remaining arguments are public-key elements. Each `ensure!`
    /// message names the pairing equation being checked.
    #[allow(clippy::too_many_arguments)]
    fn verify(
        &self,
        lhs_acc: E::G1Affine,
        rhs_acc: E::G2Affine,
        h: E::G2Affine,
        h_y_q: E::G2Affine,
        h_beta: E::G2Affine,
        h_delta: E::G2Affine,
        h_x: E::G2Affine,
    ) -> Result<()> {
        // Main intersection equation.
        ensure!(
            E::pairing(lhs_acc, rhs_acc)
                == E::product_of_pairings(&[
                    (self.g_x.into(), h_y_q.into()),
                    (self.q_x_y.into(), h.into())
                ]),
            "e(A, B) != e(I, h^{y^q}) * e(Q_{x,y}, h)"
        );
        // Knowledge check for g_x.
        ensure!(
            E::pairing(self.g_x, h_beta) == E::pairing(self.g_x_beta, h),
            "e(I, h^{beta}) != e(I_{beta}, h)"
        );
        // Knowledge check for q_x_y.
        ensure!(
            E::pairing(self.q_x_y, h_delta) == E::pairing(self.q_x_y_delta, h),
            "e(Q_{x,y}, h^{delta}) != e(Q_{x,y,delta}, h)"
        );
        // Divisibility-by-x check via the shifted accumulator l_x.
        ensure!(
            E::pairing(self.g_x, h) == E::pairing(self.l_x, h_x),
            "e(I, h) != e(L, h^x)"
        );
        Ok(())
    }
}
/// Proof for an intermediate set operation, whose result is itself an
/// accumulated value usable in further operations.
///
/// Holds one `IntersectionProof` per variable order (r-over-s and s-over-r),
/// gamma-shifted copies of the result accumulator (knowledge checks), and
/// the `z_*` commitments tying the G1 and G2 halves of the result together.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct IntermediateProof<E: PairingEngine> {
    op: Op,
    #[serde(bound = "E: PairingEngine")]
    inner_proof_r: IntersectionProof<E>,
    #[serde(bound = "E: PairingEngine")]
    inner_proof_s: IntersectionProof<E>,
    #[serde(with = "super::serde_impl")]
    result_acc_s_r_gamma: E::G1Affine,
    #[serde(with = "super::serde_impl")]
    result_acc_r_s_gamma: E::G1Affine,
    #[serde(with = "super::serde_impl")]
    z_s_r: E::G1Affine,
    #[serde(with = "super::serde_impl")]
    z_r_s: E::G1Affine,
}
impl<E: PairingEngine> IntermediateProof<E> {
    /// Verifies that `result_acc` accumulates `lhs op rhs` given the operand
    /// accumulators and the public key.
    pub fn verify(
        &self,
        lhs_acc: &AccValue<E>,
        rhs_acc: &AccValue<E>,
        result_acc: &AccValue<E>,
        pk: &AccPublicKey<E>,
    ) -> Result<()> {
        // Check both inner intersection proofs in parallel (one per
        // variable order).
        let (verify_inner_proof_r, verify_inner_proof_s) = rayon::join(
            || {
                self.inner_proof_r.verify(
                    lhs_acc.g_s,
                    rhs_acc.h_r_s,
                    pk.h,
                    pk.h_s_q,
                    pk.h_beta,
                    pk.h_delta,
                    pk.h_r,
                )
            },
            || {
                self.inner_proof_s.verify(
                    lhs_acc.g_r,
                    rhs_acc.h_s_r,
                    pk.h,
                    pk.h_r_q,
                    pk.h_beta,
                    pk.h_delta,
                    pk.h_s,
                )
            },
        );
        verify_inner_proof_r.context("failed to verify the inner_proof_r.")?;
        verify_inner_proof_s.context("failed to verify the inner_proof_s.")?;
        // Tie the result accumulator to the proven intersection via
        // inclusion-exclusion in the exponent, depending on the operation.
        match self.op {
            Op::Intersection => {
                ensure!(
                    result_acc.g_s == self.inner_proof_s.g_x,
                    "acc(set).g_s is invalid."
                );
                ensure!(
                    result_acc.g_r == self.inner_proof_r.g_x,
                    "acc(set).g_r is invalid."
                );
            }
            Op::Union => {
                // union = lhs + rhs - intersection.
                ensure!(
                    result_acc.g_s == lhs_acc.g_s + rhs_acc.g_s + (-self.inner_proof_s.g_x),
                    "acc(set).g_s is invalid."
                );
                ensure!(
                    result_acc.g_r == lhs_acc.g_r + rhs_acc.g_r + (-self.inner_proof_r.g_x),
                    "acc(set).g_r is invalid."
                );
            }
            Op::Difference => {
                // difference = lhs - intersection.
                ensure!(
                    result_acc.g_s == lhs_acc.g_s + (-self.inner_proof_s.g_x),
                    "acc(set).g_s is invalid."
                );
                ensure!(
                    result_acc.g_r == lhs_acc.g_r + (-self.inner_proof_r.g_x),
                    "acc(set).g_r is invalid."
                );
            }
        }
        // Gamma knowledge checks on the result accumulator's G2 halves.
        ensure!(
            E::pairing(pk.g_gamma, result_acc.h_r_s) == E::pairing(self.result_acc_r_s_gamma, pk.h),
            "e(g^{gamma}, R_{r,s}) != e(R_{r,s,gamma}, h)"
        );
        ensure!(
            E::pairing(pk.g_gamma, result_acc.h_s_r) == E::pairing(self.result_acc_s_r_gamma, pk.h),
            "e(g^{gamma}, R_{s,r}) != e(R_{s,r,gamma}, h)"
        );
        // Cross-group consistency: the G1 and G2 halves must encode the same
        // set, witnessed by the z commitments.
        ensure!(
            E::product_of_pairings(&[
                (result_acc.g_r.into(), pk.h.into()),
                (pk.g.into(), (-result_acc.h_r_s).into())
            ]) == E::pairing(self.z_s_r, pk.h_s + (-pk.h)),
            "e(R_{r}, h) * e(g, 1/R_{r,s}) != e(Z_{s,r}, h^{s-1})"
        );
        ensure!(
            E::product_of_pairings(&[
                (result_acc.g_s.into(), pk.h.into()),
                (pk.g.into(), (-result_acc.h_s_r).into())
            ]) == E::pairing(self.z_r_s, pk.h_r + (-pk.h)),
            "e(R_{s}, h) * e(g, 1/R_{s,r}) != e(Z_{r,s}, h^{r-1})"
        );
        Ok(())
    }
}
/// Computes `lhs_set op rhs_set`, returning the result set, its accumulator,
/// and an `IntermediateProof` for it.
///
/// All three operations are reduced to the intersection: the union and
/// difference accumulators are derived from it by inclusion-exclusion in the
/// exponent.
pub fn compute_set_operation_intermediate<E: PairingEngine>(
    op: Op,
    lhs_set: &Set,
    lhs_acc: &AccValue<E>,
    rhs_set: &Set,
    rhs_acc: &AccValue<E>,
    pk: &AccPublicKey<E>,
) -> (Set, AccValue<E>, IntermediateProof<E>) {
    let intersection_set = lhs_set & rhs_set;
    // Quotient polynomial supporting the intersection argument.
    let lhs_poly: Poly<E::Fr> = poly_a(lhs_set, S);
    let rhs_poly: Poly<E::Fr> = poly_b(rhs_set, R, S, pk.q);
    let mut q_poly = &lhs_poly * &rhs_poly;
    q_poly.remove_intersected_term(S, pk.q, &intersection_set);
    // One intersection proof per variable order (r/s swapped).
    let (inner_proof_r, inner_proof_s) = rayon::join(
        || {
            IntersectionProof::<E>::new(
                &intersection_set,
                &q_poly,
                R,
                S,
                pk.g,
                |i| pk.get_g_r_i(i),
                |i| pk.get_g_beta_r_i(i),
                |i, j| pk.get_g_r_i_s_j(i, j),
                |i, j| pk.get_g_delta_r_i_s_j(i, j),
            )
        },
        || {
            IntersectionProof::<E>::new(
                &intersection_set,
                &q_poly,
                S,
                R,
                pk.g,
                |i| pk.get_g_s_i(i),
                |i| pk.get_g_beta_s_i(i),
                |i, j| pk.get_g_r_i_s_j(i, j),
                |i, j| pk.get_g_delta_r_i_s_j(i, j),
            )
        },
    );
    let result_set = match op {
        Op::Intersection => intersection_set,
        Op::Union => lhs_set | rhs_set,
        Op::Difference => lhs_set / &intersection_set,
    };
    // Result accumulator: intersection is taken directly from the inner
    // proofs; union/difference are derived from the operand accumulators by
    // adding/subtracting the intersection in the exponent.
    let result_acc = match op {
        Op::Intersection => AccValue::<E>::new(
            inner_proof_s.g_x,
            inner_proof_r.g_x,
            cal_acc_pk(&result_set, |i| pk.get_h_s_r_i(i)),
            cal_acc_pk(&result_set, |i| pk.get_h_r_s_i(i)),
        ),
        Op::Union => AccValue::<E>::new(
            lhs_acc.g_s + rhs_acc.g_s + (-inner_proof_s.g_x),
            lhs_acc.g_r + rhs_acc.g_r + (-inner_proof_r.g_x),
            cal_acc_pk(&result_set, |i| pk.get_h_s_r_i(i)),
            cal_acc_pk(&result_set, |i| pk.get_h_r_s_i(i)),
        ),
        Op::Difference => AccValue::<E>::new(
            lhs_acc.g_s + (-inner_proof_s.g_x),
            lhs_acc.g_r + (-inner_proof_r.g_x),
            cal_acc_pk(&result_set, |i| pk.get_h_s_r_i(i)),
            cal_acc_pk(&result_set, |i| pk.get_h_r_s_i(i)),
        ),
    };
    // Gamma-shifted copies of the result accumulator (knowledge checks).
    let (result_acc_s_r_gamma, result_acc_r_s_gamma) = rayon::join(
        || cal_acc_pk(&result_set, |i| pk.get_g_gamma_s_r_i(i)),
        || cal_acc_pk(&result_set, |i| pk.get_g_gamma_r_s_i(i)),
    );
    // z such that (result poly in r) - (result poly in r,s) = z * (s - 1);
    // the division must be exact, which the debug_assert confirms.
    let result_y_poly = poly_a::<E::Fr>(&result_set, R);
    let result_x_y_poly = poly_b::<E::Fr>(&result_set, R, S, pk.q);
    let (z_poly, r_poly) = (result_y_poly - result_x_y_poly) / &poly_variable_minus_one::<E::Fr>(S);
    debug_assert!(r_poly.is_zero());
    // Fill MSM bases/scalars for z_poly in parallel; each task writes a
    // unique `idx` slot through SendPtr before the lengths are committed.
    let z_poly_num_terms = z_poly.num_terms();
    let mut z_s_r_bases: Vec<E::G1Affine> = Vec::with_capacity(z_poly_num_terms);
    let mut z_r_s_bases: Vec<E::G1Affine> = Vec::with_capacity(z_poly_num_terms);
    let mut scalars: Vec<<E::Fr as PrimeField>::BigInt> = Vec::with_capacity(z_poly_num_terms);
    let z_s_r_bases_ptr = z_s_r_bases.as_mut_ptr();
    let z_r_s_bases_ptr = z_r_s_bases.as_mut_ptr();
    let scalars_ptr = scalars.as_mut_ptr();
    z_poly.coeff_par_iter_with_index().for_each_with(
        (
            SendPtr(z_s_r_bases_ptr),
            SendPtr(z_r_s_bases_ptr),
            SendPtr(scalars_ptr),
        ),
        |(z_s_r_bases_ptr, z_r_s_bases_ptr, scalars_ptr), (idx, (term, coeff))| {
            let i = term.get_power(R);
            let j = term.get_power(S);
            // Select the public-key base matching the monomial r^i * s^j
            // (and its mirror for the swapped variable order).
            let (z_s_r_base, z_r_s_base) = match (i, j) {
                (0, 0) => (pk.g, pk.g),
                (0, _) => (pk.get_g_s_i(j), pk.get_g_r_i(j)),
                (_, 0) => (pk.get_g_r_i(i), pk.get_g_s_i(i)),
                (_, _) => (pk.get_g_r_i_s_j(i, j), pk.get_g_r_i_s_j(j, i)),
            };
            unsafe {
                *z_s_r_bases_ptr.0.add(idx) = z_s_r_base;
                *z_r_s_bases_ptr.0.add(idx) = z_r_s_base;
                *scalars_ptr.0.add(idx) = coeff.into_repr();
            }
        },
    );
    // SAFETY: every slot in 0..z_poly_num_terms was initialized above.
    unsafe {
        z_s_r_bases.set_len(z_poly_num_terms);
        z_r_s_bases.set_len(z_poly_num_terms);
        scalars.set_len(z_poly_num_terms);
    }
    let (z_s_r, z_r_s) = rayon::join(
        || VariableBaseMSM::multi_scalar_mul(&z_s_r_bases[..], &scalars[..]).into_affine(),
        || VariableBaseMSM::multi_scalar_mul(&z_r_s_bases[..], &scalars[..]).into_affine(),
    );
    let proof = IntermediateProof {
        op,
        inner_proof_r,
        inner_proof_s,
        result_acc_s_r_gamma,
        result_acc_r_s_gamma,
        z_s_r,
        z_r_s,
    };
    (result_set, result_acc, proof)
}
/// Proof for a final set operation, whose result is returned as a plain set
/// and verified directly against a freshly computed accumulator.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct FinalProof<E: PairingEngine> {
    op: Op,
    #[serde(bound = "E: PairingEngine")]
    inner_proof: IntersectionProof<E>,
}
impl<E: PairingEngine> FinalProof<E> {
    /// Verifies that `result_set` equals `lhs op rhs` given the operand
    /// accumulators.
    pub fn verify(
        &self,
        lhs_acc: &AccValue<E>,
        rhs_acc: &AccValue<E>,
        result_set: &Set,
        pk: &AccPublicKey<E>,
    ) -> Result<()> {
        self.inner_proof
            .verify(
                lhs_acc.g_s,
                rhs_acc.h_r_s,
                pk.h,
                pk.h_s_q,
                pk.h_beta,
                pk.h_delta,
                pk.h_r,
            )
            .context("failed to verify the inner_proof.")?;
        // Derive the expected result accumulator from the proven
        // intersection (inclusion-exclusion for union/difference) and
        // compare it against acc(result_set).
        let result_acc = match self.op {
            Op::Intersection => self.inner_proof.g_x,
            Op::Union => lhs_acc.g_r + rhs_acc.g_r + (-self.inner_proof.g_x),
            Op::Difference => lhs_acc.g_r + (-self.inner_proof.g_x),
        };
        let expect_acc = cal_acc_pk(result_set, |i| pk.get_g_r_i(i));
        ensure!(result_acc == expect_acc, "acc(set) is invalid.");
        Ok(())
    }
}
/// Computes `lhs_set op rhs_set` and a `FinalProof` for the result set.
pub fn compute_set_operation_final<E: PairingEngine>(
    op: Op,
    lhs_set: &Set,
    rhs_set: &Set,
    pk: &AccPublicKey<E>,
) -> (Set, FinalProof<E>) {
    let intersection_set = lhs_set & rhs_set;
    // Quotient polynomial supporting the intersection argument.
    let lhs_poly: Poly<E::Fr> = poly_a(lhs_set, S);
    let rhs_poly: Poly<E::Fr> = poly_b(rhs_set, R, S, pk.q);
    let mut q_poly = &lhs_poly * &rhs_poly;
    q_poly.remove_intersected_term(S, pk.q, &intersection_set);
    // A single intersection proof suffices: union/difference results are
    // derived from it during verification.
    let inner_proof = IntersectionProof::new(
        &intersection_set,
        &q_poly,
        R,
        S,
        pk.g,
        |i| pk.get_g_r_i(i),
        |i| pk.get_g_beta_r_i(i),
        |i, j| pk.get_g_r_i_s_j(i, j),
        |i, j| pk.get_g_delta_r_i_s_j(i, j),
    );
    let proof = FinalProof { op, inner_proof };
    let result = match op {
        Op::Intersection => intersection_set,
        Op::Union => lhs_set | rhs_set,
        Op::Difference => lhs_set / &intersection_set,
    };
    (result, proof)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{acc::keys::AccSecretKey, set};
    use ark_bn254::{Bn254, Fr};

    /// Builds and verifies an `IntersectionProof` on a small example, then
    /// round-trips it through bincode.
    #[test]
    fn test_intersection_proof() {
        let mut rng = rand::thread_rng();
        let q = 10;
        let sk = AccSecretKey::<Bn254>::rand(&mut rng).into();
        let pk = AccPublicKey::<Bn254>::gen_key(&sk, q);
        let s1 = set! {1, 2, 3};
        let s2 = set! {1, 5};
        // s3 = s1 ∩ s2.
        let s3 = set! {1};
        let s1_a_poly: Poly<Fr> = poly_a(&s1, S);
        let s2_b_poly: Poly<Fr> = poly_b(&s2, R, S, q);
        let mut q_poly = &s1_a_poly * &s2_b_poly;
        q_poly.remove_intersected_term(S, pk.q, &s3);
        let s1_acc = AccValue::from_set_sk(&s1, &sk, q);
        let s2_acc = AccValue::from_set_sk(&s2, &sk, q);
        let proof = IntersectionProof::<Bn254>::new(
            &s3,
            &q_poly,
            R,
            S,
            pk.g,
            |i| pk.get_g_r_i(i),
            |i| pk.get_g_beta_r_i(i),
            |i, j| pk.get_g_r_i_s_j(i, j),
            |i, j| pk.get_g_delta_r_i_s_j(i, j),
        );
        proof
            .verify(
                s1_acc.g_s,
                s2_acc.h_r_s,
                pk.h,
                pk.h_s_q,
                pk.h_beta,
                pk.h_delta,
                pk.h_r,
            )
            .unwrap();
        let bin = bincode::serialize(&proof).unwrap();
        assert_eq!(
            bincode::deserialize::<IntersectionProof<_>>(&bin[..]).unwrap(),
            proof
        );
    }

    /// Exercises all three intermediate operations end-to-end: result set,
    /// result accumulator, proof verification, and serde round-trip.
    #[test]
    fn test_intermediate_proof() {
        let mut rng = rand::thread_rng();
        let q = 10;
        let sk = AccSecretKey::<Bn254>::rand(&mut rng).into();
        let pk = AccPublicKey::<Bn254>::gen_key(&sk, q);
        let s1 = set! {1, 2, 3};
        let s2 = set! {1, 5};
        let s1_acc = AccValue::from_set_sk(&s1, &sk, q);
        let s2_acc = AccValue::from_set_sk(&s2, &sk, q);
        // Expected accumulators computed directly from the secret key.
        let intersection_acc = AccValue::from_set_sk(&set! {1}, &sk, q);
        let union_acc = AccValue::from_set_sk(&set! {1, 2, 3, 5}, &sk, q);
        let difference_acc = AccValue::from_set_sk(&set! {2, 3}, &sk, q);
        let (intersection_result_set, intersection_result_acc, intersection_proof) =
            compute_set_operation_intermediate::<Bn254>(
                Op::Intersection,
                &s1,
                &s1_acc,
                &s2,
                &s2_acc,
                &pk,
            );
        assert_eq!(intersection_result_set, set! {1});
        assert_eq!(intersection_result_acc, intersection_acc);
        intersection_proof
            .verify(&s1_acc, &s2_acc, &intersection_result_acc, &pk)
            .unwrap();
        let bin = bincode::serialize(&intersection_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<IntermediateProof<_>>(&bin[..]).unwrap(),
            intersection_proof
        );
        let (union_result_set, union_result_acc, union_proof) =
            compute_set_operation_intermediate::<Bn254>(Op::Union, &s1, &s1_acc, &s2, &s2_acc, &pk);
        assert_eq!(union_result_set, set! {1, 2, 3, 5});
        assert_eq!(union_result_acc, union_acc);
        union_proof
            .verify(&s1_acc, &s2_acc, &union_result_acc, &pk)
            .unwrap();
        let bin = bincode::serialize(&union_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<IntermediateProof<_>>(&bin[..]).unwrap(),
            union_proof
        );
        let (diff_result_set, diff_result_acc, diff_proof) =
            compute_set_operation_intermediate::<Bn254>(
                Op::Difference,
                &s1,
                &s1_acc,
                &s2,
                &s2_acc,
                &pk,
            );
        assert_eq!(diff_result_set, set! {2, 3});
        // BUG FIX: this previously compared diff_result_acc with itself,
        // making the assertion a tautology; compare against the expected
        // accumulator instead.
        assert_eq!(diff_result_acc, difference_acc);
        diff_proof
            .verify(&s1_acc, &s2_acc, &diff_result_acc, &pk)
            .unwrap();
        let bin = bincode::serialize(&diff_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<IntermediateProof<_>>(&bin[..]).unwrap(),
            diff_proof
        );
    }

    /// Exercises all three final operations end-to-end.
    #[test]
    fn test_final_proof() {
        let mut rng = rand::thread_rng();
        let q = 10;
        let sk = AccSecretKey::<Bn254>::rand(&mut rng).into();
        let pk = AccPublicKey::<Bn254>::gen_key(&sk, q);
        let s1 = set! {1, 2, 3};
        let s2 = set! {1, 5};
        let s1_acc = AccValue::from_set_sk(&s1, &sk, q);
        let s2_acc = AccValue::from_set_sk(&s2, &sk, q);
        let (intersection_result, intersection_proof) =
            compute_set_operation_final::<Bn254>(Op::Intersection, &s1, &s2, &pk);
        assert_eq!(intersection_result, set! {1});
        intersection_proof
            .verify(&s1_acc, &s2_acc, &intersection_result, &pk)
            .unwrap();
        let bin = bincode::serialize(&intersection_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<FinalProof<_>>(&bin[..]).unwrap(),
            intersection_proof
        );
        let (union_result, union_proof) =
            compute_set_operation_final::<Bn254>(Op::Union, &s1, &s2, &pk);
        assert_eq!(union_result, set! {1, 2, 3, 5});
        union_proof
            .verify(&s1_acc, &s2_acc, &union_result, &pk)
            .unwrap();
        let bin = bincode::serialize(&union_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<FinalProof<_>>(&bin[..]).unwrap(),
            union_proof
        );
        let (diff_result, diff_proof) =
            compute_set_operation_final::<Bn254>(Op::Difference, &s1, &s2, &pk);
        assert_eq!(diff_result, set! {2, 3});
        diff_proof
            .verify(&s1_acc, &s2_acc, &diff_result, &pk)
            .unwrap();
        let bin = bincode::serialize(&diff_proof).unwrap();
        assert_eq!(
            bincode::deserialize::<FinalProof<_>>(&bin[..]).unwrap(),
            diff_proof
        );
    }
}
|
<gh_stars>0
//
// // Author: <NAME> <<EMAIL>>
// // Challenge solved: https://www.hackerrank.com/challenges/crush
// // Compiler: C++14
//
#include <iostream>
#include <algorithm>
#include <map>
using namespace std;
// A point change in the running prefix sum: from position `start` onwards,
// `value` is added to the sum.
class Delta {
public:
    Delta() : start(0), value(0) {}
    Delta(long s, long v) : start(s), value(v) {}

    // Merge another delta into this one, keeping the latest start position.
    void Add(Delta& d) {
        start = d.start;
        value += d.value;
    }

    // Accumulate this delta into a running sum.
    void Sum(long& sumVal) {
        sumVal += value;
    }

    long start;
    long value;
};
typedef std::map<unsigned long,Delta> DeltaMap;
int main() {
    DeltaMap m;
    // N: array length; M: number of (a, b, k) range-add operations.
    unsigned long N,M;
    cin >> N >> M;
    while(M--)
    {
        // build the deltas on a map
        unsigned long a,b,k;
        cin >> a >> b >> k;
        // +k takes effect at index a-1 (0-based), and is undone at index b
        // (one past the inclusive end); when b == N there is nothing after
        // the range, so the cancelling delta is 0.
        Delta dPos(a-1,k);
        Delta dNeg(b,b == N ? 0:-k);
        m[a-1].Add(dPos);
        m[b].Add(dNeg);
    }
    // find the max from the running sum
    // NOTE(review): with k up to ~1e9 and many overlapping ranges the
    // running sum can exceed 32 bits; `long` is only 32-bit on LP32/Windows
    // — confirm target platform or widen to long long.
    long maxVal = 0;
    long runningSum = 0;
    std::for_each(m.begin(),m.end(),[&](std::pair<const unsigned long,Delta>& a){
        a.second.Sum(runningSum);
        if (maxVal < runningSum)
        {
            maxVal = runningSum;
        }
    });
    cout << maxVal;
    return 0;
}
|
import { AsyncSpecification } from '../index';
/**
 * Async specification that is satisfied only by the number 42.
 */
export class NumberIsFortyTwo extends AsyncSpecification<number> {
    /** Resolves to true iff `n` is exactly 42. */
    async isSatisfiedBy(n: number) {
        return n === 42;
    }
}
|
// String prints the Post meta content and body.
func (p *PostFormatter) String() string {
t := "Title: " + p.Title + "\n"
a := "Author: " + p.Author + "\n"
g := "Tag: " + p.Tag + "\n"
d := "Date: " + p.Date() + "\n"
return t + a + g + d + "\n" + string(p.Body)
} |
Seeds of change. Five plants that transformed mankind
interested in the first chapter, which gives a brief overview of Soviet psychiatry up to recent times. After fighting through the endless verbiage, one might draw the conclusion that the only scientific work of any consequence done in psychiatry was before 1917. The single major exception would be A. R. Luria, who is completely ignored in the book, except for one appearance of his name in a list of Soviet scientists; this extraordinary treatment, which is presumably because he was Jewish, is in contrast to the repeated mentions and extended space given to such mediocre apparatchiks as Snezhnevsky. Medical hegemonism seems to be triumphant, since even nursing and occupational therapy only appear in a medical context, and other professions are not even mentioned. It is not surprising to find that the political misuse of psychiatry in the USSR is not referred to, but the text is regularly interrupted with allegations such as that ECT "has become all but a repressive measure applied to even healthy people" in America. There are some short accounts of important pre-revolutionary figures like Bechterev, but the lack of any references or supporting details makes these of very limited use. One is told yet again that psychiatric day-hospitals began in Moscow in 1931-2, but this tantalizing morsel is not filled out with the kind of information that historians need. Altogether, a worthless piece of propaganda and a sadly missed opportunity. Too many people with too little knowledge have clambered aboard the "green medicine" bandwagon in recent years, so that any further book with a title even vaguely suggestive of that is liable to be approached with some suspicion. Certainly, it was with some scepticism, even a sinking heart, that this reviewer turned to the opening pages of a volume handicapped by the revelation on the dust-jacket that the author is a one-time Daily Express journalist (a former "William Hickey" no less, according to the publicity handout). 
That initially unfavourable reaction was very quickly dispelled. This is a thoughtful, thought-provoking, extremely readable work based on a wide and careful sifting of the secondary literature (the notes, placed at the end of each chapter, are alone more fascinating than many other authors' texts). All that is lacking is a little spicing of wit. The five crop plants discussed are quinine, sugar, tea, cotton, and the potato. Though only one of these is a medicine, three of the others have … |
def num_pairs(group_size):
    """Number of unordered pairs in a group of `group_size` members.

    Uses floor division so the result is an exact integer under both
    Python 2 and Python 3 semantics (n*(n-1) is always even, so no
    precision is lost).
    """
    return (group_size * (group_size - 1)) // 2
# NOTE: Python 2 script (raw_input, print statement, integer '/').
if __name__ == '__main__':
    # n participants split into m teams; report min and max possible
    # friend-pair counts.
    n, m = [int(num) for num in raw_input().split()]
    # Max pairs: m-1 singleton teams and one team with everyone else.
    max_team_size = n - m + 1
    max_num_friends = num_pairs(max_team_size)
    # Up until doing this problem, I had a single view of n % m, which said,
    # "repeatedly add m until you get to n and the remainder is n % m".
    #
    # This problem demands an additional understanding of m % n, which can be
    # expressed as the following. Take the following example of 8 % 3. For this
    # problem, we are interested in splitting up 8 people into three groups. If
    # we could have partial people, then we would have the following:
    #
    #   8 / 3
    #
    #   2.67 2.67 2.67
    #
    # The amount of decimal defines how much modulus there is. We can rewrite
    # the amount above as:
    #
    #   2.67 2.67 2.67 = 2 + 2 + 2 + (.67 + .67 + .67)
    #                  = 2 + 2 + 2 + 2
    #
    # It is not coincidence that the decimal places added up to 2 and that 8 % 3
    # = 2. If n / m exactly, then the decimal places sum is zero and n % m is
    # zero. If n % m is large, then the decimal place will be comparatively
    # large. If n % m is small, then the decimal place will be comparatively
    # small.
    #
    # In general for n / m, we have:
    #
    #   n / m
    #
    #   floor(n/m) floor(n/m) ... floor(n/m) (m times) + n%m
    #
    # Now to be as balanced as possible, we need to distribute the remainder of
    # n%m over the m floor(n/m) numbers. To do this as evenly as possible, we
    # split it up into ones and distribute them over.
    #
    # min_groups, leftover = [n/m]*m, n%m
    # for i in range(leftover):
    #     min_groups[i] += 1
    # min_num_friends = sum(num_pairs(min_group) for min_group in min_groups)
    # Closed form of the balanced split described above: n%m teams of size
    # n/m + 1, and the remaining m - n%m teams of size n/m.
    min_num_friends = (n%m) * num_pairs(n/m+1) + (m - (n%m))*num_pairs(n/m)
    print min_num_friends, max_num_friends
|
/**
* A List directly usable as a scriptable JavaScript object.
*/
public class ScriptableListWrapper implements List<Object>, Scriptable, Wrapper, Serializable {
private final List<Object> list;
private Scriptable prototype;
private Scriptable parent;
public ScriptableListWrapper(List<Object> list) {
this.list = list;
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#getClassName()
*/
public String getClassName() {
return "Object";
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#has(int,
* org.mozilla.javascript.Scriptable)
*/
public boolean has(int index, Scriptable start) {
// We catch the ArrayIndexOutOfBoundsException because the parser is
// probably trying to retrieve
// the value first(reading the statement left to right) then assign the
// value to that index...
// or something like that.
try {
return this.list.get(index) != null;
} catch (final ArrayIndexOutOfBoundsException e) {
return false;
}
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#get(int,
* org.mozilla.javascript.Scriptable)
*/
public Object get(int index, Scriptable start) {
return this.list.get(index);
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#put(int,
* org.mozilla.javascript.Scriptable, java.lang.Object)
*/
public void put(int index, Scriptable start, Object value) {
final int max = index + 1;
if (max > this.list.size()) {
for (int i = this.list.size(); i < index; i++) {
this.list.add(i, null);
}
this.list.add(index, value);
} else {
this.list.set(index, value);
}
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#delete(int)
*/
public void delete(int index) {
this.list.remove(index);
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
*/
public void delete(String name) {
try {
final int i = Integer.valueOf(name);
this.list.remove(i);
} catch (final Exception ignore) {}
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#get(java.lang.String,
* org.mozilla.javascript.Scriptable)
*/
public Object get(String name, Scriptable start) {
if (name.equals("length")) {
return new Integer(this.list.size());
}
try {
final int i = Integer.valueOf(name);
return this.list.get(i);
} catch (final Exception ignore) {
return null;
}
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#getIds()
*/
public Object[] getIds() {
final int len = this.list.size();
final Integer[] ids = new Integer[len];
for (int x = 0; x < len; x++) {
ids[x] = new Integer(x);
}
return ids;
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#getDefaultValue(java.lang.Class)
*/
public Object getDefaultValue(Class hint) {
return this.toString();
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#has(java.lang.String,
* org.mozilla.javascript.Scriptable)
*/
public boolean has(String name, Scriptable start) {
return name.equals("length");
}
/*
* (non-Javadoc)
* @see
* org.mozilla.javascript.Scriptable#hasInstance(org.mozilla.javascript.
* Scriptable)
*/
public boolean hasInstance(Scriptable value) {
Scriptable proto = value.getPrototype();
while (proto != null) {
if (proto.equals(this)) {
return true;
}
proto = proto.getPrototype();
}
return false;
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#put(java.lang.String,
* org.mozilla.javascript.Scriptable, java.lang.Object)
*/
public void put(String name, Scriptable start, Object value) {
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#getPrototype()
*/
public Scriptable getPrototype() {
return this.prototype;
}
/*
* (non-Javadoc)
* @see
* org.mozilla.javascript.Scriptable#setPrototype(org.mozilla.javascript
* .Scriptable)
*/
public void setPrototype(Scriptable prototype) {
this.prototype = prototype;
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Scriptable#getParentScope()
*/
public Scriptable getParentScope() {
return this.parent;
}
/*
* (non-Javadoc)
* @see
* org.mozilla.javascript.Scriptable#setParentScope(org.mozilla.javascript
* .Scriptable)
*/
public void setParentScope(Scriptable parent) {
this.parent = parent;
}
/*
* (non-Javadoc)
* @see org.mozilla.javascript.Wrapper#unwrap()
*/
public Object unwrap() {
return this.list;
}
public boolean add(Object o) {
return this.list.add(o);
}
public void add(int index, Object element) {
this.list.add(index, element);
}
public boolean addAll(Collection<? extends Object> c) {
return this.list.addAll(c);
}
public boolean addAll(int index, Collection<? extends Object> c) {
return this.list.addAll(index, c);
}
public void clear() {
this.list.clear();
}
public boolean contains(Object o) {
return this.list.contains(o);
}
public boolean containsAll(Collection<?> c) {
return this.list.containsAll(c);
}
public Object get(int index) {
return this.list.get(index);
}
public int indexOf(Object o) {
return this.list.indexOf(o);
}
public boolean isEmpty() {
return this.list.isEmpty();
}
public Iterator<Object> iterator() {
return this.list.iterator();
}
public int lastIndexOf(Object o) {
return this.list.lastIndexOf(o);
}
public ListIterator<Object> listIterator() {
return this.list.listIterator();
}
public ListIterator<Object> listIterator(int index) {
return this.list.listIterator(index);
}
public boolean remove(Object o) {
return this.list.remove(o);
}
public Object remove(int index) {
return this.list.remove(index);
}
public boolean removeAll(Collection c) {
return this.list.removeAll(c);
}
public boolean retainAll(Collection c) {
return this.list.retainAll(c);
}
public Object set(int index, Object element) {
return this.list.set(index, element);
}
public int size() {
return this.list.size();
}
public List<Object> subList(int fromIndex, int toIndex) {
return this.list.subList(fromIndex, toIndex);
}
public Object[] toArray() {
return this.list.toArray();
}
public <T> T[] toArray(T[] a) {
return this.list.toArray(a);
}
@Override
public String toString() {
return this.list.toString();
}
} |
/**
\ingroup Commands
\brief Write per-family gain/loss counts ("gainloss" command).
*
* Requires a family, a tree and lambda values to be loaded. If Viterbi node
* family sizes have not been computed yet, (re)builds the conditional
* distribution cache and runs the Viterbi reconstruction first. Results are
* written to "<tokens[1]>.gs".
*/
int cafe_cmd_gainloss(Globals& globals, std::vector<std::string> tokens)
{
    pCafeParam param = &globals.param;

    // Abort early unless family, tree and lambda have all been set up.
    prereqs(param, REQUIRES_FAMILY | REQUIRES_TREE | REQUIRES_LAMBDA);

    if (globals.viterbi->viterbiNodeFamilysizes.empty())
    {
        if (ConditionalDistribution::matrix.empty())
        {
            // Lazily build the birth-death caches and the conditional
            // distribution matrix needed by the Viterbi reconstruction.
            // BUG FIX: '&param' was HTML-entity-mangled ("&para;m") in the
            // two calls below; restored to take the address of family_size.
            cafe_shell_set_lambdas(param, param->input.parameters);
            reset_birthdeath_cache(param->pcafe, param->parameterized_k_value, &param->family_size);
            ConditionalDistribution::reset(param->pcafe, &param->family_size, param->num_threads, globals.num_random_samples);
        }
        pArrayList cd = ConditionalDistribution::to_arraylist();
        cafe_viterbi(globals, *globals.viterbi, &ConditionalDistribution::matrix);
        // Free only the wrapper list; the matrix rows remain owned by
        // ConditionalDistribution (hence the NULL element destructor).
        arraylist_free(cd, NULL);
    }

    // NOTE(review): tokens[1] is used without a size check — confirm the
    // command dispatcher guarantees at least two tokens.
    string name = tokens[1] + ".gs";
    ofstream ofst(name.c_str());

    pCafeTree pcafe = param->pcafe;
    // psum accumulates per-node gain/loss totals across all families.
    pCafeTree psum = cafe_tree_copy(pcafe);
    clear_tree_viterbis(psum);

    int totalsum = 0;
    int fsize = param->pfamily->flist->size;
    for (int i = 0; i < fsize; i++)
    {
        pCafeFamilyItem pitem = (pCafeFamilyItem)param->pfamily->flist->array[i];
        cafe_family_set_size(param->pfamily, pitem, pcafe);
        globals.viterbi->set_node_familysize(pcafe, pitem);
        totalsum += write_family_gainloss(ofst, pitem->id, pcafe, psum);
    }
    ofst << "SUM\t" << totalsum << "\t";

    pString pstr = phylogeny_string((pTree)psum, __cafe_tree_string_sum_gainloss);
    ofst << pstr->buf << "\n";
    string_free(pstr);
    cafe_tree_free(psum);

    return 0;
}
/**
* For handling when a user taps on a menu item (top right)
*/
@Override
public boolean onOptionsItemSelected(MenuItem item) {
super.onOptionsItemSelected(item);
int id = item.getItemId();
if (id == R.id.deployments) {
launchDeploymentsActivity();
return true;
} else if (id == R.id.osmdownloader) {
askIfDownloadOSM();
return true;
} else if (id == R.id.basemaps) {
basemap.presentBasemapsOptions();
return true;
} else if (id == R.id.osmcredentials) {
inputOSMCredentials();
return true;
} else if (id == R.id.osmXmlLayers) {
presentOSMOptions();
return true;
} else if (id == R.id.info) {
showInfo();
return true;
} else if (id == R.id.action_save_to_odk_collect) {
saveToODKCollectAndExit();
return true;
}
return false;
} |
<gh_stars>1-10
package bzh.strawberry.strawbattle.listeners.player;
import bzh.strawberry.strawbattle.StrawBattle;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerSwapHandItemsEvent;
/*
* This file PlayerSwapHandItems is part of a project StrawBattle.StrawBattle.
* It was created on 23/01/2021 18:53 by Eclixal.
* This file as the whole project shouldn't be modify by others without the express permission from StrawberryCorps author(s).
* Also this comment shouldn't get remove from the file. (see Licence)
*/
/**
 * Listener that disables swapping items between the main and off hand
 * (the F key by default) for all players.
 */
public class PlayerSwapHandItems implements Listener {

    public PlayerSwapHandItems() {
        // Log registration so startup output shows which listeners are active.
        StrawBattle.STRAW_BATTLE.getLogger().info("[LISTENER] Registered Listener : " + getClass().getName());
    }

    /** Cancels every hand-swap attempt. */
    @EventHandler
    public void onPlayerSwapHandItems(PlayerSwapHandItemsEvent event) {
        event.setCancelled(true);
    }
}
// Command that begins moving a window with the mouse.
//
// Dispatched through the WindowHelperCmd machinery; the actual movement
// logic lives in real_execute(), implemented elsewhere.
class StartMovingCmd: public WindowHelperCmd {
public:
    StartMovingCmd() { }
protected:
    // Starts interactive window movement (see implementation file).
    void real_execute();
};  // class definitions must be terminated with ';' -- it was missing here
<filename>clients/frontend-authorization/src/oidc/browser/BrowserAuthorizationClient.ts<gh_stars>0
/*---------------------------------------------------------------------------------------------
* Copyright (c) Bentley Systems, Incorporated. All rights reserved.
* See LICENSE.md in the project root for license terms and full copyright notice.
*--------------------------------------------------------------------------------------------*/
/** @packageDocumentation
* @module BrowserAuthorization
*/
import { User, UserManager, UserManagerSettings } from "oidc-client";
import { assert, AuthStatus, BeEvent, BentleyError, ClientRequestContext, IDisposable, Logger } from "@bentley/bentleyjs-core";
import { AccessToken, ImsAuthorizationClient } from "@bentley/itwin-client";
import { FrontendAuthorizationClient } from "../../FrontendAuthorizationClient";
import { FrontendAuthorizationClientLoggerCategory } from "../../FrontendAuthorizationClientLoggerCategory";
import { BrowserAuthorizationBase } from "./BrowserAuthorizationBase";
import { BrowserAuthorizationClientRedirectState } from "./BrowserAuthorizationClientRedirectState";
/**
 * Basic settings required to construct a [[BrowserAuthorizationClient]].
 * These map onto the corresponding snake_case `UserManagerSettings` of oidc-client.
 * @beta
 */
export interface BrowserAuthorizationClientConfiguration {
  /** The URL of the OIDC/OAuth2 provider. If left undefined, the Bentley auth authority will be used by default. */
  readonly authority?: string;
  /** The unique client id registered through the issuing authority. Required to obtain authorization from the user. */
  readonly clientId: string;
  /**
   * The URL passed in the authorization request, to which the authority will redirect the browser after the user grants/denies access.
   * The redirect URL must be registered against the clientId through the issuing authority to be considered valid.
   */
  readonly redirectUri: string;
  /**
   * The URL passed in the signout request, to which the authority will redirect the browser after the user has been signed out.
   * The signout URL must be registered against the clientId through the issuing authority to be considered valid.
   */
  readonly postSignoutRedirectUri?: string;
  /** A space-delimited collection of individual access claims specified by the authority. The user must consent to all specified scopes in order to grant authorization. */
  readonly scope: string;
  /** The mechanism (or authentication flow) used to acquire auth information from the user through the authority (the OAuth2 `response_type`). */
  readonly responseType?: "code" | "id_token" | "id_token token" | "code id_token" | "code token" | "code id_token token" | string;
}
/**
 * A [[FrontendAuthorizationClient]] for browser applications.
 * Wraps an oidc-client `UserManager` to sign users in/out against an
 * OIDC/OAuth2 authority and caches the resulting [[AccessToken]].
 *
 * The internal `UserManager` is created lazily on first use; call [[dispose]]
 * to detach the event handlers this client registers on it.
 * @beta
 */
export class BrowserAuthorizationClient extends BrowserAuthorizationBase<BrowserAuthorizationClientConfiguration> implements FrontendAuthorizationClient, IDisposable {
  /** Raised each time the cached token changes: sign-in, silent renew, expiry, or sign-out. */
  public readonly onUserStateChanged = new BeEvent<(token: AccessToken | undefined) => void>();

  /** Token of the currently signed-in user; undefined while signed out. */
  protected _accessToken?: AccessToken;

  /** True while a user is signed in (alias of [[hasSignedIn]]). */
  public get isAuthorized(): boolean {
    return this.hasSignedIn;
  }

  /**
   * True when no token is currently cached.
   * NOTE(review): this does not inspect the token's actual expiration time;
   * an expired-but-still-cached token reports `false` here -- confirm intent.
   */
  public get hasExpired(): boolean {
    return !this._accessToken;
  }

  /** True when an access token is currently cached. */
  public get hasSignedIn(): boolean {
    return !!this._accessToken;
  }

  public constructor(configuration: BrowserAuthorizationClientConfiguration) {
    super(configuration);
  }

  /**
   * Returns the internal UserManager, creating and caching it on first call.
   * @param requestContext used when resolving the default authority URL
   */
  protected async getUserManager(requestContext: ClientRequestContext): Promise<UserManager> {
    if (this._userManager) {
      return this._userManager;
    }

    const settings = await this.getUserManagerSettings(requestContext, this._basicSettings, this._advancedSettings);
    this._userManager = this.createUserManager(settings);
    return this._userManager;
  }

  /**
   * Merges the basic and advanced settings into a single configuration object consumable by the internal userManager.
   * Advanced settings win over basic ones; if no authority is configured, the
   * Bentley IMS authority is looked up as the default.
   * @param requestContext
   * @param basicSettings
   * @param advancedSettings
   */
  protected async getUserManagerSettings(requestContext: ClientRequestContext, basicSettings: BrowserAuthorizationClientConfiguration, advancedSettings?: UserManagerSettings): Promise<UserManagerSettings> {
    let userManagerSettings: UserManagerSettings = {
      authority: basicSettings.authority,
      redirect_uri: basicSettings.redirectUri,
      client_id: basicSettings.clientId,
      scope: basicSettings.scope,
      post_logout_redirect_uri: basicSettings.postSignoutRedirectUri,
      response_type: basicSettings.responseType,
    };

    if (advancedSettings) {
      // Advanced settings override any overlapping basic keys.
      userManagerSettings = Object.assign(userManagerSettings, advancedSettings);
    }

    if (!userManagerSettings.authority) {
      const imsAuthorizationClient = new ImsAuthorizationClient();
      const authority = await imsAuthorizationClient.getUrl(requestContext);
      userManagerSettings.authority = authority;
    }

    return userManagerSettings;
  }

  /**
   * Creates the internal user manager and binds all relevant events to their respective callback function.
   * @param settings
   */
  protected createUserManager(settings: UserManagerSettings): UserManager {
    const userManager = new UserManager(settings);

    userManager.events.addUserLoaded(this._onUserLoaded);
    userManager.events.addUserUnloaded(this._onUserUnloaded);
    userManager.events.addAccessTokenExpiring(this._onAccessTokenExpiring);
    userManager.events.addAccessTokenExpired(this._onAccessTokenExpired);
    userManager.events.addSilentRenewError(this._onSilentRenewError);
    userManager.events.addUserSignedOut(this._onUserSignedOut);

    return userManager;
  }

  /**
   * Alias for signInRedirect needed to satisfy [[FrontendAuthorizationClient]]
   * @param requestContext
   */
  public async signIn(requestContext: ClientRequestContext): Promise<void> {
    return this.signInRedirect(requestContext);
  }

  /**
   * Attempts a sign-in via redirection with the authorization provider.
   * If possible, a non-interactive signin will be attempted first.
   * If successful, the returned promise will be resolved.
   * Otherwise, an attempt to redirect the browser will proceed.
   * If an error prevents the redirection from occurring, the returned promise will be rejected with the responsible error.
   * Otherwise, the browser's window will be redirected away from the current page, effectively ending execution here.
   * @param successRedirectUrl page to return to after sign-in; defaults to the current URL
   */
  public async signInRedirect(requestContext: ClientRequestContext, successRedirectUrl?: string): Promise<void> {
    requestContext.enter();
    const user = await this.nonInteractiveSignIn(requestContext);
    if (user) {
      return;
    }

    const userManager = await this.getUserManager(requestContext);

    // The state round-trips through the authority so the redirect handler can
    // restore the page the user started from.
    const state: BrowserAuthorizationClientRedirectState = {
      successRedirectUrl: successRedirectUrl || window.location.href,
    };

    await userManager.signinRedirect({
      state,
    }); // This call changes the window's URL, which effectively ends execution here unless an exception is thrown.
  }

  /**
   * Attempts a sign-in via popup with the authorization provider
   * @param requestContext
   */
  public async signInPopup(requestContext: ClientRequestContext): Promise<void> {
    requestContext.enter();
    let user = await this.nonInteractiveSignIn(requestContext);
    if (user) {
      return;
    }

    const userManager = await this.getUserManager(requestContext);
    user = await userManager.signinPopup();
    assert(user && !user.expired, "Expected userManager.signinPopup to always resolve to an authorized user");
    return;
  }

  /**
   * Attempts a silent sign in with the authorization provider
   * @throws [[Error]] If the silent sign in fails
   */
  public async signInSilent(requestContext: ClientRequestContext): Promise<void> {
    requestContext.enter();
    const user = await this.nonInteractiveSignIn(requestContext);
    assert(!!user && !user.expired, "Expected userManager.signinSilent to always resolve to an authorized user");
    return;
  }

  /**
   * Attempts a non-interactive signIn
   * - tries to load the user from storage
   * - tries to silently sign-in the user
   * @returns the signed-in user, or undefined when a silent sign-in is impossible or fails
   */
  protected async nonInteractiveSignIn(requestContext: ClientRequestContext): Promise<User | undefined> {
    let user = await this.loadUser(requestContext);
    if (user) {
      return user;
    }

    const userManager = await this.getUserManager(requestContext);
    if (userManager.settings.prompt && userManager.settings.prompt !== "none") { // No need to even try a silent sign in if we know the prompt will force its failure.
      return undefined;
    }

    // Attempt a silent sign-in
    try {
      user = await userManager.signinSilent(); // calls events
      return user;
    } catch (err) {
      // Silent failure is an expected outcome here; callers fall back to an
      // interactive flow, so only log at info level.
      Logger.logInfo(FrontendAuthorizationClientLoggerCategory.Authorization, "Silent sign-in failed");
      return undefined;
    }
  }

  /**
   * Gets the user from storage
   * @return User found in storage.
   * - Resolves to undefined if no user was found.
   * - Returned user may have expired - so it's up to the caller to check the expired state
   */
  protected async loadUser(requestContext: ClientRequestContext): Promise<User | undefined> {
    const userManager = await this.getUserManager(requestContext);
    requestContext.enter();

    const user = await userManager.getUser();
    requestContext.enter();

    if (user && !user.expired) {
      this._onUserLoaded(user); // Call only because getUser() doesn't call any events
      return user;
    }

    return undefined;
  }

  /** Derives and caches the [[AccessToken]] from an oidc-client User, or clears the cache when undefined. */
  protected initAccessToken(user: User | undefined) {
    if (!user) {
      this._accessToken = undefined;
      return;
    }

    this._accessToken = AccessToken.fromTokenResponseJson(user, user.profile);
  }

  /**
   * Alias for signOutRedirect
   * @param requestContext
   */
  public async signOut(requestContext: ClientRequestContext): Promise<void> {
    await this.signOutRedirect(requestContext);
  }

  /** Signs the user out by redirecting the browser to the authority's signout endpoint. */
  public async signOutRedirect(requestContext: ClientRequestContext): Promise<void> {
    const userManager = await this.getUserManager(requestContext);
    requestContext.enter();

    await userManager.signoutRedirect();
  }

  /** Signs the user out via a popup window instead of a full-page redirect. */
  public async signOutPopup(requestContext: ClientRequestContext): Promise<void> {
    const userManager = await this.getUserManager(requestContext);
    requestContext.enter();

    await userManager.signoutPopup();
  }

  /**
   * Returns a promise that resolves to the AccessToken of the currently authorized user.
   * The token is refreshed as necessary.
   * NOTE(review): no refresh is actually triggered here -- the cached token is
   * returned as-is, and renewal relies on the UserManager's silent-renew
   * events updating the cache. Confirm against the stated contract.
   * @throws [BentleyError]($bentley) If signIn() was not called, or there was an authorization error.
   */
  public async getAccessToken(requestContext?: ClientRequestContext): Promise<AccessToken> {
    if (this._accessToken)
      return this._accessToken;
    if (requestContext)
      requestContext.enter();
    throw new BentleyError(AuthStatus.Error, "Not signed in.", Logger.logError, FrontendAuthorizationClientLoggerCategory.Authorization);
  }

  /**
   * Checks the current local user session against that of the identity provider.
   * If the session is no longer valid, the local user is removed from storage.
   * @returns true if the local session is still active with the provider, false otherwise.
   * @param requestContext
   * NOTE(review): an `ignoreCheckInterval` parameter was documented here but is
   * not part of the signature -- confirm whether it was removed intentionally.
   */
  public async checkSessionStatus(requestContext: ClientRequestContext): Promise<boolean> {
    requestContext.enter();
    const userManager = await this.getUserManager(requestContext);
    try {
      await userManager.querySessionStatus();
    } catch (err) { // Access token is no longer valid in this session
      await userManager.removeUser();
      return false;
    }

    return true;
  }

  /**
   * Recomputes the cached token from the given user and notifies subscribers.
   * Exceptions thrown by subscribers are logged rather than propagated.
   */
  protected _onUserStateChanged = (user: User | undefined) => {
    this.initAccessToken(user);
    try {
      this.onUserStateChanged.raiseEvent(this._accessToken);
    } catch (err) {
      Logger.logError(FrontendAuthorizationClientLoggerCategory.Authorization, "Error thrown when handing OidcBrowserClient.onUserStateChanged event", () => ({ message: err.message }));
    }
  }

  /**
   * Raised when a user session has been established (or re-established).
   * This can happen on startup, after token refresh or token callback.
   */
  protected _onUserLoaded = (user: User) => {
    this._onUserStateChanged(user);
  }

  /**
   * Raised when a user session has been terminated.
   */
  protected _onUserUnloaded = () => {
    this._onUserStateChanged(undefined);
  }

  /**
   * Raised prior to the access token expiring
   * (intentionally a no-op; silent renew is handled by oidc-client itself).
   */
  protected _onAccessTokenExpiring = async () => {
  }

  /**
   * Raised after the access token has expired.
   */
  protected _onAccessTokenExpired = () => {
    this._onUserStateChanged(undefined);
  }

  /**
   * Raised when the automatic silent renew has failed.
   */
  protected _onSilentRenewError = () => {
    this._onUserStateChanged(undefined);
  }

  /**
   * Raised when the user's sign-in status at the OP has changed.
   */
  protected _onUserSignedOut = () => {
    this._onUserStateChanged(undefined);
  }

  /**
   * Disposes the resources held by this client.
   * NOTE(review): detaches all event handlers but keeps the _userManager
   * instance cached -- a later call may keep using the disposed manager.
   */
  public dispose(): void {
    if (this._userManager) {
      this._userManager.events.removeUserLoaded(this._onUserLoaded);
      this._userManager.events.removeAccessTokenExpiring(this._onAccessTokenExpiring);
      this._userManager.events.removeAccessTokenExpired(this._onAccessTokenExpired);
      this._userManager.events.removeUserUnloaded(this._onUserUnloaded);
      this._userManager.events.removeSilentRenewError(this._onSilentRenewError);
      this._userManager.events.removeUserSignedOut(this._onUserSignedOut);
    }
  }
}
|
<gh_stars>1-10
package com.christophecvb.elitedangerous.events.stationservices;
import com.christophecvb.elitedangerous.events.Event;
/**
 * Journal event raised when a stored ship is transferred between stations.
 */
public class ShipyardTransferEvent extends Event {
    // Ship being transferred
    public String shipType;
    public String shipTypeLocalised;
    // Origin star system of the transfer
    public String system;
    public Integer shipID;
    // Market identifiers plus the price and duration of the transfer
    public Long shipMarketID;
    public Long transferPrice;
    public Long transferTime;
    public Long marketID;
    // Transfer distance (units not specified here -- presumably light years; confirm against journal docs)
    public Double distance;

    /** Callback contract for consumers interested in this event type. */
    public interface Listener extends Event.Listener {
        @Override
        default <T extends Event> void onTriggered(T event) {
            // Dispatcher guarantees the runtime type, so the cast is safe here.
            this.onShipyardTransferEventTriggered((ShipyardTransferEvent) event);
        }

        void onShipyardTransferEventTriggered(ShipyardTransferEvent shipyardTransferEvent);
    }
}
|
By instinct, like many on the left, I am a European. I recognise that many issues – perhaps most – can no longer be resolved only within our borders. Among them are grave threats to our welfare and our lives: climate change and the collapse of the living world; the spread of epidemics whose vectors are corporations (obesity, diabetes and diseases associated with smoking, alcohol and air pollution); the global wealth-grab by the very rich; antibiotic resistance; terrorism and conflict.
I recognise that the only legitimate corrective to transnational power is transnational democracy. So I want to believe; I want to belong. But it seems to me that all that is good about the European Union is being torn down, and all that is bad enhanced and amplified.
Britain’s part in making the EU a plaything of corporate interests | Letters Read more
Nowhere is this clearer than in the draft agreement secured by David Cameron. For me, the most disturbing elements are those that have been widely described in the media as “uncontroversial”: the declarations on regulations and competitiveness. The draft decisions on these topics are a long series of euphemisms, but they amount to a further dismantling of the safeguards defending people, places and the living world.
What Cameron described in parliament as “pettifogging bureaucracy” is the rules that prevent children from being poisoned by exhaust fumes, rivers from being turned into farm sewers and workers from being exploited by their bosses. What the European commission calls reducing the “regulatory burden for EU business operators” often means increasing the costs the rest of us must carry: costs imposed on our pockets, our health and our quality of life. “Cutting red tape” is everywhere portrayed as a good thing. In reality, it often means releasing business from democracy.
There is nothing rational or proportionate about the deregulation the commission contemplates. When Edmund Stoiber, the conservative former president of Bavaria, reviewed European legislation, he discovered that the combined impact of all seven environmental directives incurred less than 1% of the cost to business caused by European law. But, prodded by governments including ours, the commission threatens them anyway. It is still considering a merger and downgrading of the habitats and birds directives, which are all that impede the destruction of many of our precious places and rare species.
Alongside such specific threats, the EU is engineering treaties that challenge the very principle of parliamentary control of corporations. As well as the Transatlantic Trade and Investment Partnership (TTIP), it has been quietly negotiating something even worse: a Trade in Services Agreement (Tisa). These claim to be trade treaties, but they are nothing of the kind. Their purpose is to place issues in which we have a valid and urgent interest beyond the reach of democratic politics. And the commission defends them against all comers.
Are such tendencies accidental, emergent properties of a highly complex system, or are they hardwired into the structure of the EU? The more I see, the more it seems to me that the EU’s problems are intrinsic and systemic. The organisation that began as an industrial cartel still works at the behest of the forces best equipped to operate across borders: transnational corporations. The commission remains a lobbyists’ paradise: opaque, sometimes corruptible, almost unnavigable by those without vast resources.
People such as the former Labour home secretary Alan Johnson, who claim the EU is a neutral political forum – “simply a place we have built where we can manage our interdependence” – are myth-makers. They are the equivalent of the tabloid fabulists who maintain that European rules will reclassify Kent as part of France, force people to trade in old battery-operated sex toys for new ones, and ensure that dead pets are boiled for half an hour in a pressure cooker before they are buried.
So should those who seek a decent, protective politics vote to stay or vote to leave? If you wish to remain within the EU because you imagine it is a progressive force, I believe you are mistaken. That time, if it ever existed, has passed. The EU is like democracy, diplomacy and old age: there is only one thing to be said for it – it is not as bad as the alternative.
If you are concerned about arbitrary power, and the ability of special interests to capture and co-opt the apparatus of the state, the UK is in an even worse position outside the EU than it is within. Though the EU’s directives are compromised and under threat, they are a lot better than nothing. Without them we can kiss goodbye to the protection of our wildlife, our health, our conditions of employment and, one day perhaps, our fundamental rights. Without a formal constitution, with our antiquated voting arrangements and a corrupt and corrupting party funding system, nothing here is safe.
Though the EU’s directives are compromised and under threat, they are a lot better than nothing
The UK government champs and rears against the European rules that constrain it. It was supposed to have ensured that all our rivers were in good ecological condition by the end of last year: instead, lobbied by Big Farmer and other polluting businesses, it has achieved a grand total of 17%. On behalf of the motor industry, it has sought to undermine new European limits on air pollution, after losing a case in the supreme court over its failure to implement existing laws. Ours is the least regulated labour market in Europe, and workers here would be in an even worse fix without the EU.
On behalf of party donors, old school chums, media proprietors and financial lobbyists, the government is stripping away any protections that European law has not nailed down. The EU’s enthusiasm for treaties such as TTIP is exceeded only by Cameron’s. His defence of national sovereignty, subsidiarity and democracy mysteriously evaporates as soon as they impinge upon corporate power.
I believe that we should remain within the union. But we should do so in the spirit of true scepticism: a refusal to believe anything until we have read the small print; a refusal to suspend our disbelief. Is it possible to be a pro-European Eurosceptic? I hope so, because that is what I am. |
<filename>utils/logger.go
/*
* Copyright 2020 The CCID Authors.
*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http: //www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import (
"fmt"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
"log"
"os"
"path/filepath"
"reflect"
"strings"
"sync"
)
// l is the package-wide logger instance; its fields are populated in init().
var l = &Log{}

// Log bundles the zap logger together with the file paths it writes to.
type Log struct {
	logger *zap.Logger
	// NOTE(review): wg and rev are never referenced in this file -- confirm
	// they are used elsewhere before removing.
	wg  sync.WaitGroup
	rev string
	// logPath is the "logs/" directory next to the executable;
	// logInfoPath / logErrorPath are the info and error files inside it.
	logPath      string
	logInfoPath  string
	logErrorPath string
}
func GetCurrentDirectory() string {
dir, err := filepath.Abs(filepath.Dir(os.Args[0]))
if err != nil {
log.Fatal(err)
}
return strings.Replace(dir, "\\", "/", -1) +"/"
}
// init builds the package-level zap logger: debug level and above, console
// encoding, info output mirrored to stdout and logs/info.log, error output
// mirrored to stderr and logs/error.log.
func init() {
	l.logPath = GetCurrentDirectory() + "logs/"
	l.logInfoPath = l.logPath + "info.log"
	l.logErrorPath = l.logPath + "error.log"
	// MkdirAll is a no-op when the directory already exists and creates any
	// missing parents, so the previous Stat-then-Mkdir probe (which treated
	// every Stat error as "missing") is unnecessary.
	if err := os.MkdirAll(l.logPath, os.ModePerm); err != nil {
		fmt.Printf("mkdir failed![%v]\n", err)
	}
	encoderConfig := zapcore.EncoderConfig{
		TimeKey:   "time",
		LevelKey:  "level",
		NameKey:   "logger",
		CallerKey: "linenum",
		// NOTE(review): an empty MessageKey makes the encoder omit the log
		// message text from output -- confirm this is intended.
		MessageKey:     "",
		StacktraceKey:  "stacktrace",
		LineEnding:     zapcore.DefaultLineEnding,
		EncodeLevel:    zapcore.LowercaseLevelEncoder, // lowercase level names
		EncodeTime:     zapcore.ISO8601TimeEncoder,    // ISO8601 UTC timestamps
		EncodeDuration: zapcore.SecondsDurationEncoder,
		EncodeCaller:   zapcore.FullCallerEncoder, // full caller path
		EncodeName:     zapcore.FullNameEncoder,
	}
	// Log everything from debug level upward.
	atom := zap.NewAtomicLevelAt(zap.DebugLevel)
	config := zap.Config{
		Level:            atom,          // log level threshold
		Development:      true,          // development mode: stack traces on warnings
		Encoding:         "console",     // "console" or "json"
		EncoderConfig:    encoderConfig, // encoder configuration
		OutputPaths:      []string{"stdout", l.logInfoPath},
		ErrorOutputPaths: []string{"stderr", l.logErrorPath},
	}
	// Build the logger. The error was previously discarded with `_`, which
	// would leave l.logger nil and make every later Log* call panic.
	logger, err := config.Build()
	if err != nil {
		log.Fatalf("failed to build zap logger: %v", err)
	}
	l.logger = logger
}
// LogMsg writes msg at info level.
//
// The call is made synchronously: the previous `go l.logger.Info(msg)` could
// silently drop the entry when the process exited before the goroutine ran,
// and scrambled log ordering. zap loggers are safe for concurrent use, so no
// goroutine is needed.
func LogMsg(msg string) {
	l.logger.Info(msg)
}
// LogError writes msg followed by err's text at error level.
//
// Synchronous on purpose: the former `go` wrapper could lose the entry at
// process exit and gave no ordering guarantees; zap is concurrency-safe.
func LogError(err error, msg string) {
	l.logger.Error(msg + err.Error())
}
// LogOne logs the fields of a single struct value u under the message desc.
//
// Synchronous on purpose: the former `go logObj(...)` could lose the entry at
// process exit and gave no ordering guarantees.
func LogOne(desc string, u interface{}) {
	logObj(desc, u)
}
// LogList logs each entry of list, using the map key as the message and the
// value's struct fields as the payload (see logObj).
//
// Entries are logged synchronously: the former per-entry `go logObj(k, v)`
// could drop entries at process exit and interleaved output unpredictably.
// Note that map iteration order is still unspecified.
func LogList(list map[string]interface{}) {
	for k, v := range list {
		logObj(k, v)
	}
}
//log interface 类型
func logObj(desc string, u interface{}) {
keys := reflect.TypeOf(u)
values := reflect.ValueOf(u)
m := &[]zap.Field{}
// 获取方法字段
// 1. 先获取interface的reflect.Type,然后通过NumField进行遍历
// 2. 再通过reflect.Type的Field获取其Field
// 3. 最后通过Field的Interface()得到对应的value
fmt.Println(keys.NumField())
for i := 0; i < keys.NumField(); i++ {
field := keys.Field(i)
value := values.Field(i)
*m = append(*m, zap.String(field.Name, fmt.Sprintf("%v", value)))
}
fmt.Println(m)
l.logger.Info(desc, *m...)
}
|
Chapter 68
"Mmmmmn…" Summer just about moaned as she opened the oven, the thick, hot scent of baked cookie and melted chocolate washing over her, and hitting Weiss like a pillow to the face. "God I'll never get tired of that."
"I think I just gained weight." Weiss joked.
"Worth it, worth it, worth it." Summer chanted as she moved the tray onto a cooling rack.
"Is this what it's like to be Ruby?" Weiss wondered aloud, closing her eyes and drawing in more of the scent. It's richness almost made her tingle...well, maybe it did a little.
"Ruby is a sweet seeking missile, and nothing in the world makes me happier than indulging that." Summer smiled.
"I know what you mean." Weiss agreed.
"I bet you do." Summer smirked. "Now imagine the look on Ruby's cute little face when she bites into this." She turned around, plate in hand, two fresh cookies upon it. One was sliced in half, oozing its contents onto the plate.
"There's something obscene about this image." Weiss noted.
"Isn't it beautiful?" Summer admired. "And fulfilling. We made this Weiss. We made this happen. Now enjoy the spoils of our efforts." She took half the cookie, blowing to cool it down before taking a large bite. "Mnngh."
"These can't be healthy." Weiss observed.
"Mng, not in the slightest, delicious though." Summer confirmed. "Go on, indulge."
"...These are going straight to my thighs." Weiss lamented, taking the other half and biting in. They were even better fresh baked.
"Worth it, every pound gained, worth it." Summer insisted. "Besides, a little cushion won't hurt you. Ruby tells me you borderline starve yourself."
"As opposed to stuffing my face with anything vaguely sweet?" Weiss shot back.
"As opposed to having three healthy meals a day." Summer countered. "Ruby pays attention to your diet Weiss. It's healthy, but not enough."
"You sound like Blake." Weiss said.
"Well, Blake has the right idea." Summer continued. "You and Winter both, shutting yourselves in your rooms, eating quick light meals. You're like stick figures, beautiful stick figures, but even so. Winter looks more healthy every day. Her cheeks are filling out, and so are...other things." Summer flushed. "A few weeks time with my cooking and you'll be the same."
Weiss finished her half of the cookie, and looked down at herself. Her outfit was casual today, jeans and a long sleeved top. Unlike other days she neglected to wear a padded bra, and until now she had not thought about it. Her legs were thinner than she remembered, wrists painfully bony, and her collarbones - which Ruby occasionally paid an embarrassing amount of attention to - jutted a little too harshly beneath her skin. She had never really given much thought to it, that her lack of weight may be contributing to her somewhat lacking appearance. Perhaps she could stand to gain a few pounds. After all, Ruby and Summer looked amazing. Yang looked amazing, and even late in her pregnancy, Nora looked amazing too. Maybe she could stop wearing padded bras!
"I'd appreciate that Summer." Weiss smiled. "I never gave it much thought."
"Don't worry, I'll have you plumped up in no time, starting now." Summer held out the place, the second cookie steaming before her.
Weiss felt her teeth cry. "I'm full for now Summer, thank you."
The door opened, and the thudding of feet was the only warning Weiss had to brace herself before a crimson missile slammed into her, hugging her until her feet left the ground. "Weeeeeeeeeis, let's go on a date!" Ruby exclaimed.
"Hurgh, okay Ruby please I can't breathe!" Weiss choked, gasping as Ruby dropped her, giggling.
"Awesome!" Ruby cheered. "First dibs on the shower!" Pecking Weiss on the lips, Ruby dashed off to her room, leaving Weiss a little shellshocked.
Blake approached, pulled Weiss into another hug, this one gentler, but also damper. "Hey beautiful." Blake murmured.
"You smell." Weiss jabbed, but did not pull away.
"So do you." Blake smirked. "Been baking have we?"
"Hmph...Summer did the baking, I merely helped." Weiss replied. "Where are we going?"
"You and Ruby?" Blake responded. "It's a surprise. I'm spending some quality time with Yang and Pyrrha."
Weiss pulled back, looking confused. "Why?"
"They asked me first, and you two haven't had a date yet." Blake leaned down, bumping her forehead against Weiss', planting a kiss on her nose. "I want you to enjoy yourselves together, have a good night out, eat, drink, love." She cupped Weiss' cheek, directing her into a kiss, short but no less sweet. "And I'll go take pictures of Yang if she gets drunk, deal?"
"Deal." Weiss murmured, dazed, lost in Blake's eyes. "Heh, I never noticed before."
"What?" Blake asked.
"Your eyes are golden, Ruby's are silver." Weiss observed, smiling. "You're perfect for each other."
"We're perfect for you...draped in silver and gold like the princess you are." Blake teased.
"Oh shush, that nickname is staying in high school where it belongs." Weiss huffed.
"Are you saying you don't want Ruby to kneel before you?" Blake pressed. "To kiss your feet? And your legs…" Blake nipped at Weiss' ear, making her shiver as she purred. "Between them."
"I'll need a cold shower if you don't stop." Weiss whimpered.
"Hmm, I'm tempted." Blake reached down to squeeze Weiss' rear, digging her nails in, eliciting a gasp.
"You're evil." Weiss growled.
Blake smirked. "Muahahaha."
Not long afterward, Ruby finished her shower, calling out and dashing into the bedroom. Weiss had to tear herself away from the image of Ruby slipping off her towel before she did something drastic. She resisted, poorly, thinking about it as she washed. Her shower was longer than Ruby's, as expected. Ruby was quite quick when alone, Weiss took greater care of her body, and her hair, short as it was. She spent no less time washing its cropped fur than her formerly lengthy locks. She was still conflicted about the look. Combined with her thin hips and flat chest, she could easily pass as a boy now...maybe that was a turn on to Ruby? Did she wish Weiss was a boy? No, that was stupid. Ruby liked her for who she was, even if she felt inadequate, Ruby never judged, her gaze always loving, her touch gentle, it made her heart melt.
Weiss eventually finished, heading back to find Blake alone in the room, a set of clothes laid out upon the bed, and their makeup kit on the dresser. With her help, Weiss found herself well dressed and made up in no time. Her attire was notably different from usual. Blake had picked out a short black pencil skirt that she rarely wore on formal occasions. Normally Weiss preferred dresses, but if she desired a more stern appearance it came in handy. Beneath it she wore thick black tights, already normal for her in these colder months. Her top was a pale blue button up, tastefully unbuttoned to show off her collar - at Blake's suggestion - and to cap it all off, black high heels, enough to ensure Ruby remained shorter than her, even if she wore heels or platforms of her own.
"Hold still a moment." Blake ordered, reaching around Weiss' neck to fasten a pendant in place. The small silver apple was an old, but cherished gift from her mother. "There, perfect, every last inch of you." Blake admired, pecking her on the forehead.
"I look...dashing?" Weiss observed, looking herself up and down in the mirror.
Blake chuckled, pecking Weiss again on the temple. "You look beautiful Weiss, strong, intelligent, powerful. Ruby will love it."
"You think?" Weiss asked.
"I know." Blake confirmed. "Now come on, she's waiting in the kitchen, trying not to spoil her appetite."
Blake pulled Weiss out of the room, escorting her to the living room where she found herself frozen, jaw dropping. "Hey Weiss, whaddaya' think?" Ruby asked. She twirled around, her black miniskirt showing just a little shy of a tasteful amount of thigh. Weiss nearly drooled as she took in Ruby's attire. Ruby had always loved black and red, but until now never showed off her gothic side. Long leather combat boots complemented dark stockings, the black skirt was rimmed in red, and led to an unsurprisingly black corset, which combined with the white undershirt to form a…
"Is that a boob window?" Weiss asked.
"Yeah!" Ruby cheered. "I saw it online and thought it was so cool, so I ordered one and it arrived before I got here!"
"You look...amazing Ruby." Weiss admired, approaching her partner to pull her into an embrace.
"I wasn't sure if you'd like it." Ruby murmured, leaning into Weiss' arms. "I don't dress like this much anymore."
"I love it...I love you." Weiss whispered, planting a kiss against Ruby's cheek.
Ruby was silent for a moment. "I love you too Weiss." She pulled back to meet Weiss' surprised gaze. "I'm sorry I haven't said it enough. I wanted to mean it, forgive me?"
Weiss choked a laugh, and caught Ruby in a fiery kiss. "Forgive you? I love you, you silly dolt. I would have waited forever to hear you say that." She kissed Ruby again, and Ruby had to pull away, laughing.
"Weiss, we're messing up our lipstick!" Ruby warned.
"We've got plenty to spare!" Weiss countered.
"Not if you keep smearing it all over each other." Winter suddenly snarked from the kitchen doorway. Weiss tried to jerk away but Ruby kept a firm grip, giggling.
"Come on you two." Summer smirked from beside Winter. "They won't hold the reservation forever." Weiss blushed a deep red.
"And I need to redo your makeup." Blake sighed.
"They're just going to mess it up again." Winter chuckled.
"Winter." Weiss admonished.
Ruby drove the car toward Signal. It was odd, Weiss had never once thought of Ruby as a driver, having a license, sitting behind the wheel of a car. Something about the image just seemed weird, but here they were, in the white sedan they had borrowed from Summer. Weiss admired Ruby from the passenger's seat as she bobbed her head to the stereo, playing some music from her scroll, something harsh and punk. The music was not to Weiss' usual taste, but lively and understandable enough to be tolerable. At least it was not Winter's screeching racket.
"Watchya' thinkin' about?" Ruby piped up.
"Dinner." Weiss fibbed.
"Oh, I'm hungry too." Ruby noted. "Raven ran me through some practice drills and I only had an energy bar for lunch."
"Drills?" Weiss asked.
"Yeah, some grapples, strikes and throws, self-defense stuff." Ruby explained. "Raven always tried to encourage me to learn, but until now I never really wanted to." Ruby reached over with her free hand, taking Weiss' and squeezing it. "Next time someone messes with me, I wanna' be able to defend myself."
"You shouldn't have to." Weiss lifted Ruby's hand to plant a kiss on it.
"No one should, it sucks, but that's life." Ruby shrugged. "Gotta' roll with the punches, so says Yang."
"I thought she preferred spanking." Weiss snarked.
"Pfft, I was gonna' say something, but you didn't, so I thought oh well, that happened." Ruby chuckled.
"She was very firm." Weiss noted. "She's definitely...had practice."
Ruby laughed again.
Yang decided the perfect venue for the girls' night out would be Junior's. She had always liked the club, but had not been since the unpleasantness with her one night stand. She wanted to show off her sweet new arm to Junior and the Twins anyway. After a short drive, the trio arrived to find the club was not as busy as usual. On most nights it was packed, but it seemed the holidays had thinned the crowd out a bit. Yang walked straight up to the bar with Blake and Pyrrha in tow.
"Hey Blondie." Junior greeted. "It's been a while...again." He turned around to mix a drink.
"Yeah, I've had a lot going on." Yang responded. "But now I'm rearmed and ready to rock."
"That's a nice piece of tech." Junior glanced back over his shoulder. "Must have cost a fortune."
"Nope." Yang said. "Beta testing has its advantages."
"Well, it's good to have you back and two-handed." Junior turned back around, placing a strawberry sunrise on the bar before Yang. "Just don't ask me to arm wrestle." He pushed the glass forward. "This one's on the house."
"Oh, no I'm designated driver tonight." Yang pushed the glass back.
"You, designated driver?" Junior raised an eyebrow. "You've changed Blondie."
"For the better, mostly." Yang smirked.
"So what am I supposed to do with this drink?" Junior asked.
Yang glanced at Blake and Pyrrha. "I'll give it a try." Blake volunteered. "I'm not big on mixed drinks, but it looks good."
"Junior, meet my girlfriend Pyrrha, and my sister's girl Blake." Yang introduced.
"Be careful, Blondie's a bad influence." Junior warned. "And she threw a hell of a punch before the robot arm." He watched as Blake downed her drink. "Want another?" Blake nodded. "How about you?"
"I'll have a screwdriver." Pyrrha replied.
"You guys gonna' find a table or hang out here?" Junior asked, starting to mix the drinks.
Yang looked to Pyrrha and Blake. "Let's grab a table." Pyrrha suggested.
"Okay, I'll have someone bring it over." Junior nodded.
Blake and Pyrrha headed off in search of a table, leaving Yang alone at the bar. "The Twins here tonight?" Yang asked. "I wanna' show off my new arm."
"Yeah, they're around here somewhere." Junior replied. "So, your sister's girl, you bring her here to soften her up for a little interrogation?"
"Nah, we're just here to have fun." Yang answered. "I trust her."
"You're going soft Blondie." Junior teased.
Yang laughed. "Yeah, but that's probably for the best."
Parking in a lot down the street, Ruby and Weiss donned jackets and walked hand-in-hand to their destination, Ruby leading the way. In the frigid air her hand felt heavenly, Weiss wanted nothing more than to pull her close and never let go. But there would be time for that later. Ruby was almost skipping in her excitement, eyes bright and full of mirth. "And here we are!" Ruby stopped, gesturing to the property in front of them.
At first glance the place seemed like an old fashioned restaurant, but with a much darker tone. Gazing through the window, Weiss was able to see a rich interior, various shades of red glowed in the soft amber light, cast equally by the stained glass lamps that hung from the ceiling, and candelabra on the tables. All the tables seemed to be crafted of a dark stained wood. Mahogany, Weiss would have guessed.
"Wow...this is...upscale." Weiss admired.
"It's so cool!" Ruby cheered. "I never knew it existed until mum told me about it!"
"Summer came here?" Weiss tried to picture Summer in Ruby's outfit...that was probably a bad idea.
"Raven and Glynda took her." Ruby explained. "Apparently it's popular in the S&M community."
Weiss took note of some of the guests, many of whom were wearing similar attire, or more traditional variants, to Ruby. They were both male and female, but distinctly more of the latter. Weiss suddenly felt underdressed. "I can see why."
"Do you like it?" Ruby asked. "I wasn't sure if you'd be intereste-"
Weiss cut Ruby off with a kiss. "It looks wonderful my dear, shall we enter?" She grinned, bumping her nose against Ruby's.
"Y-yeah, yes, we shall, enter, the uh...abode." Ruby stammered as Weiss chuckled, allowing herself to be drawn inside.
Stepping inside, the pair were met by the greeter behind a podium. Ruby stated her reservation, and they were escorted to a table near the rear of the establishment. It was not the biggest, nor the smallest of places Weiss had dined in, but it was certainly one of the cosiest. The dark, warm atmosphere felt comforting, their seats were lined with plush velvet, and privacy curtains flanked their table on three sides. It was like being in a luxury tent, almost, just her and Ruby, camping, alone…
"Something saucy on your mind?" Ruby teased.
"Hum?" Weiss snapped out of her daydream.
Ruby smirked. "You're biting your lip, staring off at nothing. I think you even drooled a little."
"I did not!" Weiss snapped, scandalized, reaching up to touch her lips. They were dry.
"Made you check!" Ruby laughed.
Weiss pouted, kicking Ruby lightly beneath the table. "Tease."
A waitress chose that moment to approach, though from her dress, butler would have been a more fitting descriptor, even though she was female. The waitress was tall, pale, with crimson curls tied in a loose ponytail. "Good evening." She greeted smoothly, laying two menus before the couple. "Tonight's special is Mistrali Curry. Would you like something to drink?"
"I'll have a strawberry soda please." Ruby ordered.
"And I'll have a…" Weiss scanned the beverage list. There were several soft drinks, mineral water, then an impressive list of cocktails, and an even more impressive list of wines that made her almost drool for real this time. "Hmm...I'll just have a glass of water for now, thank you."
"Are you sure?" Ruby pressed. "Mum says the wine here is great!"
"There are some nice brands here, but it's rather rude to drink when you can't." Weiss noted.
"Oh pfft, don't worry about that, we're here to celebrate!" Ruby insisted. She turned to the waitress. "Could I get my soda in a wineglass?"
"Of course." The waitress grinned.
Weiss sighed, barely resisting a grin of her own. "I'll have a glass of Atlesian Weiss, please."
"Wait, that exists?" Ruby wondered as the waitress scribbled on her notepad.
"A fine choice." The waitress praised. "I keep a bottle at home for special occasions. I'll leave you to browse the menu and return with your drinks."
"Thank you!" Ruby chirped, shooting the waitress another smile before flipping through the menu's pages. "I love curry, but maybe I should try something different, something I've never had before."
"They have good variety." Weiss observed. "Hmm, why don't we order for each other?" Weiss arched her brow over the menu.
Ruby's face lit up. "Ooh, great idea!" |
import argparse
import numpy as np
import pandas as pd
import dgl
from tqdm import tqdm
from datasets import load_data
# For every node, walk its BFS neighbourhood up to --max-hops hops and collect
# the time-step gap (node's timestamp minus neighbour's timestamp, non-negative
# gaps only), then print summary statistics of the resulting distribution.
parser = argparse.ArgumentParser()
parser.add_argument('data_path')
parser.add_argument('--max-hops', type=int, default=2)
parser.add_argument('--save', default=None)
args = parser.parse_args()

g, __, __, ts = load_data(args.data_path)
ts = np.asarray(ts)
N = ts.size
# Self loops would only add trivial zero gaps at hop 0, so strip them first.
g = dgl.transform.remove_self_loop(g)

tdiff_dist = []
for u in tqdm(g.nodes(), desc="BFS"):
    t_u = ts[u]
    # BFS yields one tensor of node ids per hop; hop 0 is the node itself.
    for hop, frontier in enumerate(dgl.traversal.bfs_nodes_generator(g, u)):
        gaps = t_u - ts[frontier.numpy()]
        # Only neighbours at the same or an earlier time step count.
        tdiff_dist.extend(list(gaps[gaps >= 0]))
        if hop == args.max_hops:  # hop == max_hops is still processed, then we stop
            break

tdiff_dist = pd.Series(tdiff_dist)
print('=' * 32)
print('=' * 4, args.data_path, '=' * 4)
print(tdiff_dist.describe())
print('=' * 32)
if args.save:
    np.save(args.save, tdiff_dist.values)
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.