package jadx.plugins.input.dex;

import java.util.Collections;
import java.util.List;

import jadx.api.plugins.options.OptionDescription;
import jadx.api.plugins.options.impl.BaseOptionsParser;
import jadx.api.plugins.options.impl.JadxOptionDescription;

public class DexInputOptions extends BaseOptionsParser {

    private static final String VERIFY_CHECKSUM_OPT = DexInputPlugin.PLUGIN_ID + ".verify-checksum";

    private boolean verifyChecksum = true;

    @Override
    public void parseOptions() {
        verifyChecksum = getBooleanOption(VERIFY_CHECKSUM_OPT, true);
    }

    public List<OptionDescription> getOptionsDescriptions() {
        return Collections.singletonList(
                JadxOptionDescription.booleanOption(
                        VERIFY_CHECKSUM_OPT,
                        "verify dex file checksum before load",
                        true));
    }

    public boolean isVerifyChecksum() {
        return verifyChecksum;
    }
}
#include <stdio.h>
#include <stdlib.h>
#include <cstring> // for strcpy
#include <iostream>
#include <string>

#include "RecordStructure.hpp"
#include "RecordSequence.hpp"
#include "observation_file_writer_txt.hpp"

using namespace std;

int main (int argc, char** argv)
{
  {
    // The name of the output file.
    string outfile_name;

    // Data arrays for l-records. Related dimensions.
    double* the_tags;
    double* the_expectations;
    double* the_covariance_values;
    int the_tags_size;
    int the_expectations_size;
    int the_covariance_values_size;
    int n_tags;
    int n_expectations;
    int n_covariance_values;

    // Data arrays for o-records. Related dimensions.
    int* the_par_iids;
    int* the_obs_iids;
    int* the_ins_iids;
    int the_par_size;
    int the_obs_size;
    int the_ins_size;
    int n_par_iids;
    int n_obs_iids;
    int n_ins_iids;

    // Metadata to generate fake l- and o-records.
    string sid;
    char* identifier;
    int identifier_width;
    double time_tag;
    int record_type;
    int record_id_index;
    int cycle;

    // The ASTROLABE observation text writer.
    observation_file_writer_txt* writer;

    // The auxiliary classes that will generate record sequence hints
    // and will describe the structure of these records.
    RecordSequence the_record_sequence;
    RecordStructure record_structure_catalogue;

    // Other auxiliary variables, such as error codes or loop indices.
    int i;
    int line_index;
    int random_value;
    int io_status;
    int total_lines;

    // Check the number of parameters. We need at least one (besides
    // the name of the program).
    if (argc < 2)
    {
      cout << "usage: test_synthtxt output_txt_file_name" << endl;
      return 1;
    }

    outfile_name = argv[1];

    // We will dimension the different variable-length data arrays
    // according to the maximum dimension needed. To do so, we'll ask
    // the RecordStructure object about the properties of all the fake
    // records it contains.
    identifier_width = 0;
    the_tags_size = 1;
    the_expectations_size = 1;
    the_covariance_values_size = 1;

    for (i = 0; i < record_structure_catalogue.n_l_record_types(); i++)
    {
      record_structure_catalogue.l_record_data(i, sid, n_tags, n_expectations);

      if (n_tags > the_tags_size) the_tags_size = n_tags;
      if (n_expectations > the_expectations_size) the_expectations_size = n_expectations;
      if ((int)sid.length() > identifier_width) identifier_width = (int)sid.length();
    }

    the_covariance_values_size = (the_expectations_size * (the_expectations_size + 1)) / 2;

    the_tags = new double[the_tags_size];
    the_expectations = new double[the_expectations_size];
    the_covariance_values = new double[the_covariance_values_size];

    the_par_size = 1;
    the_obs_size = 1;
    the_ins_size = 1;

    for (i = 0; i < record_structure_catalogue.n_o_record_types(); i++)
    {
      record_structure_catalogue.o_record_data(i, sid, n_par_iids, n_obs_iids, n_ins_iids);

      if (n_par_iids > the_par_size) the_par_size = n_par_iids;
      if (n_obs_iids > the_obs_size) the_obs_size = n_obs_iids;
      if (n_ins_iids > the_ins_size) the_ins_size = n_ins_iids;
      if ((int)sid.length() > identifier_width) identifier_width = (int)sid.length();
    }

    the_par_iids = new int[the_par_size];
    the_obs_iids = new int[the_obs_size];
    the_ins_iids = new int[the_ins_size];

    identifier = new char[identifier_width + 1];
    identifier[identifier_width] = 0;

    time_tag = 1.0;

    // Instantiate the text writer.
    try
    {
      writer = new observation_file_writer_txt();
    }
    catch (...)
    {
      cout << "Error instantiating writer. Not enough memory" << endl;
      return 1;
    }

    // Set the size in bytes of the buffer used by the binary writer. Note,
    // however, that it is not strictly necessary to set this size at all;
    // if no size is set (that is, the next method call is not made)
    // the writer will use a default (internally set) size that may be
    // queried using method _default_buffer_size().
    writer->set_buffer_size(5 * 1024); // 5 Kbyte.

    // Open the underlying file.
    io_status = writer->open(outfile_name.c_str());
    if (io_status != 0)
    {
      cout << "Error opening output file. Error code in open(): " << io_status << endl;
      return 1;
    }

    // Insert 1000 full cycles of the sequence of records provided by
    // our RecordSequence object. Each cycle corresponds to a full epoch.
    total_lines = 1000 * the_record_sequence.cycle_length();

    for (line_index = 0; line_index < total_lines; line_index++)
    {
      // Get the type of record to generate as well as its identifier index.
      the_record_sequence.get_next(record_type, record_id_index, cycle);
      time_tag = (double)cycle;

      // Retrieve the structural information for the kind of record just
      // generated. Then, generate fake data for the proper record type
      // and write it to the output file.
      if (record_type == 1)
      {
        // This is an l-record.
        record_structure_catalogue.l_record_data(record_id_index, sid, n_tags, n_expectations);

        // Identifier.
        identifier = strcpy(identifier, sid.c_str());

        // Decide randomly the shape of the covariance matrix: none, only
        // standard deviations, full matrix (including correlations).
        // Generate fake values at the same time.
        random_value = rand() % 100 + 1;

        if (random_value <= 5)
        {
          n_covariance_values = 0;
        }
        else if (random_value <= 95)
        {
          n_covariance_values = n_expectations;
          for (i = 0; i < n_covariance_values; i++)
          {
            the_covariance_values[i] = (double)(i + line_index);
          }
        }
        else
        {
          n_covariance_values = (n_expectations * (n_expectations + 1)) / 2;
          for (i = 0; i < n_covariance_values; i++)
          {
            the_covariance_values[i] = (double)(i + line_index);
          }
          for (i = n_expectations; i < n_covariance_values; i++)
          {
            the_covariance_values[i] = (double)i / 100;
          }
        }

        // Generate fake data for the tags and expectations.
        for (i = 0; i < n_tags; i++) the_tags[i] = (double)(i + line_index);
        for (i = 0; i < n_expectations; i++) the_expectations[i] = (double)(i + line_index + 1);

        // Write the l-record.
        io_status = writer->write_l(true, identifier, line_index, time_tag,
                                    n_tags, the_tags,
                                    n_expectations, the_expectations,
                                    n_covariance_values, the_covariance_values);
        if (io_status != 0)
        {
          cout << "Error writing l record. Error code in write_l(): " << io_status << endl;
          break;
        }
      }
      else
      {
        // record_type == 0, meaning an o-record.
        record_structure_catalogue.o_record_data(record_id_index, sid, n_par_iids, n_obs_iids, n_ins_iids);

        // Identifier.
        identifier = strcpy(identifier, sid.c_str());

        // Generate fake parameter, observation and instrument instance identifiers.
        for (i = 0; i < n_par_iids; i++) the_par_iids[i] = i + line_index;
        for (i = 0; i < n_obs_iids; i++) the_obs_iids[i] = i + line_index + 1;
        for (i = 0; i < n_ins_iids; i++) the_ins_iids[i] = i + line_index + 2;

        // Write the o-record.
        io_status = writer->write_o(true, identifier, time_tag,
                                    n_par_iids, the_par_iids,
                                    n_obs_iids, the_obs_iids,
                                    n_ins_iids, the_ins_iids);
        if (io_status != 0)
        {
          cout << "Error writing o record. Error code in write_o(): " << io_status << endl;
          break;
        }
      }
    }

    // Close the writer.
    io_status = writer->close();
    if (io_status != 0)
    {
      cout << "Error closing file. Error code in close(): " << io_status;
      delete writer;
      writer = NULL;
      return 1;
    }

    delete [] identifier; // allocated with new[], so delete[] is required
    delete writer;
  }
  return 0;
}
n = int(input())

def cal(n):
    # Collect every factor pair (i, n // i) with i <= sqrt(n).
    p = []
    for i in range(1, int(n ** 0.5) + 1):
        if n % i == 0:
            p.append([i, n // i])
    return p

a = cal(n)
t = 1
for i in range(len(a)):
    # n is a product of two one-digit numbers if both factors are in 1..9.
    if 1 <= a[i][0] <= 9 and 1 <= a[i][1] <= 9:
        t = 0
        print('Yes')
        break
if t == 1:
    print('No')
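For reference, the same decision can be cross-checked by brute force over every one-digit pair; a minimal sketch (the sample input 35 is an illustrative assumption, not part of the original program):

# Brute-force cross-check of the factor-pair scan above.
n = 35  # illustrative input
print('Yes' if any(i * j == n for i in range(1, 10) for j in range(1, 10)) else 'No')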
It's often said that respect is earned on the battlefield, and it regularly happens that way in the mixed martial arts cage. Two fighters who are at loggerheads in the build-up to a fight are often seen embracing afterwards like long-lost family members, but not all of them do. For some, the feud rages on, and that seems to be the case for Tyron Woodley and Stephen Thompson as they build up to their fight at UFC 209 next month.

The two men clashed at UFC 205 last November and, after five five-minute rounds, two of the three judges could not separate them and the fight was ruled a majority draw. Three months down the line, the two have continued their feud back and forth in the media, and with the rematch now set for March 4, it's only a matter of time before the rivalry is resumed in physical form.

Speaking earlier this week to FloCombat, Thompson spoke about the angst between the two and said that he feels Woodley just needs to forget all of that and focus solely on the fight.

"You know, I try not to get into the stuff," Thompson said. "The only thing I want to happen is, Tyron, I don't want him to be distracted with all that. He's talking about race and how he's treated as one of the worst hated champions, but let's be honest, the guy hasn't been champion long enough to feel the effects I think.

"My true advice to him is to focus on the fight because I want Tyron to be a 170 fighter without any distractions when we go out there on March 4."

Back before their fight at UFC 205, Woodley made it very clear that Thompson was not someone who had his respect. Speaking last September, Woodley said that Thompson had his respect as an athlete, but not as an everyday man. "Wonderboy" hadn't forgotten this statement, but thinks that Woodley's opinion may have changed following their fight in November.

"You know, he's obviously said in the past he doesn't respect me--that he respects me as a fighter, but not as a man," Thompson said. "That was something [where] I was like, wait a second, I don't understand. But, I do think he does a little bit more after that last fight [because] he knows I'm not an easy guy to put away.

"A lot of these guys, they pump themselves up by the trash talking and saying these things. I think he respects me, to be honest with you."

While some would say the two would be destined for a trilogy if Thompson does win at UFC 209, the welterweight title challenger wasn't quick to say it would happen. Instead, Thompson said it would all depend on the manner of the victory and how it went down.

"I think it will depend on the fashion of the win," Thompson said. "I think if it's really, really close again then probably so. If we go out there and do business like we know and like I know we can do, and like don't leave it to the judges' eyes and get a legitimate win, then I think there is no need for a triple fight or a trilogy.

"We just have to go out there, I have to go out there, do what I know I can and win in either a spectacular way or a unanimous decision."
import { PluginFunction } from "vue";
import { VueConstructor } from "vue/types/vue";
import ToastInterface from "./ts/interface";
import { POSITION, TYPE } from "./ts/constants";
import { PluginOptions } from "./types";
import "./scss/index.scss";

declare function createToastInterface(eventBus: InstanceType<VueConstructor>): ReturnType<typeof ToastInterface>;
declare function createToastInterface(options?: PluginOptions, Vue?: VueConstructor): ReturnType<typeof ToastInterface>;

declare const VueToastificationPlugin: PluginFunction<PluginOptions>;

export default VueToastificationPlugin;
export { POSITION, TYPE, createToastInterface };
pub mod http;
pub mod stock_quote;

#[cfg(test)]
mod tests {
    use crate::stock_quote::StockQuote;

    #[test]
    fn it_should_serialize_from_json() {
        let test_input = [
            r#"{"symbol":"aapl","source":"yahoo"}"#,
            r#"{"symbol":"cdr","source":"stooq"}"#,
            r#"{"symbol":"gbpusd","source":"yahoo","ticker":"gbpusd=x"}"#,
        ];

        for json in test_input {
            let sq: Result<StockQuote, serde_json::error::Error> = serde_json::from_str(json);
            assert!(sq.is_ok());
        }
    }
}
# Copyright (c) 2013 - 2019 <NAME> and Contributors.
# This file is part of YAWAST which is released under the MIT license.
# See the LICENSE file or go to https://yawast.org/license/ for full license details.

from yawast.scanner.session import Session
from yawast.shared import network, output


def check_redirect(session: Session):
    # perform some connection testing
    if session.url_parsed.scheme == "http":
        session.supports_http = True

        try:
            # check for TLS redirect
            tls_redirect = network.check_ssl_redirect(session.url)
            if tls_redirect is not None and tls_redirect != session.url:
                print(f"Server redirects to TLS: Scanning: {tls_redirect}")

                session.update_url(tls_redirect)
                session.redirects_https = True
        except Exception:
            output.debug_exception()

            # we tried to connect to port 80, and it failed
            # this could mean a couple things, first, we need to
            # see if it answers to 443
            session.update_scheme("https")

            print("Server does not respond to HTTP, switching to HTTPS")
            print()
            print(f"Scanning: {session.url}")

            # grab the head, to see if we get anything
            try:
                network.http_head(session.url, timeout=5)

                session.supports_https = True
                session.supports_http = False

                print()
            except Exception as err:
                output.debug_exception()

                raise ValueError(
                    f"Fatal Error: Can not connect to {session.url} ({str(err)})"
                )
    else:
        session.supports_https = True

        # if we are scanning HTTPS, try HTTP to see what it does
        try:
            network.http_head(session.get_http_url(), timeout=5)

            session.supports_http = True
            print("Server responds to HTTP requests")
            print()
        except Exception:
            output.debug_exception()

            print("Server does not respond to HTTP requests")
            print()

    # check for www redirect
    www_redirect = network.check_www_redirect(session.url)
    if www_redirect is not None and www_redirect != session.url:
        print(f"Server performs WWW redirect: Scanning: {www_redirect}")
        session.update_url(www_redirect)
// TODO: need optimization, only clone when necessary
func (c *schedulerCache) Snapshot() *Snapshot {
	c.mutex.RLock()
	defer c.mutex.RUnlock()

	out := NewEmptySnapshot()
	out.clusterInfoList = make([]*framework.ClusterInfo, 0, len(c.clusters))
	for _, cluster := range c.clusters {
		cloned := cluster.DeepCopy()
		out.clusterInfoList = append(out.clusterInfoList, framework.NewClusterInfo(cloned))
	}

	return out
}
Brief History
Wentworth Castle Gardens & Stainborough Park, extending to over 500 acres, is the former seat of the Earls of Strafford and has been the subject of a major restoration project. Rare formal gardens have been rediscovered and new gardens created to bring the estate back to its former glory. It is the only Grade I listed parkland in South Yorkshire.

Horticultural Highlights
The estate is home to National Collections of Rhododendrons, Camellias and Magnolias, which have been enhanced over recent years, and more than 100,000 bulbs have been planted to create a carpet of colour. With atmospheric views, avenues of trees, woodland walks and majestic follies, there is something for everyone to enjoy in the Gardens throughout the year.

Other Things to See and Do
A wonderful children's adventure playground, a parkland of 26 listed follies and monuments with deer roaming free, and a ruined castle to explore. There are regular special events such as open-air concerts and plays, children's activities, historical re-enactments and craft fairs.

Local Area
Yorkshire Sculpture Park, the National Mining Museum and Cannon Hall are just a few of the many great places to visit, and the varied attractions of Leeds and Sheffield are only a half-hour drive away. Situated in delightful countryside just outside Barnsley, Wentworth Castle Gardens is close to the Transpennine Trail – a coast-to-coast path popular with walkers, riders and cyclists.

Places to Stay
A range of competitively priced accommodation within easy reach.

How to Find Us
Signposted from the M1, Barnsley, South Yorkshire. Just 5 mins from Junction 37.

Opening Times
The Gardens are open daily, April to September, 10am-5pm, last admission 4.30pm. From October to March, 10am-4pm, last admission at 3.30pm.

Entry
Adult (17 years and over) £4.95
Concession (aged 60+, Registered Disabled, FE/HE Student) £3.95
Children (5-16 years) £2.95
Children (under 5) FREE
Family (2+1) £12.00
Family (2+2) £15.00, plus £2.00 for any additional children on a Family ticket
Season Tickets also available.
Car parking is free for visitors paying Garden admission, but visitors to the Parkland must buy a car park token (currently £3.00 per car) from the Long Barn Visitor Centre. RHS members get free entry to the Garden on presentation of their membership card, but must pay for car parking.

Food & Drink
The café in the Long Barn Visitor Centre has a varied seasonal menu of meals and snacks.

Postcode for Car Sat Navs
S75 3ET

Friends/Volunteers
In addition to a very active Friends of Wentworth group, there are lots of volunteer opportunities. From gardening to helping out in the shop or café, assisting at the many special events held at Wentworth or working behind the scenes, volunteer help is always welcome and much appreciated.

Website/Tel. No
www.wentworthcastle.org 01226 776040
import { UseGuards } from "@nestjs/common";
import { Args, Context, Resolver } from "@nestjs/graphql";
import { PrismaService } from "../prisma/prisma.service";
import { User } from "./model/user.model";
import { UserService } from "./user.service";
import { Query, Mutation } from "@nestjs/graphql";
import { UserConnection } from "./model/user-connection.model";
import { ChangePasswordInput } from "./inputs/change-passsword.input";
import { findManyCursorConnection } from "@devoxa/prisma-relay-cursor-connection";
import { AuthService } from "../auth/auth-jwt.service";
import { AuthGuard } from "src/common/guards/gql-context.guard";
import { AuthDetailed } from "src/auth/model/auth-detailed.model";
import { FindManyUsersPaginatedInput } from "./inputs/user-paginated-args.input";
import { fromGlobalId, toGlobalId } from "graphql-relay";
import { FindManyEntriesPaginatedInput } from "src/entry/inputs/entry-paginated.input";
import { FindManyMediaItemsPaginatedInput } from "src/media/inputs/find-many-media-items-paginated.input";
import { ContentNodes } from "./outputs/content-nodes.output";
import { AppContext } from "src/gql-config.service";
import { Entry } from "src/entry/model/entry.model";
import { MediaItem } from "src/media/model/media.model";

@Resolver(() => User)
export class UserResolver {
  constructor(
    private prismaService: PrismaService,
    private authService: AuthService,
    private userService: UserService
  ) {}

  @UseGuards(AuthGuard)
  @Query(() => AuthDetailed)
  async me(@Context() { token }: AppContext): Promise<AuthDetailed | null> {
    return token
      ? await this.authService.getUserWithDecodedToken(token)
      : null;
  }

  @Query(() => ContentNodes)
  async contentNodesUnion(
    @Args("findManyUsersPaginatedInput", {
      type: () => FindManyUsersPaginatedInput,
      nullable: true
    })
    params: FindManyUsersPaginatedInput,
    @Args("findManyEntriesPaginatedInput")
    entryParams: FindManyEntriesPaginatedInput,
    @Args("findManyMediaItemsPaginated", {
      type: () => FindManyMediaItemsPaginatedInput,
      nullable: true
    })
    mediaParams: FindManyMediaItemsPaginatedInput
  ): Promise<ContentNodes> {
    const edgingUserNodes = await findManyCursorConnection(
      args =>
        this.prismaService.user.findMany({
          take: params.take,
          skip: params.skip,
          distinct: params.distinct,
          where: params.where,
          orderBy: params.orderBy,
          cursor: params.cursor,
          ...args
        }),
      () =>
        this.prismaService.user.count({
          orderBy: params.orderBy,
          distinct: params.distinct,
          skip: params.skip,
          where: params.where,
          cursor: params.cursor
        }),
      {
        first: params.pagination.first ?? 10,
        last: params.pagination.last,
        before: params.pagination.before,
        after: params.pagination.after
      },
      {
        getCursor: (record: { id: string }) => {
          return record;
        },
        decodeCursor: (cursor: string) => fromGlobalId(cursor),
        encodeCursor: (cursor: { id: string }) =>
          toGlobalId(User.name, cursor.id)
      }
    );
    const edgingThoseMediaItems = await findManyCursorConnection(
      args =>
        this.prismaService.mediaItem.findMany({
          skip: mediaParams.skip,
          take: mediaParams.take,
          distinct: mediaParams.distinct,
          where: mediaParams.where,
          orderBy: mediaParams.orderBy,
          cursor: mediaParams.cursor,
          ...args
        }),
      () =>
        this.prismaService.mediaItem.count({
          skip: mediaParams.skip,
          take: mediaParams.take,
          distinct: mediaParams.distinct,
          where: mediaParams.where,
          orderBy: mediaParams.orderBy
        }),
      {
        first: mediaParams.pagination.first ?? 10,
        last: mediaParams.pagination.last,
        before: mediaParams.pagination.before,
        after: mediaParams.pagination.after
      },
      {
        getCursor: (record: { id: string }) => {
          return record;
        },
        decodeCursor: (cursor: string) => fromGlobalId(cursor),
        encodeCursor: (cursor: { id: string }) =>
          toGlobalId(MediaItem.name, cursor.id)
      }
    );
    const edgingThoseNodes = await findManyCursorConnection(
      args =>
        this.prismaService.entry.findMany({
          distinct: entryParams.distinct,
          take: entryParams.take,
          skip: entryParams.skip,
          where: entryParams.where,
          cursor: entryParams.cursor,
          orderBy: entryParams.orderBy,
          ...args
        }),
      () =>
        this.prismaService.entry.count({
          distinct: entryParams.distinct,
          skip: entryParams.skip,
          where: entryParams.where,
          cursor: entryParams.cursor
        }),
      {
        first: entryParams.pagination.first ?? 10,
        last: entryParams.pagination.last,
        before: entryParams.pagination.before,
        after: entryParams.pagination.after
      },
      {
        getCursor: (record: { id: string }) => {
          return record;
        },
        decodeCursor: (cursor: string) => fromGlobalId(cursor),
        encodeCursor: (cursor: { id: string }) =>
          toGlobalId(Entry.name, cursor.id)
      }
    );
    const output =
      edgingThoseMediaItems || edgingThoseNodes || edgingUserNodes;
    return { contentNodes: { nodes: output } };
  }

  @Query(() => User)
  async userById(@Args("id") id: string) {
    const getUserById = await this.userService.relayFindUniqueUser({ id });
    return getUserById;
  }

  @Query(() => UserConnection)
  @UseGuards(AuthGuard)
  async listUsers(
    @Args("findManyUsersPaginatedInput", {
      type: () => FindManyUsersPaginatedInput,
      nullable: true,
      defaultValue: {
        findManyUsersPaginatedInput: { pagination: { first: 10 } }
      }
    })
    params: FindManyUsersPaginatedInput
  ) {
    return await findManyCursorConnection(
      args =>
        this.prismaService.user.findMany({
          take: params.take,
          include: { entries: true, profile: true, _count: true },
          skip: params.skip,
          distinct: params.distinct,
          where: params.where,
          orderBy: params.orderBy,
          ...args
        }),
      () =>
        this.prismaService.user.count({
          orderBy: params.orderBy,
          distinct: params.distinct,
          skip: params.skip,
          where: params.where,
          cursor: params.cursor
        }),
      {
        first: params.pagination.first ?? 10,
        last: params.pagination.last,
        before: params.pagination.before,
        after: params.pagination.after
      },
      {
        getCursor: (record: { id: string }) => {
          return record;
        },
        decodeCursor: (cursor: string) => fromGlobalId(cursor),
        encodeCursor: (cursor: { id: string }) =>
          toGlobalId(User.name, cursor.id)
      }
    );
  }

  @Query(() => User)
  async userByRelayId(
    @Args("cursor", { type: () => String }) cursor: string
  ): Promise<User> {
    return await this.userService.relayFindUniqueUser({ id: cursor });
  }

  @UseGuards(AuthGuard)
  @Mutation(() => User)
  async changePassword(
    @Context() ctx: AppContext,
    @Args("changePasswordInput") changePasswordInput: ChangePasswordInput
  ) {
    return await this.userService
      .changePassword({
        changePasswordInput: changePasswordInput,
        id: ctx.xAuth?.split(/([:])/)[0]
          ? ctx.xAuth.split(/([:])/)[0]
          : ctx.viewerId!
      })
      .then(async data => {
        const changePW = await this.userService.changePassword({
          changePasswordInput: {
            newPassword: changePasswordInput.newPassword,
            oldPassword: changePasswordInput.oldPassword
          },
          id: ctx as unknown as string
        });
        if (changePW != null) {
          return changePW;
        }
        return changePW;
      });
  }
}
// NewRouter creates a router for an input channel and begins reading immediately
func NewRouter() *Router {
	var channelMap = make(map[string]chan OrdinalValue)
	router := Router{channelMap: channelMap}
	return &router
}
package player

import (
	"sync"

	"github.com/golang/protobuf/proto"
)

// OnlinePlayerManager manages the players that are currently online.
type OnlinePlayerManager interface {
	GetPlayerById(playerId int64) *Player
	BroadcastMsg(msg proto.Message)
	PlayerEnterServer(p *Player) bool
	PlayerLeaveServer(p *Player)
}

type onlinePlayerManager struct {
	rwm sync.RWMutex
	// map of online players, keyed by player id
	playerMap map[int64]*Player
}

func (ops *onlinePlayerManager) GetPlayerById(playerId int64) *Player {
	ops.rwm.RLock()
	defer ops.rwm.RUnlock()

	p, ok := ops.playerMap[playerId]
	if !ok {
		return nil
	}
	return p
}

func (ops *onlinePlayerManager) BroadcastMsg(msg proto.Message) {
	ops.rwm.Lock()
	defer ops.rwm.Unlock()

	for _, p := range ops.playerMap {
		p.SendMsg(msg)
	}
}

func (ops *onlinePlayerManager) PlayerEnterServer(p *Player) bool {
	ops.rwm.Lock()
	defer ops.rwm.Unlock()

	_, ok := ops.playerMap[p.GetId()]
	if ok {
		return false
	}
	ops.playerMap[p.GetId()] = p
	return true
}

func (ops *onlinePlayerManager) PlayerLeaveServer(p *Player) {
	ops.rwm.Lock()
	defer ops.rwm.Unlock()

	playerId := p.GetId()
	if playerId != 0 {
		delete(ops.playerMap, playerId)
	}
}

var (
	opm = &onlinePlayerManager{
		playerMap: make(map[int64]*Player),
	}
)

// GetOnlinePlayerManager returns the global online player manager.
func GetOnlinePlayerManager() OnlinePlayerManager {
	return opm
}
// SwitchToParentFrame focuses on the immediate parent frame of a frame selected
// by Selection.Frame. After switching, all new and existing selections will refer
// to the parent frame. All further Page methods will apply to this frame as well.
//
// This method is not supported by PhantomJS. Please use SwitchToRootFrame instead.
func (p *Page) SwitchToParentFrame() error {
	if err := p.session.FrameParent(); err != nil {
		return fmt.Errorf("failed to switch to parent frame: %s", err)
	}
	return nil
}
// Internal copy or move implementation
func (storage *PublishedStorage) internalCopyOrMoveBlob(src, dst string, metadata azblob.Metadata, move bool) error {
	const leaseDuration = 30

	dstBlobURL := storage.container.NewBlobURL(filepath.Join(storage.prefix, dst))
	srcBlobURL := storage.container.NewBlobURL(filepath.Join(storage.prefix, src))

	leaseResp, err := srcBlobURL.AcquireLease(context.Background(), "", leaseDuration, azblob.ModifiedAccessConditions{})
	if err != nil || leaseResp.StatusCode() != http.StatusCreated {
		return fmt.Errorf("error acquiring lease on source blob %s", srcBlobURL)
	}
	defer srcBlobURL.BreakLease(context.Background(), azblob.LeaseBreakNaturally, azblob.ModifiedAccessConditions{})
	srcBlobLeaseID := leaseResp.LeaseID()

	copyResp, err := dstBlobURL.StartCopyFromURL(
		context.Background(),
		srcBlobURL.URL(),
		metadata,
		azblob.ModifiedAccessConditions{},
		azblob.BlobAccessConditions{},
		azblob.DefaultAccessTier,
		nil)
	if err != nil {
		return fmt.Errorf("error copying %s -> %s in %s: %s", src, dst, storage, err)
	}

	copyStatus := copyResp.CopyStatus()
	for {
		if copyStatus == azblob.CopyStatusSuccess {
			if move {
				_, err = srcBlobURL.Delete(
					context.Background(),
					azblob.DeleteSnapshotsOptionNone,
					azblob.BlobAccessConditions{
						LeaseAccessConditions: azblob.LeaseAccessConditions{LeaseID: srcBlobLeaseID},
					})
				return err
			}
			return nil
		} else if copyStatus == azblob.CopyStatusPending {
			time.Sleep(1 * time.Second)

			blobPropsResp, err := dstBlobURL.GetProperties(
				context.Background(),
				azblob.BlobAccessConditions{LeaseAccessConditions: azblob.LeaseAccessConditions{LeaseID: srcBlobLeaseID}},
				azblob.ClientProvidedKeyOptions{})
			if err != nil {
				return fmt.Errorf("error getting destination blob properties %s", dstBlobURL)
			}
			copyStatus = blobPropsResp.CopyStatus()

			_, err = srcBlobURL.RenewLease(context.Background(), srcBlobLeaseID, azblob.ModifiedAccessConditions{})
			if err != nil {
				return fmt.Errorf("error renewing source blob lease %s", srcBlobURL)
			}
		} else {
			return fmt.Errorf("error copying %s -> %s in %s: %s", src, dst, storage, copyStatus)
		}
	}
}
Guinness Book of World Records judge Kimberly Partrick inspects and certifies the world's largest chocolate bar, weighing in at 12,190 pounds and measuring 21 feet long, 4 feet wide and 3 feet high, on Tuesday, Sept. 13, 2011 in Chicago. UPI/Brian Kersey

Garry Hine, left, and Gary Wychocki attach a giant chocolate bar to a scale for verification. The bar, made by Chicago-based World's Finest Chocolate, a leader in chocolate fundraising products, will tour schools across the United States as part of a portion-control education campaign called 'Think Big. Eat Smart.' UPI/Brian Kersey

CHICAGO, Sept. 13 (UPI) -- A Chicago company said Tuesday it has created the world's largest chocolate bar, at 12,000 pounds. The company, World's Finest Chocolate, said the 6-ton bar measures nearly 3 feet high and 21 feet long. The bar is to be unveiled before a Guinness World Records judge prior to beginning a cross-country tour of schools. The tour is part of the company's "Think Big, Eat Smart" campaign, bringing to life the concept of "portion distortion," the company said. The company said the bar will provide 209,000 1-ounce servings -- enough for every fan in Chicago's Wrigley Field, U.S. Cellular Field, Soldier Field and United Center, with 45,000 servings left over.
def _send_task(self, task, start_time, timeout):
    task_writer = self._task_channel[1]
    data_len = len(task.data)
    data = struct.pack('=I', data_len) + task.data
    if len(data) <= BUF_SIZE:
        error_no = try_write(task_writer, data)[1]
        if error_no:
            if error_no == errno.EAGAIN:
                logger.warning(
                    'The task channel is busy, '
                    'please make sure your pipe capacity is at least %d bytes.',
                    BUF_SIZE
                )
            else:
                logger.error('The task channel to worker %d is broken.', self._child_pid)
            return False
    else:
        send_timeout = timeout * 0.5
        send_deadline = start_time + send_timeout
        wlist = (task_writer,)
        while True:
            _, writable_fds, _ = select_ignore_eintr((), wlist, (), 0.1)
            if writable_fds:
                data, error_no = try_write(task_writer, data)
                if error_no == 0:
                    break
                if error_no != errno.EAGAIN:
                    logger.error('The task channel to worker %d is broken.', self._child_pid)
                    return False
            if time.time() > send_deadline:
                logger.error('Sending task to worker %d timeout.', self._child_pid)
                return False
    return True
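The helper try_write is not shown in this snippet. A minimal sketch of what such a helper might look like, assuming it wraps a non-blocking os.write on the pipe and returns the unwritten remainder plus an errno-style code (the name, signature, and return convention here are assumptions, not the original implementation):

import errno
import os

def try_write(fd, data):
    # Hypothetical helper for illustration; the real try_write used by
    # _send_task may differ in signature and behavior.
    try:
        written = os.write(fd, data)  # non-blocking fd: may write only part
    except OSError as e:
        return data, e.errno          # includes EAGAIN when the pipe is full
    rest = data[written:]
    return rest, (errno.EAGAIN if rest else 0)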
BEIJING — China announced a further sharp increase in military spending on Tuesday, a day after the United States renewed its warning that a lack of openness surrounding the rapid buildup of China's armed forces posed a threat to stability in Asia. China's military budget for 2008 will increase by 17.6 percent to 417.8 billion yuan, or about $58.8 billion, Jiang Enzhu, spokesman for the National People's Congress, China's legislature, said at a news conference. This follows a 17.8 percent increase in 2007. Military experts in the United States and elsewhere say Beijing's real military spending is at least double the announced figure. But even if it was double, China's yearly military budget would still be only about one-fourth the size of the Pentagon's. Before the annual legislative session, which begins Wednesday, Mr. Jiang also said the situation in the Taiwan Strait was "grim and complex," and called on the Taiwanese president, Chen Shui-bian, to halt what Beijing described as unacceptable moves toward independence. China considers the island of Taiwan a breakaway province. China has increased annual defense outlays by double-digit percentages most years in the past two decades to pay for an array of modern weapons and better training and conditions for the 2.3 million people in its military, the world's biggest standing force.
// New creates a new ring buffer pipeline element.
func New(size int) *Buffer {
	return &Buffer{
		ring: ring.New(size),
	}
}
/*
   Copyright 2013 Nationale-Nederlanden

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.jdbc;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

import nl.nn.adapterframework.core.IDataIterator;
import nl.nn.adapterframework.core.SenderException;

/**
 * Pipe that iterates over the rows in a ResultSet.
 *
 * Each row is passed to the sender in the same format a row is usually returned from a query.
 *
 * <p><b>Configuration </b><i>(where deviating from IteratingPipe)</i><b>:</b>
 * <table border="1">
 * <tr><th>attributes</th><th>description</th><th>default</th></tr>
 * <tr><td>className</td><td>nl.nn.adapterframework.jdbc.JdbcIteratingPipeBase</td><td>&nbsp;</td></tr>
 * <tr><td>{@link #setQuery(String) query}</td><td>the SQL query text to be executed each time sendMessage() is called</td><td>&nbsp;</td></tr>
 * <tr><td>{@link #setLockRows(boolean) lockRows}</td><td>When set <code>true</code>, exclusive row-level locks are obtained on all the rows identified by the SELECT statement (by appending ' FOR UPDATE NOWAIT SKIP LOCKED' to the end of the query)</td><td>false</td></tr>
 * <tr><td>{@link #setLockWait(int) lockWait}</td><td>when set and >=0, ' FOR UPDATE WAIT #' is used instead of ' FOR UPDATE NOWAIT SKIP LOCKED'</td><td>-1</td></tr>
 * <tr><td>{@link #setDatasourceName(String) datasourceName}</td><td>can be configured from JmsRealm, too</td><td>&nbsp;</td></tr>
 * </table>
 * </p>
 * @author <NAME>
 * @since 4.7
 */
public class ResultSetIteratingPipe extends JdbcIteratingPipeBase {

    @Override
    protected IDataIterator<String> getIterator(Connection conn, ResultSet rs) throws SenderException {
        try {
            return new ResultSetIterator(conn, rs);
        } catch (SQLException e) {
            throw new SenderException(e);
        }
    }
}
Affinity modulation of photoresponsive hosts for fullerenes: light-gated corannulene tweezers. Six azobenzene derivatives bearing polyaromatic fragments have been prepared and their reversible photoisomerization has been assessed. Corannulene-functionalized molecules have demonstrated excellent switchable hosting abilities towards fullerenes in which an interesting range of affinities has been found. The success of this design relies upon the reversible formation and destruction of tweezer-like structures.
def edowham(alpha_p, e_eff):
    """Calculate the elasticity parameter of the contact problem
    (Dowson-Hamrock), i.e. the product of the pressure-viscosity
    coefficient `alpha_p` and the effective modulus `e_eff`.
    """
    param_elasticity = alpha_p * e_eff
    return param_elasticity
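A usage sketch of edowham; the numeric inputs below are illustrative assumptions (a typical mineral-oil pressure-viscosity coefficient and a steel-on-steel effective modulus), not values from the source:

# Illustrative inputs (assumed, not from the source):
alpha_p = 2.0e-8   # pressure-viscosity coefficient, 1/Pa
e_eff = 2.2e11     # effective modulus of the contact, Pa

g_e = edowham(alpha_p, e_eff)
print(g_e)  # 4400.0 -- dimensionless elasticity parameter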
/****************************************************************************

    ePMC - an extensible probabilistic model checker
    Copyright (C) 2017

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.

*****************************************************************************/

package epmc.jani.explorer;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import epmc.jani.model.Action;
import epmc.jani.model.ModelJANI;
import epmc.value.Type;
import epmc.value.TypeObject;
import epmc.value.Value;
import epmc.value.ValueObject;
import epmc.value.TypeObject.StorageType;

public final class PropertyEdgeAction implements PropertyEdge {
    private final Map<Action,Integer> actionToNumber = new HashMap<>();
    private final Action[] numberToAction;
    private final TypeObject type;
    private final ValueObject value;
    private int[] values = new int[1];

    PropertyEdgeAction(ExplorerJANI explorer) {
        assert explorer != null;
        ModelJANI model = explorer.getModel();
        numberToAction = new Action[model.getActionsOrEmpty().size() + 1];
        int actionNumber = 0;
        actionToNumber.put(model.getSilentAction(), actionNumber);
        numberToAction[actionNumber] = model.getSilentAction();
        actionNumber++;
        for (Action action : model.getActionsOrEmpty()) {
            actionToNumber.put(action, actionNumber);
            numberToAction[actionNumber] = action;
            actionNumber++;
        }
        type = new TypeObject.Builder()
                .setClazz(Action.class)
                .setStorageClass(StorageType.NUMERATED_IDENTITY)
                .build();
        value = type.newValue();
    }

    @Override
    public Value get(int successor) {
        Action action = numberToAction[values[successor]];
        value.set(action);
        return value;
    }

    public int getInt(int successor) {
        return values[successor];
    }

    @Override
    public Type getType() {
        return type;
    }

    public void set(int successor, Object value) {
        assert value instanceof Action : value + " " + value.getClass();
        assert actionToNumber.containsKey(value);
        ensureSuccessorsSize(successor);
        int actionNumber = actionToNumber.get(value);
        values[successor] = actionNumber;
    }

    void set(int successor, int value) {
        ensureSuccessorsSize(successor);
        values[successor] = value;
    }

    private void ensureSuccessorsSize(int successor) {
        int numSuccessors = successor + 1;
        if (numSuccessors < values.length) {
            return;
        }
        int newLength = values.length;
        while (newLength <= numSuccessors) {
            newLength *= 2;
        }
        int[] newValues = Arrays.copyOf(values, newLength);
        values = newValues;
    }

    @Override
    public String toString() {
        return actionToNumber.toString();
    }
}
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
from torch.nn import functional as F

from maskrcnn_benchmark.layers import smooth_l1_loss
from maskrcnn_benchmark.modeling.matcher import Matcher
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou
from maskrcnn_benchmark.modeling.utils import cat
from maskrcnn_benchmark.structures.bounding_box import BoxList
from maskrcnn_benchmark.structures.segmentation_mask import SegmentationMask


def project_masks_on_boxes(segmentation_masks, proposals, discretization_size):
    """
    Given segmentation masks and the bounding boxes corresponding
    to the location of the masks in the image, this function
    crops and resizes the masks in the position defined by the
    boxes. This prepares the masks for them to be fed to the
    loss computation as the targets.

    Arguments:
        segmentation_masks: an instance of SegmentationMask
        proposals: an instance of BoxList
    """
    masks = []
    M = discretization_size
    device = proposals.bbox.device
    proposals = proposals.convert("xyxy")
    assert segmentation_masks.size == proposals.size, "{}, {}".format(
        segmentation_masks, proposals)

    # TODO put the proposals on the CPU, as the representation for the
    # masks is not efficient GPU-wise (possibly several small tensors for
    # representing a single instance mask)
    proposals = proposals.bbox.to(torch.device("cpu"))
    for segmentation_mask, proposal in zip(segmentation_masks, proposals):
        # crop the masks, resize them to the desired resolution and
        # then convert them to the tensor representation,
        # instead of the list representation that was used
        cropped_mask = segmentation_mask.crop(proposal)
        scaled_mask = cropped_mask.resize((M, M))
        mask = scaled_mask.convert(mode="mask")
        masks.append(mask)
    if len(masks) == 0:
        return torch.empty(0, dtype=torch.float32, device=device)
    return torch.stack(masks, dim=0).to(device, dtype=torch.float32)


def project_boxes_on_boxes(matched_bboxes, proposals, discretization_size):
    """
    Given matched bounding boxes and the proposals corresponding to the
    location of the masks in the image, this function crops and resizes
    box-shaped masks in the position defined by the boxes. This prepares
    the masks for them to be fed to the loss computation as the targets.

    Arguments:
        matched_bboxes: an instance of BoxList
        proposals: an instance of BoxList
    """
    masks = []
    M = discretization_size
    device = proposals.bbox.device
    proposals = proposals.convert("xyxy")
    original_size = proposals.size
    assert matched_bboxes.size == proposals.size, "{}, {}".format(
        matched_bboxes, proposals)

    # TODO put the proposals on the CPU, as the representation for the
    # masks is not efficient GPU-wise (possibly several small tensors for
    # representing a single instance mask)
    proposals = proposals.bbox.to(torch.device("cpu"))
    matched_bboxes = matched_bboxes.bbox.to(torch.device("cpu"))

    # Generate segmentation masks based on matched_bboxes
    polygons = []
    for matched_bbox in matched_bboxes:
        x1, y1, x2, y2 = matched_bbox[0], matched_bbox[1], matched_bbox[2], matched_bbox[3]
        p = [[x1, y1, x1, y2, x2, y2, x2, y1]]
        polygons.append(p)
    segmentation_masks = SegmentationMask(polygons, original_size)

    for segmentation_mask, proposal in zip(segmentation_masks, proposals):
        # crop the masks, resize them to the desired resolution and
        # then convert them to the tensor representation,
        # instead of the list representation that was used
        cropped_mask = segmentation_mask.crop(proposal)
        scaled_mask = cropped_mask.resize((M, M))
        mask = scaled_mask.convert(mode="mask")
        masks.append(mask)
    if len(masks) == 0:
        return torch.empty(0, dtype=torch.float32, device=device)
    return torch.stack(masks, dim=0).to(device, dtype=torch.float32)


class MaskRCNNLossComputation(object):
    def __init__(self, proposal_matcher, discretization_size):
        """
        Arguments:
            proposal_matcher (Matcher)
            discretization_size (int)
        """
        self.proposal_matcher = proposal_matcher
        self.discretization_size = discretization_size

        mask_h = mask_w = self.discretization_size
        self.center_weight = torch.tensor([[0., 0., 0.], [0., 1., 0.], [0., 0., 0.]])
        # TODO: modified this as one conv with 8 channels for efficiency
        self.pairwise_weights_list = [
            torch.tensor([[0., 0., 0.], [1., 0., 0.], [0., 0., 0.]]),
            torch.tensor([[0., 0., 0.], [0., 0., 1.], [0., 0., 0.]]),
            torch.tensor([[0., 1., 0.], [0., 0., 0.], [0., 0., 0.]]),
            torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 1., 0.]]),
            torch.tensor([[1., 0., 0.], [0., 0., 0.], [0., 0., 0.]]),
            torch.tensor([[0., 0., 1.], [0., 0., 0.], [0., 0., 0.]]),
            torch.tensor([[0., 0., 0.], [0., 0., 0.], [1., 0., 0.]]),
            torch.tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 1.]]),
        ]

    def match_targets_to_proposals(self, proposal, target):
        match_quality_matrix = boxlist_iou(target, proposal)
        matched_idxs = self.proposal_matcher(match_quality_matrix)
        # Mask RCNN needs "labels" and "masks" fields for creating the targets
        target = target.copy_with_fields(["labels", "masks"])
        # get the targets corresponding GT for each proposal
        # NB: need to clamp the indices because we can have a single
        # GT in the image, and matched_idxs can be -2, which goes
        # out of bounds
        matched_targets = target[matched_idxs.clamp(min=0)]
        matched_targets.add_field("matched_idxs", matched_idxs)
        return matched_targets

    def prepare_targets(self, proposals, targets):
        labels = []
        masks = []
        for proposals_per_image, targets_per_image in zip(proposals, targets):
            matched_targets = self.match_targets_to_proposals(
                proposals_per_image, targets_per_image)
            matched_idxs = matched_targets.get_field("matched_idxs")

            labels_per_image = matched_targets.get_field("labels")
            labels_per_image = labels_per_image.to(dtype=torch.int64)

            # this can probably be removed, but is left here for clarity
            # and completeness
            neg_inds = matched_idxs == Matcher.BELOW_LOW_THRESHOLD
            labels_per_image[neg_inds] = 0

            # mask scores are only computed on positive samples
            positive_inds = torch.nonzero(labels_per_image > 0).squeeze(1)

            segmentation_masks = matched_targets.get_field("masks")
            segmentation_masks = segmentation_masks[positive_inds]

            positive_proposals = proposals_per_image[positive_inds]

            masks_per_image = project_masks_on_boxes(segmentation_masks,
                                                     positive_proposals,
                                                     self.discretization_size)

            labels.append(labels_per_image)
            masks.append(masks_per_image)

        return labels, masks

    # 2019.4.16 Add for negative sample part for MIL.
    def prepare_targets_labels(self, proposals, targets):
        # Sample both negative and positive proposals
        # Only with image labels (without mask)
        labels = []
        for proposals_per_image, targets_per_image in zip(proposals, targets):
            matched_targets = self.match_targets_to_proposals(
                proposals_per_image, targets_per_image)
            matched_idxs = matched_targets.get_field("matched_idxs")

            labels_per_image = matched_targets.get_field("labels")
            labels_per_image = labels_per_image.to(dtype=torch.int64)

            # this can probably be removed, but is left here for clarity
            # and completeness
            neg_inds = matched_idxs == Matcher.BELOW_LOW_THRESHOLD
            labels_per_image[neg_inds] = 0

            labels.append(labels_per_image)

        return labels

    # 2019.4.18 Add for per col/row MIL.
    def prepare_targets_cr(self, proposals, targets):
        # Sample both negative and positive proposals
        # Only with per col/row labels (without mask)
        labels = []
        for proposals_per_image, targets_per_image in zip(proposals, targets):
            device = proposals_per_image.bbox.device
            matched_targets = self.match_targets_to_proposals(
                proposals_per_image, targets_per_image)
            matched_idxs = matched_targets.get_field("matched_idxs")

            labels_per_image = matched_targets.get_field("labels")
            labels_per_image = labels_per_image.to(dtype=torch.int64)

            # this can probably be removed, but is left here for clarity
            # and completeness
            neg_inds = matched_idxs == Matcher.BELOW_LOW_THRESHOLD
            labels_per_image[neg_inds] = 0

            # mask scores are only computed on positive samples
            pos_inds = torch.nonzero(labels_per_image > 0).squeeze(1)

            # delete field "mask"
            new_matched_targets = matched_targets.copy_with_fields(
                ["matched_idxs", "labels"])

            # generate bbox corresponding proposals
            pos_masks_per_image = project_boxes_on_boxes(
                new_matched_targets[pos_inds], proposals_per_image[pos_inds],
                self.discretization_size)

            # generate label per image
            # initialize as zeros, and thus all labels of negative sample is zeros
            M = self.discretization_size
            labels_per_image = torch.zeros(
                (len(proposals_per_image.bbox), M + M),
                device=device)  # (n_proposal, 56)

            # generate label of positive sample
            pos_labels = []
            for mask in pos_masks_per_image:
                label_col = [
                    torch.any(mask[col, :] > 0) for col in range(mask.size(0))
                ]
                label_row = [
                    torch.any(mask[:, row] > 0) for row in range(mask.size(1))
                ]
                label = torch.stack(label_col + label_row)
                pos_labels.append(label)
            pos_labels = torch.stack(pos_labels).float()
            labels_per_image[pos_inds] = pos_labels

            # save
            labels.append(labels_per_image)

        return labels

    def __call__(self, proposals, mask_logits, mil_score, targets):
        """
        Arguments:
            proposals (list[BoxList])
            mask_logits (Tensor)
            targets (list[BoxList])

        Return:
            mask_loss (Tensor): scalar tensor containing the loss
        """
        # MIL term
        # Stack mil score and mask_logits
        if len(mil_score.shape) > 2 or mask_logits.size(1) > 1:
            # multi-class
            class_labels, _ = self.prepare_targets(proposals, targets)
            class_labels = cat(class_labels, dim=0)
            if len(mil_score.shape) > 2:
                mil_score = [s[c] for s, c in zip(mil_score, class_labels)]
                mil_score = torch.stack(mil_score)
            if mask_logits.size(1) > 1:
                mask_logits = [m[c] for m, c in zip(mask_logits, class_labels)]
                mask_logits = torch.stack(mask_logits).unsqueeze(1)

        # Prepare target labels for mil loss of each col/row.
        labels = self.prepare_targets_cr(
            proposals, targets)  # for both positive/negative samples
        labels = cat(labels, dim=0)

        # Compute MIL term for each col/row MIL
        mil_loss = F.binary_cross_entropy_with_logits(mil_score, labels)

        # Pairwise term
        device = mask_logits.device
        mask_h, mask_w = mask_logits.size(2), mask_logits.size(3)
        pairwise_loss = []
        # Sigmoid transform to [0, 1]
        mask_logits_normalize = mask_logits.sigmoid()
        # Compute pairwise loss for each col/row MIL
        for w in self.pairwise_weights_list:
            conv = torch.nn.Conv2d(1, 1, 3, bias=False, padding=(1, 1))
            weights = self.center_weight - w
            weights = weights.view(1, 1, 3, 3).to(device)
            conv.weight = torch.nn.Parameter(weights)
            for param in conv.parameters():
                param.requires_grad = False
            aff_map = conv(mask_logits_normalize)
            cur_loss = (aff_map**2)
            cur_loss = torch.mean(cur_loss)
            pairwise_loss.append(cur_loss)
        pairwise_loss = torch.mean(torch.stack(pairwise_loss))

        return 1.0 * mil_loss, 0.05 * pairwise_loss


def make_roi_mask_loss_evaluator(cfg):
    matcher = Matcher(
        cfg.MODEL.ROI_HEADS.FG_IOU_THRESHOLD,
        cfg.MODEL.ROI_HEADS.BG_IOU_THRESHOLD,
        allow_low_quality_matches=False,
    )

    loss_evaluator = MaskRCNNLossComputation(
        matcher, cfg.MODEL.ROI_MASK_HEAD.RESOLUTION)

    return loss_evaluator
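The pairwise term above penalizes disagreement between each mask probability and its eight neighbors using fixed 3x3 difference kernels. A toy sketch of the same idea on a random tensor (shapes and values are illustrative assumptions, not from the source):

import torch

# One fixed kernel: center minus left neighbor. Applying it to the
# sigmoid-normalized mask gives per-pixel differences; squaring and
# averaging yields a smoothness penalty, as in the loss above.
probs = torch.rand(1, 1, 5, 5)  # stand-in for mask_logits.sigmoid()

center = torch.tensor([[0., 0., 0.], [0., 1., 0.], [0., 0., 0.]])
left = torch.tensor([[0., 0., 0.], [1., 0., 0.], [0., 0., 0.]])

conv = torch.nn.Conv2d(1, 1, 3, bias=False, padding=1)
conv.weight = torch.nn.Parameter((center - left).view(1, 1, 3, 3), requires_grad=False)

aff_map = conv(probs)              # difference between each pixel and its left neighbor
pairwise = (aff_map ** 2).mean()   # small when neighboring probabilities agree
print(float(pairwise))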
/*
 Copyright (c) 2017-2018 <NAME> Software Co., Ltd.

 http://www.cocos.com

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated engine source code (the "Software"), a limited,
 worldwide, royalty-free, non-assignable, revocable and non-exclusive license
 to use Cocos Creator solely to develop games on your target platforms. You shall
 not use Cocos Creator software for developing other software or tools that's
 used for developing games. You are not granted to publish, distribute,
 sublicense, and/or sell copies of Cocos Creator.

 The software or tools in this License Agreement are licensed, not sold.
 Xiamen Yaji Software Co., Ltd. reserves all rights not expressly granted to you.

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
*/

/**
 * @hidden
 */

import { DEBUG, JSB } from 'internal:constants';
import { NativeBufferPool, NativeObjectPool, NativeArrayPool } from './native-pools';
import {
    GFXRasterizerState, GFXDepthStencilState, GFXBlendState, IGFXDescriptorSetInfo,
    GFXDevice, GFXDescriptorSet, GFXShaderInfo, GFXShader, IGFXInputAssemblerInfo, GFXInputAssembler,
    IGFXPipelineLayoutInfo, GFXPipelineLayout, GFXFramebuffer, IGFXFramebufferInfo, GFXPrimitiveMode,
    GFXDynamicStateFlags, GFXClearFlag,
} from '../../gfx';
import { RenderPassStage } from '../../pipeline/define';
import { BatchingSchemes } from './pass';
import { Vec3, Mat4, Color, Rect, Quat, Vec4, Vec2 } from '../../math';
import { Layers } from '../../scene-graph/layers';
import { plane } from '../../geometry';

type Vec4Compatibles = Color | Rect | Quat | Vec4 | plane;

interface ITypedArrayConstructor<T> {
    new(buffer: ArrayBufferLike, byteOffset: number, length?: number): T;
    readonly BYTES_PER_ELEMENT: number;
}

// a little hacky, but works (different specializations should not be assignable to each other)
interface IHandle<T extends PoolType> extends Number {
    // we make this non-optional so that even plain numbers would not be directly assignable to handles.
    // this strictness will introduce some casting hassle in the pool implementation itself
    // but becomes generally more useful for client code type checking.
    _: T;
}

type BufferManifest = { [key: string]: number | string; COUNT: number; };
type StandardBufferElement = number | IHandle<any>;
type GeneralBufferElement = StandardBufferElement | Vec3 | Mat4 | Vec4Compatibles | Vec2;
type BufferTypeManifest<E extends BufferManifest> = { [key in E[keyof E]]: GeneralBufferElement };
type Conditional<V, T> = T extends V ? T : never;

class BufferPool<P extends PoolType, T extends TypedArray, E extends BufferManifest, M extends BufferTypeManifest<E>> {

    // naming convension:
    // this._bufferViews[chunk][entry][element]

    private _viewCtor: ITypedArrayConstructor<T>;
    private _elementCount: number;
    private _entryBits: number;

    private _stride: number;
    private _entriesPerChunk: number;
    private _entryMask: number;
    private _chunkMask: number;
    private _poolFlag: number;

    private _arrayBuffers: ArrayBuffer[] = [];
    private _freelists: number[][] = [];
    private _bufferViews: T[][] = [];

    private _nativePool: NativeBufferPool;

    constructor (poolType: P, viewCtor: ITypedArrayConstructor<T>, enumType: E, entryBits = 8) {
        this._viewCtor = viewCtor;
        this._elementCount = enumType.COUNT;
        this._entryBits = entryBits;

        const bytesPerElement = viewCtor.BYTES_PER_ELEMENT || 1;
        this._stride = bytesPerElement * this._elementCount;
        this._entriesPerChunk = 1 << entryBits;
        this._entryMask = this._entriesPerChunk - 1;
        this._poolFlag = 1 << 30;
        this._chunkMask = ~(this._entryMask | this._poolFlag);
        this._nativePool = new NativeBufferPool(poolType, entryBits, this._stride);
    }

    public alloc (): IHandle<P> {
        let i = 0;
        for (; i < this._freelists.length; i++) {
            const list = this._freelists[i];
            if (list.length) {
                const j = list[list.length - 1];
                list.length--;
                return (i << this._entryBits) + j + this._poolFlag as unknown as IHandle<P>;
            }
        }
        // add a new chunk
        const buffer = this._nativePool.allocateNewChunk();
        const bufferViews: T[] = [];
        const freelist: number[] = [];
        for (let j = 0; j < this._entriesPerChunk; j++) {
            bufferViews.push(new this._viewCtor(buffer, this._stride * j, this._elementCount));
            if (j) { freelist.push(j); }
        }
        this._arrayBuffers.push(buffer);
        this._bufferViews.push(bufferViews);
        this._freelists.push(freelist);
        return (i << this._entryBits) + this._poolFlag as unknown as IHandle<P>; // guarantees the handle is always not zero
    }

    /**
     * Get the specified element out from buffer pool.
     *
     * Note the type inference does not work when `element` is not directly
     * an pre-declared enum value: (e.g. when doing arithmetic operations)
     * ```ts
     * SubModelPool.get(handle, SubModelView.SHADER_0 + passIndex); // the return value will have type GeneralBufferElement
     * ```
     *
     * To properly declare the variable type, you have two options:
     * ```ts
     * const hShader = SubModelPool.get(handle, SubModelView.SHADER_0 + passIndex) as ShaderHandle; // option #1
     * const hShader = SubModelPool.get<SubModelView.SHADER_0>(handle, SubModelView.SHADER_0 + passIndex); // option #2
     * ```
     */
    public get<K extends E[keyof E]> (handle: IHandle<P>, element: K): Conditional<StandardBufferElement, M[K]> {
        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return 0 as Conditional<StandardBufferElement, M[K]>;
        }
        return this._bufferViews[chunk][entry][element as number] as Conditional<StandardBufferElement, M[K]>;
    }

    public set<K extends E[keyof E]> (handle: IHandle<P>, element: K, value: Conditional<StandardBufferElement, M[K]>) {
        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        this._bufferViews[chunk][entry][element as number] = value as number;
    }

    public setVec2<K extends E[keyof E]> (handle: IHandle<P>, element: K, vec2: Conditional<Vec2, M[K]>) {
        // Web engine has Vec2 property, don't record it in shared memory.
        if (!JSB) return;

        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        let index = element as unknown as number;
        const view = this._bufferViews[chunk][entry];
        view[index++] = vec2.x;
        view[index++] = vec2.y;
    }

    public setVec3<K extends E[keyof E]> (handle: IHandle<P>, element: K, vec3: Conditional<Vec3, M[K]>) {
        // Web engine has Vec3 property, don't record it in shared memory.
        if (!JSB) return;

        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        let index = element as unknown as number;
        const view = this._bufferViews[chunk][entry];
        view[index++] = vec3.x;
        view[index++] = vec3.y;
        view[index] = vec3.z;
    }

    public setVec4<K extends E[keyof E]> (handle: IHandle<P>, element: K, vec4: Conditional<Vec4Compatibles, M[K]>) {
        // Web engine has Vec4 property, don't record it in shared memory.
        if (!JSB) return;

        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        let index = element as unknown as number;
        const view = this._bufferViews[chunk][entry];
        view[index++] = vec4.x;
        view[index++] = vec4.y;
        view[index++] = vec4.z;
        view[index] = vec4.w;
    }

    public setMat4<K extends E[keyof E]> (handle: IHandle<P>, element: K, mat4: Conditional<Mat4, M[K]>) {
        // Web engine has mat4 property, don't record it in shared memory.
        if (!JSB) return;

        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._bufferViews.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        let index = element as unknown as number;
        const view = this._bufferViews[chunk][entry];
        view[index++] = mat4.m00; view[index++] = mat4.m01; view[index++] = mat4.m02; view[index++] = mat4.m03;
        view[index++] = mat4.m04; view[index++] = mat4.m05; view[index++] = mat4.m06; view[index++] = mat4.m07;
        view[index++] = mat4.m08; view[index++] = mat4.m09; view[index++] = mat4.m10; view[index++] = mat4.m11;
        view[index++] = mat4.m12; view[index++] = mat4.m13; view[index++] = mat4.m14; view[index] = mat4.m15;
    }

    public free (handle: IHandle<P>) {
        const chunk = (this._chunkMask & handle as unknown as number) >> this._entryBits;
        const entry = this._entryMask & handle as unknown as number;
        if (DEBUG && (!handle || chunk < 0 || chunk >= this._freelists.length ||
            entry < 0 || entry >= this._entriesPerChunk || this._freelists[chunk].find((n) => n === entry))) {
            console.warn('invalid buffer pool handle');
            return;
        }
        this._bufferViews[chunk][entry].fill(0);
        this._freelists[chunk].push(entry);
    }
}

class ObjectPool<T, P extends PoolType, A extends any[]> {

    private _ctor: (args: A, obj?: T) => T;
    private _dtor?: (obj: T) => void;
    private _indexMask: number;
    private _poolFlag: number;

    private _array: T[] = [];
    private _freelist: number[] = [];

    private _nativePool: NativeObjectPool<T>;

    constructor (poolType: P, ctor: (args: A, obj?: T) => T, dtor?: (obj: T) => void) {
        this._ctor = ctor;
        if (dtor) { this._dtor = dtor; }
        this._poolFlag = 1 << 29;
        this._indexMask = ~this._poolFlag;
        this._nativePool = new NativeObjectPool(poolType, this._array);
    }

    public alloc (...args: A): IHandle<P> {
        const freelist = this._freelist;
        let i = -1;
        if (freelist.length) {
            i = freelist[freelist.length - 1];
            freelist.length--;
            this._array[i] = this._ctor(arguments as unknown as A, this._array[i]);
        }
        if (i < 0) {
            i = this._array.length;
            const obj = this._ctor(arguments as unknown as A);
            if (!obj) { return 0 as unknown as IHandle<P>; }
            this._array.push(obj);
        }
        return i + this._poolFlag as unknown as IHandle<P>; // guarantees the handle is always not zero
    }

    public get (handle: IHandle<P>) {
        const index = this._indexMask & handle as unknown as number;
        if (DEBUG && (!handle || index < 0 || index >= this._array.length || this._freelist.find((n) => n === index))) {
            console.warn('invalid object pool handle');
            return null!;
        }
        return this._array[index];
    }

    public free (handle: IHandle<P>) {
        const index = this._indexMask & handle as unknown as
number; if (DEBUG && (!handle || index < 0 || index >= this._array.length || this._freelist.find((n) => n === index))) { console.warn('invalid object pool handle'); return; } if (this._dtor) { this._dtor(this._array[index]); } this._freelist.push(index); } } /** * P: pool type * D: pool data type */ export class ArrayPool<P extends PoolType, D extends PoolType> { private _nativeArrayPool: NativeArrayPool; private _arrayMap: Map<number, Uint32Array> = new Map<number, Uint32Array>(); private _curArrayHandle: number = 0; private _arrayHandleFlag: number; private _arrayHandleMask: number; private _size: number = 0; private _step: number = 0; /** * Constructor. * @param size The size of the array * @param step The step size to extend the array when exceeding the array size. * It is the same as size if it is not set. */ constructor (arrayType: P, size: number, step?: number) { this._arrayHandleFlag = 1 << 30; this._arrayHandleMask = ~this._arrayHandleFlag; this._size = size + 1; this._step = step || size; this._nativeArrayPool = new NativeArrayPool(arrayType, this._size); } /** * Allocate a new array and return its handle. */ public alloc (): IHandle<P> { const handle = this._curArrayHandle++; const array = this._nativeArrayPool.alloc(handle); this._arrayMap.set(handle, array); return (handle | this._arrayHandleFlag) as unknown as IHandle<P>; } public free (handle: IHandle<P>) { const arrayHandle = this._arrayHandleMask & handle as unknown as number; if (this._arrayMap.get(arrayHandle) === undefined) { if (DEBUG) console.warn('invalid array pool handle'); return; } this._arrayMap.delete(arrayHandle); } public assign (handle: IHandle<P>, index: number, value: IHandle<D>) { const arrayHandle = this._arrayHandleMask & handle as unknown as number; let array = this._arrayMap.get(arrayHandle); if (array === undefined) { if (DEBUG) console.warn('invalid array pool handle'); return; } // First element is the length of the array. index = index + 1; if (index >= array.length) { let length = array.length; while (index >= length) { length += this._step; } array = this._nativeArrayPool.resize(array, length); this._arrayMap.set(arrayHandle, array); } array[index] = value as unknown as number; // There may be holes in the array. const len = array[0]; array[0] = index > len ? index : len; } public erase (handle: IHandle<P>, index: number) { const array = this._arrayMap.get(this._arrayHandleMask & handle as unknown as number); if (array === undefined || index > array[0]) { if (DEBUG) console.warn('invalid array pool index or invalid array handle'); return; } for (let i = index + 1; i < array[0]; ++i) { array[i] = array[i + 1]; } --array[0]; } public push (handle: IHandle<P>, value: IHandle<D>) { const array = this._arrayMap.get(this._arrayHandleMask & handle as unknown as number); if (array === undefined) { if (DEBUG) console.warn('invalid array pool handle'); return; } this.assign(handle, array[0], value); } public pop (handle: IHandle<P>) { const array = this._arrayMap.get(this._arrayHandleMask & handle as unknown as number); if (array === undefined) { if (DEBUG) console.warn('invalid array pool handle'); return; } if (array[0] === 0) { return; } else { --array[0]; } } /** * Clear the contents of the array. * @param handle Handle to be cleared.
*/ public clear (handle: IHandle<P>) { const array = this._arrayMap.get(this._arrayHandleMask & handle as unknown as number); if (array === undefined) { if (DEBUG) console.warn('invalid array pool handle'); return; } array[0] = 0; } }; enum PoolType { // objects RASTERIZER_STATE, DEPTH_STENCIL_STATE, BLEND_STATE, DESCRIPTOR_SETS, SHADER, INPUT_ASSEMBLER, PIPELINE_LAYOUT, FRAMEBUFFER, // buffers PASS = 100, SUB_MODEL, MODEL, SCENE, CAMERA, NODE, ROOT, AABB, RENDER_WINDOW, FRUSTUM, AMBIENT, FOG, SKYBOX, SHADOW, // array SUB_MODEL_ARRAY = 200, MODEL_ARRAY, } export const NULL_HANDLE = 0 as unknown as IHandle<any>; export type RasterizerStateHandle = IHandle<PoolType.RASTERIZER_STATE>; export type DepthStencilStateHandle = IHandle<PoolType.DEPTH_STENCIL_STATE>; export type BlendStateHandle = IHandle<PoolType.BLEND_STATE>; export type DescriptorSetHandle = IHandle<PoolType.DESCRIPTOR_SETS>; export type ShaderHandle = IHandle<PoolType.SHADER>; export type InputAssemblerHandle = IHandle<PoolType.INPUT_ASSEMBLER>; export type PipelineLayoutHandle = IHandle<PoolType.PIPELINE_LAYOUT>; export type FramebufferHandle = IHandle<PoolType.FRAMEBUFFER>; export type PassHandle = IHandle<PoolType.PASS>; export type SubModelHandle = IHandle<PoolType.SUB_MODEL>; export type ModelHandle = IHandle<PoolType.MODEL>; export type SceneHandle = IHandle<PoolType.SCENE>; export type CameraHandle = IHandle<PoolType.CAMERA>; export type NodeHandle = IHandle<PoolType.NODE>; export type RootHandle = IHandle<PoolType.ROOT>; export type AABBHandle = IHandle<PoolType.AABB>; export type FrustumHandle = IHandle<PoolType.FRUSTUM>; export type RenderWindowHandle = IHandle<PoolType.RENDER_WINDOW>; export type SubModelArrayHandle = IHandle<PoolType.SUB_MODEL_ARRAY>; export type ModelArrayHandle = IHandle<PoolType.MODEL_ARRAY>; export type AmbientHandle = IHandle<PoolType.AMBIENT>; export type FogHandle = IHandle<PoolType.FOG>; export type SkyboxHandle = IHandle<PoolType.SKYBOX>; export type ShadowsHandle = IHandle<PoolType.SHADOW>; // don't reuse any of these data-only structs, for GFX objects may directly reference them export const RasterizerStatePool = new ObjectPool(PoolType.RASTERIZER_STATE, () => new GFXRasterizerState()); export const DepthStencilStatePool = new ObjectPool(PoolType.DEPTH_STENCIL_STATE, () => new GFXDepthStencilState()); export const BlendStatePool = new ObjectPool(PoolType.BLEND_STATE, () => new GFXBlendState()); export const ShaderPool = new ObjectPool(PoolType.SHADER, (args: [GFXDevice, GFXShaderInfo], obj?: GFXShader) => obj ? (obj.initialize(args[1]), obj) : args[0].createShader(args[1]), (obj: GFXShader) => obj && obj.destroy(), ); export const DSPool = new ObjectPool(PoolType.DESCRIPTOR_SETS, (args: [GFXDevice, IGFXDescriptorSetInfo], obj?: GFXDescriptorSet) => obj ? (obj.initialize(args[1]), obj) : args[0].createDescriptorSet(args[1]), (obj: GFXDescriptorSet) => obj && obj.destroy(), ); export const IAPool = new ObjectPool(PoolType.INPUT_ASSEMBLER, (args: [GFXDevice, IGFXInputAssemblerInfo], obj?: GFXInputAssembler) => obj ? (obj.initialize(args[1]), obj) : args[0].createInputAssembler(args[1]), (obj: GFXInputAssembler) => obj && obj.destroy(), ); export const PipelineLayoutPool = new ObjectPool(PoolType.PIPELINE_LAYOUT, (args: [GFXDevice, IGFXPipelineLayoutInfo], obj?: GFXPipelineLayout) => obj ? 
(obj.initialize(args[1]), obj) : args[0].createPipelineLayout(args[1]), (obj: GFXPipelineLayout) => obj && obj.destroy(), ); export const FramebufferPool = new ObjectPool(PoolType.FRAMEBUFFER, (args: [GFXDevice, IGFXFramebufferInfo], obj?: GFXFramebuffer) => obj ? (obj.initialize(args[1]), obj) : args[0].createFramebuffer(args[1]), (obj: GFXFramebuffer) => obj && obj.destroy(), ); export const SubModelArrayPool = new ArrayPool<PoolType.SUB_MODEL_ARRAY, PoolType.SUB_MODEL>(PoolType.SUB_MODEL_ARRAY, 10); export const ModelArrayPool = new ArrayPool<PoolType.MODEL_ARRAY, PoolType.MODEL>(PoolType.MODEL_ARRAY, 50, 10); export enum PassView { PRIORITY, STAGE, PHASE, BATCHING_SCHEME, PRIMITIVE, DYNAMIC_STATES, HASH, RASTERIZER_STATE, // handle DEPTH_STENCIL_STATE, // handle BLEND_STATE, // handle DESCRIPTOR_SET, // handle PIPELINE_LAYOUT, // handle COUNT, } interface IPassViewType extends BufferTypeManifest<typeof PassView> { [PassView.PRIORITY]: number; [PassView.STAGE]: RenderPassStage; [PassView.PHASE]: number; [PassView.BATCHING_SCHEME]: BatchingSchemes; [PassView.PRIMITIVE]: GFXPrimitiveMode; [PassView.DYNAMIC_STATES]: GFXDynamicStateFlags; [PassView.HASH]: number; [PassView.RASTERIZER_STATE]: RasterizerStateHandle; [PassView.DEPTH_STENCIL_STATE]: DepthStencilStateHandle; [PassView.BLEND_STATE]: BlendStateHandle; [PassView.DESCRIPTOR_SET]: DescriptorSetHandle; [PassView.PIPELINE_LAYOUT]: PipelineLayoutHandle; [PassView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const PassPool = new BufferPool<PoolType.PASS, Uint32Array, typeof PassView, IPassViewType>(PoolType.PASS, Uint32Array, PassView); export enum SubModelView { PRIORITY, PASS_COUNT, PASS_0, // handle PASS_1, // handle PASS_2, // handle PASS_3, // handle SHADER_0, // handle SHADER_1, // handle SHADER_2, // handle SHADER_3, // handle DESCRIPTOR_SET, // handle INPUT_ASSEMBLER, // handle COUNT, } interface ISubModelViewType extends BufferTypeManifest<typeof SubModelView> { [SubModelView.PRIORITY]: number; [SubModelView.PASS_COUNT]: number; [SubModelView.PASS_0]: PassHandle; [SubModelView.PASS_1]: PassHandle; [SubModelView.PASS_2]: PassHandle; [SubModelView.PASS_3]: PassHandle; [SubModelView.SHADER_0]: ShaderHandle; [SubModelView.SHADER_1]: ShaderHandle; [SubModelView.SHADER_2]: ShaderHandle; [SubModelView.SHADER_3]: ShaderHandle; [SubModelView.DESCRIPTOR_SET]: DescriptorSetHandle; [SubModelView.INPUT_ASSEMBLER]: InputAssemblerHandle; [SubModelView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. 
export const SubModelPool = new BufferPool<PoolType.SUB_MODEL, Uint32Array, typeof SubModelView, ISubModelViewType> (PoolType.SUB_MODEL, Uint32Array, SubModelView); export enum ModelView { ENABLED, VIS_FLAGS, CAST_SHADOW, WORLD_BOUNDS, // handle NODE, // handle TRANSFORM, // handle SUB_MODEL_ARRAY, // array handle COUNT } interface IModelViewType extends BufferTypeManifest<typeof ModelView> { [ModelView.ENABLED]: number; [ModelView.VIS_FLAGS]: number; [ModelView.CAST_SHADOW]: number; [ModelView.WORLD_BOUNDS]: AABBHandle; [ModelView.NODE]: NodeHandle; [ModelView.TRANSFORM]: NodeHandle; [ModelView.SUB_MODEL_ARRAY]: SubModelArrayHandle; [ModelView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const ModelPool = new BufferPool<PoolType.MODEL, Uint32Array, typeof ModelView, IModelViewType>(PoolType.MODEL, Uint32Array, ModelView); export enum AABBView { CENTER, // Vec3 HALF_EXTENSION = 3, // Vec3 COUNT = 6 } interface IAABBViewType extends BufferTypeManifest<typeof AABBView> { [AABBView.CENTER]: Vec3; [AABBView.HALF_EXTENSION]: Vec3; [AABBView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const AABBPool = new BufferPool<PoolType.AABB, Float32Array, typeof AABBView, IAABBViewType>(PoolType.AABB, Float32Array, AABBView); export enum SceneView { MAIN_LIGHT, // TODO AMBIENT, // TODO FOG, // TODO SKYBOX, // TODO PLANAR_SHADOW, // TODO MODEL_ARRAY, // array handle COUNT, } interface ISceneViewType extends BufferTypeManifest<typeof SceneView> { [SceneView.MAIN_LIGHT]: number; [SceneView.AMBIENT]: number; [SceneView.FOG]: number; [SceneView.SKYBOX]: number; [SceneView.PLANAR_SHADOW]: number; [SceneView.MODEL_ARRAY]: ModelArrayHandle; [SceneView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. 
export const ScenePool = new BufferPool<PoolType.SCENE, Uint32Array, typeof SceneView, ISceneViewType>(PoolType.SCENE, Uint32Array, SceneView); export enum CameraView { WIDTH, HEIGHT, EXPOSURE, CLEAR_FLAG, CLEAR_DEPTH, CLEAR_STENCIL, NODE, // handle SCENE, // handle FRUSTUM, // handle FORWARD, // Vec3 POSITION = 12, // Vec3 VIEW_PORT = 15, // Rect CLEAR_COLOR = 19, // Color MAT_VIEW = 23, // Mat4 MAT_VIEW_PROJ = 39, // Mat4 MAT_VIEW_PROJ_INV = 55, // Mat4 MAT_PROJ = 71, // Mat4 MAT_PROJ_INV = 87, // Mat4 COUNT = 103 } interface ICameraViewType extends BufferTypeManifest<typeof CameraView> { [CameraView.WIDTH]: number; [CameraView.HEIGHT]: number; [CameraView.EXPOSURE]: number; [CameraView.CLEAR_FLAG]: GFXClearFlag; [CameraView.CLEAR_DEPTH]: number; [CameraView.CLEAR_STENCIL]: number; [CameraView.NODE]: NodeHandle; [CameraView.SCENE]: SceneHandle; [CameraView.FRUSTUM]: FrustumHandle; [CameraView.FORWARD]: Vec3; [CameraView.POSITION]: Vec3; [CameraView.VIEW_PORT]: Rect; [CameraView.CLEAR_COLOR]: Color; [CameraView.MAT_VIEW]: Mat4; [CameraView.MAT_VIEW_PROJ]: Mat4; [CameraView.MAT_VIEW_PROJ_INV]: Mat4; [CameraView.MAT_PROJ]: Mat4; [CameraView.MAT_PROJ_INV]: Mat4; [CameraView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const CameraPool = new BufferPool<PoolType.CAMERA, Float32Array, typeof CameraView, ICameraViewType>(PoolType.CAMERA, Float32Array, CameraView); export enum NodeView { LAYER, WORLD_SCALE, // Vec3 WORLD_POSITION = 4, // Vec3 WORLD_ROTATION = 7, // Quat WORLD_MATRIX = 11, // Mat4 COUNT = 27 } interface INodeViewType extends BufferTypeManifest<typeof NodeView> { [NodeView.LAYER]: Layers.Enum; [NodeView.WORLD_SCALE]: Vec3; [NodeView.WORLD_POSITION]: Vec3; [NodeView.WORLD_ROTATION]: Quat; [NodeView.WORLD_MATRIX]: Mat4; [NodeView.COUNT]: never; } // @ts-ignore Don't alloc memory for Vec3, Quat, Mat4 on web, as they are accessed by class member variable. if (!JSB) { delete NodeView[NodeView.COUNT]; NodeView[NodeView.COUNT = NodeView.LAYER + 1] = 'COUNT'; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const NodePool = new BufferPool<PoolType.NODE, Float32Array, typeof NodeView, INodeViewType>(PoolType.NODE, Float32Array, NodeView); export enum RootView { CUMULATIVE_TIME, FRAME_TIME, COUNT } interface IRootViewType extends BufferTypeManifest<typeof RootView> { [RootView.CUMULATIVE_TIME]: number; [RootView.FRAME_TIME]: number; [RootView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. 
export const RootPool = new BufferPool<PoolType.ROOT, Float32Array, typeof RootView, IRootViewType>(PoolType.ROOT, Float32Array, RootView, 1); export enum RenderWindowView { HAS_ON_SCREEN_ATTACHMENTS, HAS_OFF_SCREEN_ATTACHMENTS, FRAMEBUFFER, // handle COUNT } interface IRenderWindowViewType extends BufferTypeManifest<typeof RenderWindowView> { [RenderWindowView.HAS_ON_SCREEN_ATTACHMENTS]: number; [RenderWindowView.HAS_OFF_SCREEN_ATTACHMENTS]: number; [RenderWindowView.FRAMEBUFFER]: FramebufferHandle; [RenderWindowView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const RenderWindowPool = new BufferPool<PoolType.RENDER_WINDOW, Uint32Array, typeof RenderWindowView, IRenderWindowViewType> (PoolType.RENDER_WINDOW, Uint32Array, RenderWindowView, 2); export enum FrustumView { VERTICES, // Vec3[8] PLANES = 24, // plane[6] COUNT = 48 } interface IFrustumViewType extends BufferTypeManifest<typeof FrustumView> { [FrustumView.VERTICES]: Vec3; [FrustumView.PLANES]: plane; [FrustumView.COUNT]: never; } // Theoretically we only have to declare the type view here while all the other arguments can be inferred. // but before the official support of Partial Type Argument Inference releases, (microsoft/TypeScript#26349) // we'll have to explicitly declare all these types. export const FrustumPool = new BufferPool<PoolType.FRUSTUM, Float32Array, typeof FrustumView, IFrustumViewType>(PoolType.FRUSTUM, Float32Array, FrustumView); export enum AmbientView { ENABLE, ILLUM, SKY_COLOR, // vec4 GROUND_ALBEDO = 6, // vec4 COUNT = 10 } interface IAmbientViewType extends BufferTypeManifest<typeof AmbientView> { [AmbientView.ENABLE]: number; [AmbientView.ILLUM]: number; [AmbientView.SKY_COLOR]: Color; [AmbientView.GROUND_ALBEDO]: Color; [AmbientView.COUNT]: never; } // @ts-ignore Don't alloc memory for Vec3, Quat, Mat4 on web, as they are accessed by class member variable. if (!JSB) {delete AmbientView[AmbientView.COUNT]; AmbientView[AmbientView.COUNT = AmbientView.ILLUM + 1] = 'COUNT'; } export const AmbientPool = new BufferPool<PoolType.AMBIENT, Float32Array, typeof AmbientView, IAmbientViewType>(PoolType.AMBIENT, Float32Array, AmbientView, 1); export enum SkyboxView { ENABLE, IS_RGBE, USE_IBL, MODEL, COUNT } interface ISkyboxViewType extends BufferTypeManifest<typeof SkyboxView> { [SkyboxView.ENABLE]: number; [SkyboxView.IS_RGBE]: number; [SkyboxView.USE_IBL]: number; [SkyboxView.MODEL]: ModelHandle; [SkyboxView.COUNT]: never; } export const SkyboxPool = new BufferPool<PoolType.SKYBOX, Float32Array, typeof SkyboxView, ISkyboxViewType>(PoolType.SKYBOX, Float32Array, SkyboxView, 1); export enum FogView { ENABLE, TYPE, DENSITY, START, END, ATTEN, TOP, RANGE, COLOR, COUNT = 12 } interface IFogViewType extends BufferTypeManifest<typeof FogView> { [FogView.ENABLE]: number; [FogView.TYPE]: number; [FogView.DENSITY]: number; [FogView.START]: number; [FogView.END]: number; [FogView.ATTEN]: number; [FogView.TOP]: number; [FogView.RANGE]: number; [FogView.COLOR]: Color; [FogView.COUNT]: never; } // @ts-ignore Don't alloc memory for Vec3, Quat, Mat4 on web, as they are accessed by class member variable. 
if (!JSB) {delete FogView[FogView.COUNT]; FogView[FogView.COUNT = FogView.RANGE + 1] = 'COUNT'; } export const FogPool = new BufferPool<PoolType.FOG, Float32Array, typeof FogView, IFogViewType>(PoolType.FOG, Float32Array, FogView); export enum ShadowsView { ENABLE, DIRTY, TYPE, DISTANCE, INSTANCE_PASS, PLANAR_PASS, NEAR, FAR, ASPECT, PCF_TYPE, ORTHO_SIZE, SIZE, // Vec2 NORMAL = 13, // Vec3 COLOR = 16, // Vec4 SPHERE = 20, // Vec4 COUNT = 24 } interface IShadowsViewType extends BufferTypeManifest<typeof ShadowsView> { [ShadowsView.ENABLE]: number; [ShadowsView.TYPE]: number; [ShadowsView.DISTANCE]: number; [ShadowsView.INSTANCE_PASS]: PassHandle; [ShadowsView.PLANAR_PASS]: PassHandle; [ShadowsView.NEAR]: number; [ShadowsView.FAR]: number; [ShadowsView.ASPECT]: number; [ShadowsView.PCF_TYPE]: number; [ShadowsView.DIRTY]: number; [ShadowsView.ORTHO_SIZE]: number; [ShadowsView.SIZE]: Vec2; [ShadowsView.NORMAL]: Vec3; [ShadowsView.COLOR]: Color; [ShadowsView.SPHERE]: Vec4; [ShadowsView.COUNT]: never; } // @ts-ignore Don't alloc memory for Vec3, Quat, Mat4 on web, as they are accessed by class member variable. if (!JSB) {delete ShadowsView[ShadowsView.COUNT]; ShadowsView[ShadowsView.COUNT = ShadowsView.ORTHO_SIZE + 1] = 'COUNT'; } export const ShadowsPool = new BufferPool<PoolType.SHADOW, Float32Array, typeof ShadowsView, IShadowsViewType>(PoolType.SHADOW, Float32Array, ShadowsView, 1);
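For reference, the handle arithmetic in the BufferPool above packs a chunk index, an entry index, and a validity flag into a single 32-bit integer. The following is a minimal sketch of that layout, assuming the default entryBits = 8 (256 entries per chunk); it is written in Java purely for illustration, and the class and method names here are hypothetical, not part of the engine's API:

```java
// Sketch of the BufferPool handle layout, assuming entryBits = 8.
// Illustrative only; the real pool is the TypeScript class above.
public class HandleLayoutDemo {

    static final int ENTRY_BITS = 8;
    static final int ENTRY_MASK = (1 << ENTRY_BITS) - 1;     // low 8 bits: entry index
    static final int POOL_FLAG  = 1 << 30;                   // bit 30: validity flag
    static final int CHUNK_MASK = ~(ENTRY_MASK | POOL_FLAG); // remaining bits: chunk index

    // Pack a chunk index and an entry index into a non-zero handle.
    static int encode(int chunk, int entry) {
        return (chunk << ENTRY_BITS) + entry + POOL_FLAG;
    }

    public static void main(String[] args) {
        int handle = encode(3, 17);
        int chunk = (CHUNK_MASK & handle) >> ENTRY_BITS; // recovers 3
        int entry = ENTRY_MASK & handle;                 // recovers 17
        System.out.printf("handle=0x%08X chunk=%d entry=%d%n", handle, chunk, entry);
    }
}
```

Because the flag occupies bit 30, a freshly allocated handle can never be zero, which is what lets NULL_HANDLE double as the "invalid" sentinel in the exports above.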
package org.dxworks.jiraminer.configuration; public class JiraMinerConfigValidation { public static void notNull(Object o, String message, Object... values) { if (o == null) throw new InvalidConfigurationException(String.format(message, values), new NullPointerException()); } }
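A hypothetical usage sketch of this validator follows; the property name and lookup call are invented for illustration and are not part of the project:

```java
// Somewhere during configuration loading; "jira.url" is an invented key.
String jiraUrl = properties.get("jira.url");
JiraMinerConfigValidation.notNull(jiraUrl,
        "Required property %s is missing from the JiraMiner configuration", "jira.url");
```

If jiraUrl is null, this throws an InvalidConfigurationException carrying the String.format-expanded message, with a NullPointerException as its cause.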
/** * Based on <a href="http://schmidt.devlib.org/jiu/">Java Image Util</a>. * <p> * Note that the filter method is not thread-safe. * </p> * * @author Morten Nobel-Joergensen * @author Heinz Doerr */ public class ResampleOp extends AdvancedResizeOp { private static final int MAX_CHANNEL_VALUE = 255; private int nrChannels; private int srcWidth; private int srcHeight; private int dstWidth; private int dstHeight; static class SubSamplingData { // individual - per row or per column - nr of contributions private final int[] arrN; // 2Dim: [wid or hei][contrib] private final int[] arrPixel; // 2Dim: [wid or hei][contrib] private final float[] arrWeight; // the primary index length for the 2Dim arrays : arrPixel and arrWeight private final int numContributors; private SubSamplingData( int[] arrN, int[] arrPixel, float[] arrWeight, int numContributors ) { this.arrN = arrN; this.arrPixel = arrPixel; this.arrWeight = arrWeight; this.numContributors = numContributors; } public int getNumContributors() { return numContributors; } public int[] getArrN() { return arrN; } public float[] getArrWeight() { return arrWeight; } } private SubSamplingData horizontalSubsamplingData; private SubSamplingData verticalSubsamplingData; private final int threadCount = getRuntime().availableProcessors(); private final AtomicInteger multipleInvocationLock = new AtomicInteger(); private final ResampleFilter mFilter; public ResampleOp( final ResampleFilter filter, final int destWidth, final int destHeight ) { this( filter, createAbsolutionDimension( destWidth, destHeight ) ); } public ResampleOp( final ResampleFilter filter, ConstrainedDimension dimensionConstrain ) { super( dimensionConstrain ); mFilter = filter; } public BufferedImage doFilter( BufferedImage srcImg, BufferedImage dest, int dstWidth, int dstHeight ) { this.dstWidth = dstWidth; this.dstHeight = dstHeight; if( dstWidth < 3 || dstHeight < 3 ) { throw new IllegalArgumentException( "Target must be at least 3x3." ); } assert multipleInvocationLock.incrementAndGet() == 1 : "Multiple concurrent invocations detected"; final var srcType = srcImg.getType(); if( srcType == TYPE_BYTE_BINARY || srcType == TYPE_BYTE_INDEXED || srcType == TYPE_CUSTOM ) { srcImg = ImageUtils.convert( srcImg, srcImg.getColorModel().hasAlpha() ? 
TYPE_4BYTE_ABGR : TYPE_3BYTE_BGR ); } this.nrChannels = ImageUtils.nrChannels( srcImg ); assert nrChannels > 0; this.srcWidth = srcImg.getWidth(); this.srcHeight = srcImg.getHeight(); byte[][] workPixels = new byte[ srcHeight ][ dstWidth * nrChannels ]; // Pre-calculate sub-sampling horizontalSubsamplingData = createSubSampling( mFilter, srcWidth, dstWidth ); verticalSubsamplingData = createSubSampling( mFilter, srcHeight, dstHeight ); final BufferedImage srcImgCopy = srcImg; final byte[][] workPixelsCopy = workPixels; final Thread[] threads = new Thread[ threadCount - 1 ]; for( int i = 1; i < threadCount; i++ ) { final int finalI = i; threads[ i - 1 ] = new Thread( () -> horizontallyFromSrcToWork( srcImgCopy, workPixelsCopy, finalI, threadCount ) ); threads[ i - 1 ].start(); } horizontallyFromSrcToWork( srcImgCopy, workPixelsCopy, 0, threadCount ); waitForAllThreads( threads ); byte[] outPixels = new byte[ dstWidth * dstHeight * nrChannels ]; // -------------------------------------------------- // Apply filter to sample vertically from Work to Dst // -------------------------------------------------- final byte[] outPixelsCopy = outPixels; for( int i = 1; i < threadCount; i++ ) { final int finalI = i; threads[ i - 1 ] = new Thread( () -> verticalFromWorkToDst( workPixelsCopy, outPixelsCopy, finalI, threadCount ) ); threads[ i - 1 ].start(); } verticalFromWorkToDst( workPixelsCopy, outPixelsCopy, 0, threadCount ); waitForAllThreads( threads ); //noinspection UnusedAssignment workPixels = null; // free memory final BufferedImage out; if( dest != null && dstWidth == dest.getWidth() && dstHeight == dest.getHeight() ) { out = dest; int nrDestChannels = ImageUtils.nrChannels( dest ); if( nrDestChannels != nrChannels ) { final var errorMsg = format( "Destination image must be compatible with source image.
Source " + "image had %d channels, destination image had %d channels", nrChannels, nrDestChannels ); throw new RuntimeException( errorMsg ); } } else { out = new BufferedImage( dstWidth, dstHeight, getResultBufferedImageType( srcImg ) ); } ImageUtils.setBGRPixels( outPixels, out, 0, 0, dstWidth, dstHeight ); assert multipleInvocationLock.decrementAndGet() == 0 : "Multiple " + "concurrent invocations detected"; return out; } private void waitForAllThreads( final Thread[] threads ) { try { for( final Thread thread : threads ) { thread.join( Long.MAX_VALUE ); } } catch( final InterruptedException e ) { currentThread().interrupt(); throw new RuntimeException( e ); } } static SubSamplingData createSubSampling( ResampleFilter filter, int srcSize, int dstSize ) { final float scale = (float) dstSize / (float) srcSize; final int[] arrN = new int[ dstSize ]; final int numContributors; final float[] arrWeight; final int[] arrPixel; final float fwidth = filter.getSamplingRadius(); float centerOffset = 0.5f / scale; if( scale < 1.0f ) { final float width = fwidth / scale; // Add 2 to be safe with the ceiling numContributors = (int) (width * 2.0f + 2); arrWeight = new float[ dstSize * numContributors ]; arrPixel = new int[ dstSize * numContributors ]; final float fNormFac = (float) (1f / (Math.ceil( width ) / fwidth)); for( int i = 0; i < dstSize; i++ ) { final int subindex = i * numContributors; float center = i / scale + centerOffset; int left = (int) Math.floor( center - width ); int right = (int) Math.ceil( center + width ); for( int j = left; j <= right; j++ ) { float weight; weight = filter.apply( (center - j) * fNormFac ); if( weight == 0.0f ) { continue; } int n; if( j < 0 ) { n = -j; } else if( j >= srcSize ) { n = srcSize - j + srcSize - 1; } else { n = j; } int k = arrN[ i ]; //assert k == j-left:String.format("%s = %s %s", k,j,left); arrN[ i ]++; if( n < 0 || n >= srcSize ) { weight = 0.0f; // Flag that cell should not be used } arrPixel[ subindex + k ] = n; arrWeight[ subindex + k ] = weight; } // normalize the filter's weights so the sum equals 1.0, very // important for avoiding box type of artifacts final int max = arrN[ i ]; float tot = 0; for( int k = 0; k < max; k++ ) { tot += arrWeight[ subindex + k ]; } if( tot != 0f ) { // 0 should never happen except bug in filter for( int k = 0; k < max; k++ ) { arrWeight[ subindex + k ] /= tot; } } } } else { // super-sampling // Scales from smaller to bigger height numContributors = (int) (fwidth * 2.0f + 1); arrWeight = new float[ dstSize * numContributors ]; arrPixel = new int[ dstSize * numContributors ]; // for( int i = 0; i < dstSize; i++ ) { final int subindex = i * numContributors; float center = i / scale + centerOffset; int left = (int) Math.floor( center - fwidth ); int right = (int) Math.ceil( center + fwidth ); for( int j = left; j <= right; j++ ) { float weight = filter.apply( center - j ); if( weight == 0.0f ) { continue; } int n; if( j < 0 ) { n = -j; } else if( j >= srcSize ) { n = srcSize - j + srcSize - 1; } else { n = j; } int k = arrN[ i ]; arrN[ i ]++; if( n < 0 || n >= srcSize ) { weight = 0.0f; // Flag that cell should not be used } arrPixel[ subindex + k ] = n; arrWeight[ subindex + k ] = weight; } // normalize the filter's weights so the sum equals 1.0, very // important for avoiding box type of artifacts final int max = arrN[ i ]; float tot = 0; for( int k = 0; k < max; k++ ) { tot += arrWeight[ subindex + k ]; } assert tot != 0 : "should never happen except bug in filter"; if( tot != 0f ) { for( int k = 0; k <
max; k++ ) { arrWeight[ subindex + k ] /= tot; } } } } return new SubSamplingData( arrN, arrPixel, arrWeight, numContributors ); } private void verticalFromWorkToDst( byte[][] workPixels, byte[] outPixels, int start, int delta ) { if( nrChannels == 1 ) { verticalFromWorkToDstGray( workPixels, outPixels, start, threadCount ); return; } boolean useChannel3 = nrChannels > 3; for( int x = start; x < dstWidth; x += delta ) { final int xLocation = x * nrChannels; for( int y = dstHeight - 1; y >= 0; y-- ) { final int yTimesNumContributors = y * verticalSubsamplingData.numContributors; final int max = verticalSubsamplingData.arrN[ y ]; final int sampleLocation = (y * dstWidth + x) * nrChannels; float sample0 = 0.0f; float sample1 = 0.0f; float sample2 = 0.0f; float sample3 = 0.0f; int index = yTimesNumContributors; for( int j = max - 1; j >= 0; j-- ) { int valueLocation = verticalSubsamplingData.arrPixel[ index ]; float arrWeight = verticalSubsamplingData.arrWeight[ index ]; sample0 += (workPixels[ valueLocation ][ xLocation ] & 0xff) * arrWeight; sample1 += (workPixels[ valueLocation ][ xLocation + 1 ] & 0xff) * arrWeight; sample2 += (workPixels[ valueLocation ][ xLocation + 2 ] & 0xff) * arrWeight; if( useChannel3 ) { sample3 += (workPixels[ valueLocation ][ xLocation + 3 ] & 0xff) * arrWeight; } index++; } outPixels[ sampleLocation ] = toByte( sample0 ); outPixels[ sampleLocation + 1 ] = toByte( sample1 ); outPixels[ sampleLocation + 2 ] = toByte( sample2 ); if( useChannel3 ) { outPixels[ sampleLocation + 3 ] = toByte( sample3 ); } } } } private void verticalFromWorkToDstGray( byte[][] workPixels, byte[] outPixels, int start, int delta ) { for( int x = start; x < dstWidth; x += delta ) { for( int y = dstHeight - 1; y >= 0; y-- ) { final int yTimesNumContributors = y * verticalSubsamplingData.numContributors; final int max = verticalSubsamplingData.arrN[ y ]; final int sampleLocation = y * dstWidth + x; float sample0 = 0.0f; int index = yTimesNumContributors; for( int j = max - 1; j >= 0; j-- ) { int valueLocation = verticalSubsamplingData.arrPixel[ index ]; float arrWeight = verticalSubsamplingData.arrWeight[ index ]; sample0 += (workPixels[ valueLocation ][ x ] & 0xff) * arrWeight; index++; } outPixels[ sampleLocation ] = toByte( sample0 ); } } } /** * Apply filter to sample horizontally from Src to Work */ private void horizontallyFromSrcToWork( BufferedImage srcImg, byte[][] workPixels, int start, int delta ) { if( nrChannels == 1 ) { horizontallyFromSrcToWorkGray( srcImg, workPixels, start, delta ); return; } // Used if we work on int based bitmaps, later used to keep channel values final int[] tempPixels = new int[ srcWidth ]; // create reusable row to minimize memory overhead final byte[] srcPixels = new byte[ srcWidth * nrChannels ]; final boolean useChannel3 = nrChannels > 3; for( int k = start; k < srcHeight; k = k + delta ) { ImageUtils.getPixelsBGR( srcImg, k, srcWidth, srcPixels, tempPixels ); for( int i = dstWidth - 1; i >= 0; i-- ) { int sampleLocation = i * nrChannels; final int max = horizontalSubsamplingData.arrN[ i ]; float sample0 = 0.0f; float sample1 = 0.0f; float sample2 = 0.0f; float sample3 = 0.0f; int index = i * horizontalSubsamplingData.numContributors; for( int j = max - 1; j >= 0; j-- ) { float arrWeight = horizontalSubsamplingData.arrWeight[ index ]; int pixelIndex = horizontalSubsamplingData.arrPixel[ index ] * nrChannels; sample0 += (srcPixels[ pixelIndex ] & 0xff) * arrWeight; sample1 += (srcPixels[ pixelIndex + 1 ] & 0xff) * arrWeight; sample2 += (srcPixels[ 
pixelIndex + 2 ] & 0xff) * arrWeight; if( useChannel3 ) { sample3 += (srcPixels[ pixelIndex + 3 ] & 0xff) * arrWeight; } index++; } workPixels[ k ][ sampleLocation ] = toByte( sample0 ); workPixels[ k ][ sampleLocation + 1 ] = toByte( sample1 ); workPixels[ k ][ sampleLocation + 2 ] = toByte( sample2 ); if( useChannel3 ) { workPixels[ k ][ sampleLocation + 3 ] = toByte( sample3 ); } } } } /** * Apply filter to sample horizontally from Src to Work */ private void horizontallyFromSrcToWorkGray( BufferedImage srcImg, byte[][] workPixels, int start, int delta ) { // Used if we work on int based bitmaps, later used to keep channel values final int[] tempPixels = new int[ srcWidth ]; // create reusable row to minimize memory overhead final byte[] srcPixels = new byte[ srcWidth ]; for( int k = start; k < srcHeight; k = k + delta ) { ImageUtils.getPixelsBGR( srcImg, k, srcWidth, srcPixels, tempPixels ); for( int i = dstWidth - 1; i >= 0; i-- ) { final int max = horizontalSubsamplingData.arrN[ i ]; float sample0 = 0.0f; int index = i * horizontalSubsamplingData.numContributors; for( int j = max - 1; j >= 0; j-- ) { float arrWeight = horizontalSubsamplingData.arrWeight[ index ]; int pixelIndex = horizontalSubsamplingData.arrPixel[ index ]; sample0 += (srcPixels[ pixelIndex ] & 0xff) * arrWeight; index++; } workPixels[ k ][ i ] = toByte( sample0 ); } } } private static byte toByte( final float f ) { if( f < 0 ) { return 0; } return (byte) (f > MAX_CHANNEL_VALUE ? MAX_CHANNEL_VALUE : f + 0.5f); } protected int getResultBufferedImageType( BufferedImage srcImg ) { return nrChannels == 3 ? TYPE_3BYTE_BGR : nrChannels == 4 ? TYPE_4BYTE_ABGR : srcImg.getSampleModel().getDataType() == TYPE_USHORT ? TYPE_USHORT_GRAY : TYPE_BYTE_GRAY; } }
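The heart of createSubSampling above is the contributor table: for every destination coordinate it records which source pixels participate and with what filter weights, normalized to sum to 1 so that overall brightness is preserved. Below is a simplified sketch of that idea for a single destination sample, using a radius-1 triangle (tent) filter as a stand-in for any ResampleFilter; all names are illustrative, and the edge-mirroring and fNormFac details of the real method are omitted:

```java
// Sketch of the contributor-weight scheme used when downscaling: one
// destination pixel gathers a window of source pixels, weights them
// with the filter, then normalizes the weights to sum to 1.
public class WeightDemo {

    // Radius-1 triangle (tent) filter; stands in for any ResampleFilter.
    static float triangle(float x) {
        float ax = Math.abs(x);
        return ax < 1f ? 1f - ax : 0f;
    }

    public static void main(String[] args) {
        float scale = 2f / 5f;                   // downscale 5 px -> 2 px
        int i = 1;                               // destination pixel index
        float width = 1f / scale;                // radius widened by 1/scale
        float center = i / scale + 0.5f / scale; // same centerOffset as above

        int left = (int) Math.floor(center - width);
        int right = (int) Math.ceil(center + width);

        float[] w = new float[right - left + 1];
        float tot = 0f;
        for (int j = left; j <= right; j++) {
            // Compress the kernel by `scale`, mirroring how the real code
            // stretches the sampling window when shrinking the image.
            w[j - left] = triangle((center - j) * scale);
            tot += w[j - left];
        }
        // Normalize so the weights sum to 1; skipping this step causes
        // visible brightness drift and boxy artifacts.
        for (int k = 0; k < w.length; k++) {
            w[k] /= tot;
            System.out.printf("src %d -> weight %.3f%n", left + k, w[k]);
        }
    }
}
```

Running this prints a small symmetric window of weights around source position 3.75 that sums to 1, which is exactly the invariant the normalization loops in createSubSampling enforce per destination pixel.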
A covalent organic framework bearing thioether pendant arms for selective detection and recovery of Au from ultra-low concentration aqueous solution. A fluorescent covalent organic framework (COF), featuring precise distribution of thioether pendant arms inside the cavity, was designed. The thioether-functionalized COF exhibits selective sensing and capture of Au ions at ultra-trace levels in water with high sensitivity, selectivity and adsorption capacity, which makes it an excellent candidate for selective detection and recovery of Au.
package com.github.teocci.udpsimglethread.listeners; import com.github.teocci.udpsimglethread.model.DeviceInfo; /** * Created by teocci. * * @author <EMAIL> on 2017/Mar/31 */ public interface ContactUpdateReceiver { void onDeviceUpdate(final DeviceInfo[] deviceInfo); void onDeviceRegistered(final String unique, boolean registered); }
package controller import ( "time" "go.charczuk.com/project/kana-server/pkg/types" ) // CreateHomeViewModel creates a home view model. func CreateHomeViewModel(all []*types.Quiz) (hvm HomeViewModel) { hvm.TotalQuizzes = len(all) for _, q := range all { hvm.TotalQuizResults += len(q.Results) for _, qr := range q.Results { if qr.Correct() { hvm.TotalQuizResultsCorrect++ } hvm.TotalQuizDuration += qr.Elapsed() } } hvm.Quizzes = all return } // HomeViewModel is the viewmodel for the home page. type HomeViewModel struct { TotalQuizzes int TotalQuizResults int TotalQuizResultsCorrect int TotalQuizDuration time.Duration Quizzes []*types.Quiz } // TotalQuizCorrectPct returns the percentage of results correct. func (hvm HomeViewModel) TotalQuizCorrectPct() float64 { if hvm.TotalQuizResults == 0 { return 0 } return (float64(hvm.TotalQuizResultsCorrect) / float64(hvm.TotalQuizResults)) * 100.0 }
/** * Default set of commands. */ public class DefaultCommands implements Supplier<List<Command>> { private final List<Command> commands; @Inject public DefaultCommands(Injector injector) { this.commands = ImmutableList.<Command>builder() .add(injector.getInstance(VersionCommand.class)) .add(injector.getInstance(ExitCommand.class)) .add(injector.getInstance(CallProcedureCommand.class)) .add(injector.getInstance(ConnectCommand.class)) .add(injector.getInstance(CreateDatasetInstanceCommand.class)) .add(injector.getInstance(CreateStreamCommand.class)) .add(injector.getInstance(DeleteAppCommand.class)) .add(injector.getInstance(DeleteDatasetInstanceCommand.class)) .add(injector.getInstance(DeleteDatasetModuleCommand.class)) .add(injector.getInstance(DeployAppCommand.class)) .add(injector.getInstance(DeployDatasetModuleCommand.class)) .add(injector.getInstance(DescribeAppCommand.class)) .add(injector.getInstance(DescribeDatasetModuleCommand.class)) .add(injector.getInstance(DescribeDatasetTypeCommand.class)) .add(injector.getInstance(DescribeStreamCommand.class)) .add(injector.getInstance(ExecuteQueryCommand.class)) .addAll(injector.getInstance(GetProgramRunsCommandSet.class).getCommands()) .addAll(injector.getInstance(GetProgramInstancesCommandSet.class).getCommands()) .addAll(injector.getInstance(GetProgramLiveInfoCommandSet.class).getCommands()) .addAll(injector.getInstance(GetProgramLogsCommandSet.class).getCommands()) .addAll(injector.getInstance(GetProgramStatusCommandSet.class).getCommands()) .addAll(injector.getInstance(GetProgramRuntimeArgsCommandSet.class).getCommands()) .addAll(injector.getInstance(SetProgramRuntimeArgsCommandSet.class).getCommands()) .add(injector.getInstance(GetStreamEventsCommand.class)) .add(injector.getInstance(ListAllProgramsCommand.class)) .add(injector.getInstance(ListAppsCommand.class)) .add(injector.getInstance(ListDatasetInstancesCommand.class)) .add(injector.getInstance(ListDatasetModulesCommand.class)) .add(injector.getInstance(ListDatasetTypesCommand.class)) .addAll(injector.getInstance(ListProgramsCommandSet.class).getCommands()) .add(injector.getInstance(ListStreamsCommand.class)) .add(injector.getInstance(SendStreamEventCommand.class)) .addAll(injector.getInstance(SetProgramInstancesCommandSet.class).getCommands()) .add(injector.getInstance(SetStreamTTLCommand.class)) .addAll(injector.getInstance(StartProgramCommandSet.class).getCommands()) .addAll(injector.getInstance(StopProgramCommandSet.class).getCommands()) .add(injector.getInstance(TruncateDatasetInstanceCommand.class)) .add(injector.getInstance(TruncateStreamCommand.class)) .add(injector.getInstance(CallServiceCommand.class)) .add(injector.getInstance(GetServiceEndpointsCommand.class)) .build(); } @Override public List<Command> get() { return commands; } }
package server import ( "context" "strconv" "testing" "google.golang.org/grpc" "google.golang.org/grpc/codes" "github.com/lileio/account_service" "github.com/stretchr/testify/assert" ) func TestCreateSuccess(t *testing.T) { truncate() account := createAccount(t) assert.NotEmpty(t, account.Id) } func BenchmarkCreate(b *testing.B) { truncate() for i := 0; i < b.N; i++ { ctx := context.Background() req := &account_service.CreateAccountRequest{ Account: &account_service.Account{ Name: name, Email: "<EMAIL>@localhost" + strconv.Itoa(i), }, Password: <PASSWORD>, } _, err := as.Create(ctx, req) if err != nil { panic(err) } } } func TestCreateUniqueness(t *testing.T) { truncate() ctx := context.Background() a1 := createAccount(t) req2 := &account_service.CreateAccountRequest{ Account: a1, Password: <PASSWORD>, } a2, err := as.Create(ctx, req2) assert.NotNil(t, err) assert.Equal(t, grpc.Code(err), codes.AlreadyExists) assert.Nil(t, a2) } func TestCreateEmpty(t *testing.T) { truncate() ctx := context.Background() req := &account_service.CreateAccountRequest{} account, err := as.Create(ctx, req) assert.NotNil(t, err) assert.Nil(t, account) }
/* * tree_map_get -- searches for a value of the key */ uint64_t tree_map_get(struct tree_map *map, uint64_t key) { return tree_map_get_from_node(map->root, key); }
package com.qunar.qtalk.cricle.camel.common.dto; import com.google.common.collect.Lists; import lombok.Data; import java.util.List; /** * For the backend management system: splits post content into separate text and image parts. */ @Data public class CamelPostManageResultDto extends CamelPostDto { private PostContentDto postContent; }
Characterization of the Filum terminale as a neural progenitor cell niche in both rats and humans

Abstract
Neural stem cells (NSCs) reside in a unique microenvironment within the central nervous system (CNS) called the NSC niche. Although they are relatively rare, niches have been previously characterized in both the brain and spinal cord of adult animals. Recently, another potential NSC niche has been identified in the filum terminale (FT), which is a thin band of tissue at the caudal end of the spinal cord. While previous studies have demonstrated that NSCs can be isolated from the FT, the in vivo architecture of this tissue and its relation to other NSC niches in the CNS has not yet been established. In this article we report a histological analysis of the FT NSC niche in postnatal rats and humans. Immunohistochemical characterization reveals that the FT is mitotically active and its cells express similar markers to those in other CNS niches. In addition, the organization of the FT most closely resembles that of the adult spinal cord niche.

The filum terminale (FT) is a thin band of tissue that connects the spinal cord to the periosteum of the coccyx. It is present in all vertebrates and has been studied in a variety of species, including frogs, cats, rodents, and humans (Gamble, 1971; Nakayama, 1976; Gonzalez-Robles and Glusman, 1979; Chesler and Nicholson, 1985; Rethelyi et al., 2004; Boros et al., 2008). Although it is continuous with the spinal cord, the FT has a unique developmental history, which involves regression from a differentiated state to that of a more primitive tissue. Early in development, the FT is a fully differentiated section of the spinal cord that innervates the embryonic tail and is complete with nerve roots and associated dorsal root ganglia. As development progresses and the tail is absorbed, the FT undergoes a process that Streeter (1919) termed "dedifferentiation," which results in a tissue that appears to have regressed to an earlier developmental state (Kunitomo, 1918; Streeter, 1919; Tarlov, 1938). The postnatal FT is completely vestigial and expendable. It is not interconnected with the central nervous system (CNS) and does not participate in nervous control of the organism. It is routinely sectioned to treat Tethered Cord syndrome, which is a condition characterized by the abnormal attachment of tissue limiting the movement of the spinal cord within the vertebral column (Bakker-Niezen et al., 1984; Nakamura, 1984; Lad et al., 2007). Consequently, the FT is a potential source of autologous cells for cell replacement strategies. There have been several prior histological studies of the FT. Tarlov (1938) observed a loose organization of multiple cell types including neuroblasts, glial cells, and ependymal cells lining the central canal. This initial report has been confirmed and elaborated upon by a number of researchers, including Kernohan (1924), Choi et al. (1992), and Miller (1968). More recently, Rethelyi et al. (2004) used immunohistochemistry to confirm the existence of neuronal precursors and glial cells in the rat FT. Based on this cellular organization, they speculated that the FT may contain neural stem cells (Rethelyi et al., 2004). Recently, several laboratories including our own have isolated neural progenitor cells from the FT of both rats and humans. These cells have been shown to express neural progenitor cell markers such as Nestin, Dlx-2, Sox-2, and Musashi-1.
They have also been passaged multiple times as neurospheres and differentiated into neurons, astrocytes, and oligodendrocytes (Varghese et al., 2009; Arvidsson et al., 2011; Jha et al., 2013a,b). FT-derived neurospheres have been differentiated into motor neurons capable of innervating muscle tissue in vitro (Jha et al., 2013a,b), and FT-derived progenitors that have been transplanted into the chick or rat CNS survive and become migratory (Varghese et al., 2009; Jha et al., 2013a). The specific microenvironment that harbors neural stem cells (NSCs) has been well characterized elsewhere in the CNS, most notably in the subventricular zone (SVZ) (Alvarez-Buylla and Garcia-Verdugo, 2002), the hippocampal subgranular zone of the dentate gyrus (Seri et al., 2004), and the spinal cord (Hamilton et al., 2009; Hugnot and Franzen, 2011; Marichal et al., 2012). While each of these stem cell niches has its own unique architecture, they all share similarities in terms of the types of cells present and the immunocytochemical markers they express (Fuentealba et al., 2012). We were interested in determining whether the FT-derived progenitor cells that we have isolated in vitro reside in an in vivo niche that is similar to those described elsewhere in the CNS. Because the FT is a derivative of the embryonic spinal cord, we were particularly interested in comparing its histology to that of the adult spinal cord stem cell niche. In this article we report a histological analysis in both rats and humans using markers that have been characterized in progenitor cell niches elsewhere in the CNS. We find that the FT is mitotically active, that cells in the FT have immunocytochemical profiles similar to what is seen in other CNS niches, and that its organization closely resembles that of the spinal cord.

MATERIALS AND METHODS

Animals
Postnatal Sprague-Dawley rats (RRID:RGD_734476, Charles River, Wilmington, MA) aged 1, 10, or 82-367 days were housed at a controlled temperature and kept on a 12-hour light/dark cycle with food and water available ad libitum. All experiments were approved by the Institutional Animal Care and Use Committee at Harvard Medical School and were conducted in accordance with the NIH Guide for the Care and Use of Laboratory Animals.

Human autopsy tissue
Tissue from the FT was obtained from human autopsies (subjects aged 51-81 years old) that were performed in the autopsy suite at Brigham and Women's Hospital (Boston, MA). Each FT was removed, placed in ice-cold Hanks solution, and transported to the lab on ice, where it was fixed with 4% paraformaldehyde (PFA) for 2 hours before being processed as described below.

EdU (5-ethynyl-2′-deoxyuridine) labeling
Postnatal day (P)10 Sprague-Dawley rats were weighed, anesthetized with isoflurane (Phoenix Pharmaceuticals, Burlingame, CA), and injected subcutaneously with EdU (Invitrogen, Carlsbad, CA) at a concentration of 10 µg/g body weight. After a 4-hour incubation period, the animals were euthanized and the tissue was processed for detection of EdU labeling with the Click-iT Alexa Fluor Azide system (Invitrogen). Both frozen sections and whole mounts were incubated in the detection solution for 2 hours, washed extensively in phosphate-buffered saline (PBS), and mounted on Superfrost slides (VWR, Radnor, PA) using Vectashield Mounting Medium (Vector Laboratories, Burlingame, CA).

EdU/Ki67 double labeling
To assess proliferation rates, P2 Sprague-Dawley rats were weighed and injected subcutaneously with EdU (Invitrogen) at a concentration of 10 µg/g body weight.
The rats were then euthanized after 2 hours, 3 days, or 7 days, and the FT of each animal was dissected, postfixed in 4% PFA for 2 hours, cryoprotected in a 30% sucrose solution, and embedded in O.C.T. (Tissue-Tek, Torrance, CA). Frozen sections were cut, mounted on Superfrost slides (VWR), and incubated in a blocking solution that contained 0.2% bovine serum albumin (BSA), 10% goat serum, and 0.3% Triton X-100 in PBS (all reagents from Sigma, St. Louis, MO). A monoclonal mouse anti-human Ki67 antibody (RRID:AB_393778, BD Biosciences, San Jose, CA) was diluted in the same blocking solution (1:200) and applied to the frozen sections overnight at 4°C. After being washed in PBS, the slides were incubated for 2 hours at room temperature in a goat anti-mouse IgG Alexa Fluor 568 secondary antibody (RRID:AB_143011, Invitrogen) that had been diluted in the same blocking solution. Finally, after numerous, extensive washes with PBS, EdU labeling was detected with the Click-iT Alexa Fluor Azide system (Invitrogen).

Tissue preparation
Postnatal rats were anesthetized with isoflurane and sacrificed by cervical dislocation. Ventral laminectomies were performed, and the FT was dissected, washed once in Hanks Balanced Salt Solution (Lonza, Hopkinton, MA), and fixed in 4% PFA for 2 hours. After fixation, the FT were washed three times in PBS, and some samples were set aside for use as whole mounts. Both human and rat FT tissue underwent cryoprotection in a 30% sucrose solution in preparation for frozen sectioning. After sinking (thus ensuring tissue saturation), the tissue was transferred to a 50:50 30% sucrose/O.C.T. (Tissue-Tek) solution for 24 hours. Next, the tissue was incubated in O.C.T. for 2 hours, rapidly frozen with 2-methylbutane and liquid nitrogen, and stored at -20°C. Finally, the tissue was sectioned at 20 µm using a Leica CM3050S Cryostat (Leica Microsystems, Buffalo Grove, IL) and stored at -20°C.

Immunohistochemistry
Slides holding frozen sections were placed on a slide warmer for 2 hours and washed twice with PBS. Both frozen sections and whole mounts were then incubated for 1 hour in a permeabilization/blocking solution that contained 0.2% BSA, 2% fish skin gelatin, 10% goat serum, 0.3% Triton X-100, and 0.25% NaN3 in PBS (all reagents from Sigma). Primary antibodies (see Table 1 for details) were diluted in this solution and centrifuged at 13,000 rpm for 30 minutes at 4°C prior to application. Once applied, frozen sections were incubated in primary antibody at room temperature for 24 hours, and whole mounts were incubated at 4°C for 4-5 days on a gentle agitator.

Antibody characterization
The GFAP mouse antiserum detects a single 51-kDa band from purified spinal cord that corresponds to the GFAP protein on a western blot (Debus et al., 1983). This antibody has been shown to recognize astrocytes in the adult CNS (Castellano et al., 1991) along with a subpopulation of ependymal cells in both the SVZ and the central canal of the adult spinal cord (Alfaro-Cervello et al., 2012). The Nestin mouse antiserum detects a single 200-220 kDa protein by western blot of newborn rat and mouse cell extracts (manufacturer's data sheet) and has been previously identified as a marker for neuroepithelial progenitor cells in both the embryonic rat brain (Hockfield and McKay, 1985; Lendahl et al., 1990) and the SVZ of adult animals (Doetsch et al., 1997). The 3CB2 mouse antiserum recognizes an intracellular, 55-kDa protein expressed in the developing and adult CNS of several vertebrate species including rats (Prada et al., 1995).
This antibody has previously been used to identify radial glia in the developing rat spinal cord in vivo (Shibuya et al., 2003; Barry and McDermott, 2005) along with progenitor cell-containing neurospheres (Marchal-Victorion et al., 2003) and SVZ-derived explants of ependymal cells (Perez-Martin et al., 2003) in vitro. The Olig-2 rabbit antiserum detects a single 32-kDa protein by western blot of CNS tissue from rat/mouse brain and spinal cord (manufacturer's data sheet). Previous studies have established its specificity by noting the absence of staining in tissue that lacks oligodendrocytes as well as the presence of signal in a characteristic nuclear localization pattern in tissues that contain oligodendrocytes (Cai et al., 2007). The mouse Ki67 antiserum detects a nuclear antigen that is expressed by all proliferating cells during late G1, S, M, and G2 phases of the cell cycle, and its utility as a marker for cells undergoing neurogenesis in the adult CNS has previously been established (Kee et al., 2002), including cells in the SC stem cell niche (Hamilton et al., 2009). This antibody recognizes two bands (345 and 395 kDa) by western blot, which are consistent with the molecular weights of alternatively spliced Ki67 (Schluter et al., 1993). Its specificity has been confirmed by an enzyme-linked immunosorbent assay (ELISA) (Kubbutat et al., 1994) and also by flow cytometry (manufacturer's data sheet).

Image acquisition and processing
Images were captured using a Zeiss LSM 510 Meta laser scanning confocal microscope with LSM software. In some cases, images were transferred to ImageJ (NIH, Bethesda, MD), where the "despeckle" feature was used to decrease nonspecific background fluorescence levels. Finally, images were exported into Adobe Photoshop (San Jose, CA) for cropping, resizing, and adjustment of brightness and contrast levels.

RESULTS
To characterize the FT NSC niche, we stained sections from different ages of postnatal rats and humans for both cell proliferation markers and progenitor/stem cell markers, taking careful note of cellular morphology and orientation. For our initial characterization of the FT, we used P10 rats, and to observe changes in the niche over time, we examined the niche at various postnatal stages. First, we determined whether the FT was mitotically active and which cell types were involved. We then focused on the presence of the different classes of niche cells, their morphology and location, and the degree to which they expressed multiple markers. Finally, we examined the organization of the FT at four developmental timepoints. In this study, we report the appearance of proliferating cells that express Nestin, GFAP, 3CB2, and Olig-2 in the FT, and we propose a model for the in vivo architecture of this tissue.

Presence, location, identity, and characterization of proliferative cells in P10 rat
The ability to produce new cells is an essential feature of the stem cell niche. With this in mind, we determined the extent to which the postnatal FT preserves its proliferative ability. The presence and location of proliferating cells in the P10 rat FT were assessed by EdU labeling. Animals were injected with 5 mg/mL EdU and sacrificed 4 hours postinjection (n = 12). Whole-mount longitudinal sections revealed abundant proliferating cells throughout the FT (Fig. 1A). In transverse sections, EdU+ nuclei were observed at a high frequency in both the ependymal zone and the subependymal zone. At 4 hours postinjection, the majority of EdU+ cells existed singly.
Occasionally, doublets and small clusters of even-numbered cells were also observed (Fig. 1B). Using random sections, we counted the number of EdU+ cells per 20-µm section and found 12.6 ± 4.5 (n = 17). Next, the identity of the proliferating cells in the postnatal FT was determined by immunohistochemistry using established stem/progenitor cell markers that are expressed in other niches, including the spinal cord and SVZ niches. First, we assessed Nestin immunoreactivity. Nestin, a marker of undifferentiated neural progenitor cells, has been detected in all previously described NSC niches. Many of the EdU+ nuclei in the FT colocalized with Nestin. These double-labeled cells were distributed throughout the FT and were particularly prominent in the subependymal zone (Fig. 1C; n = 3). Because radial glia have been shown to play an integral role in the stem cell niche in the SVZ, the presence of this cell type among the proliferating cells in the FT was also examined using the selective marker 3CB2 (Prada et al., 1995). Although EdU+ nuclei were often found in close association with 3CB2+ processes, difficulty in identifying 3CB2+ cell bodies made it hard to discern whether the processes originated from the somas of dividing cells (Fig. 1D; n = 3). Olig-2 is a marker that is expressed by niche cells in both the SVZ and the spinal cord. In the SVZ, Olig-2 labels Type C transit-amplifying cells, but in the spinal cord niche, it is instead a marker for oligodendrocyte and motor neuron progenitors (Menn et al., 2006; Hamilton et al., 2009). While we observed extensive Olig-2 staining, only on very rare occasions was EdU seen to colocalize with Olig-2 (Fig. 1E; n = 3). Finally, we determined whether any cells coexpress EdU and GFAP. GFAP, a marker of differentiated astrocytes, is also expressed in the SVZ by the quiescent stem cell population (Type B cells). In the SVZ, GFAP+ cells (either differentiated astrocytes or Type B cells) are found in the subependymal zone and have been shown to act as postnatal NSCs. We detected a small number of GFAP+/EdU+ cells, which were located in the subependymal zone and exhibited a highly branched morphology (Fig. 1F; n = 3). The pattern of EdU staining shown in Figure 1 does not distinguish between neural stem cells and transit-amplifying cells, which have differing rates of proliferation. To determine whether the cells we had identified as EdU+ had the slow-cycling character of neural stem cells, we performed double-labeling experiments with Ki67 at different timepoints. As a thymidine analog, EdU is incorporated into the DNA of a dividing cell during the S-phase of the cell cycle. Ki67, on the other hand, is a marker for proliferation that is present in all cells during active phases of the cell cycle. To identify any slow-cycling progenitors in the FT, P2 rats were subcutaneously injected with 5 mg/mL EdU (n = 12), and a group of these EdU-labeled rats was sacrificed at each of the following timepoints postinjection: 2 hours (n = 4), 3 days (n = 4), and 7 days (n = 4). Transverse sections were first labeled with Ki67, followed by fluorescent detection of the EdU-labeled cells. Figure 2A shows cells at 2 hours post-EdU injection that were either EdU+ (1), Ki67+ (2), or positive for both markers (3). By 2 hours postinjection, virtually all cells that had incorporated EdU were also labeled with Ki67 (93 ± 1.5%), suggesting that they were still in an active phase of the cell cycle.
After 3 days, 66 ± 2.6% of the cells were double-labeled, and after 7 days, 30 ± 3.3% of EdU+ cells also expressed Ki67, suggesting that this represents a population of slow-dividing progenitors (Fig. 2B) (Ponti et al., 2013).

Figure 2. Identification of a population of slow-dividing progenitors in the FT. A: In a transverse section at the central canal 2 hours after EdU injection, cells in S-phase are labeled with EdU (green, 1) and cells in all active stages of the cell cycle are labeled with Ki67 (red, 2). Most EdU+ cells are also Ki67+ (merged image in 3). B: The graph displays the number of cells colabeled with Ki67 and EdU in the FT at either 2 hours, 3 days, or 7 days after EdU injection. Individual counts from 16 tissue sections from four different animals are shown, and the dots represent the average number of colabeled cells at each timepoint. A population of double-labeled cells, which represents slow-dividing progenitors, can be identified even after 7 days. Scale bar = 50 µm in A.

Progenitor cell morphology and distribution

Next, we wished to determine the distribution and morphology of the various cell types in the FT niche. In the P10 rat FT (n = 10), GFAP+ cells exhibited various morphologies. Large cells with ramified processes were often observed (Fig. 1F). These cells were always located in the subependymal zone and infrequently projected an apical process toward the lumen of the central canal. GFAP+ fibers were noted to wind through the ependymal zone, ultimately contacting the lumen of the central canal (Fig. 3A). Rarely, GFAP+ cell bodies were located directly in the ependymal zone (Fig. 3B). These cells appeared to send out long basal processes that often spanned the entire diameter of the FT and ultimately reached the pial surface. Whole-mount staining revealed that there was also a prominent level of GFAP staining among cells with a more superficial location (Fig. 5A). Interestingly, GFAP reactivity occurred homogeneously throughout the FT just below the level of the conus but was restricted to the ventral half more caudally, with the transition occurring in the rostral third of the tissue (Fig. 3C). The distribution of additional stem and progenitor cell markers was also investigated in both transverse and whole-mount longitudinal sections. Nestin reactivity occurred predominantly in subependymal cell bodies and radially oriented processes (Fig. 4A; n = 5). Staining for Nestin was stronger in the dorsal half of the FT than in other regions of the tissue, as can be seen in Figure 4A,B. In contrast to GFAP+ cells, Nestin+ cells prominently projected their apical processes into the central canal (Figs. 4B, 5B). 3CB2 immunoreactivity, directed against radial glia, also labeled radially oriented processes (Fig. 4C; n = 3). Olig-2 reactivity was observed primarily in the subependymal zone and in peripheral locations, although on rare occasions, Olig-2+ nuclei were detected in the ependymal zone (Fig. 4D; n = 5).

Colocalization of progenitor cell markers

We were also interested in the extent to which the niche cells expressed multiple developmental markers. Examination of both whole-mount and transverse sections (n = 4) revealed that Nestin and GFAP are expressed by different populations of cells. No colocalization of these markers was observed (Figs. 5A, 6A). In fact, at P10 the strongest staining for each marker was observed in different regions of the FT: GFAP+ immunoreactivity predominated in the ventral FT, while Nestin+
cells were primarily located in the dorsal half. When we compared Nestin staining with that for 3CB2, we found a high degree of overlap. At P10, the majority of radial processes were positive for both Nestin and 3CB2 (Fig. 6B,1-3). We did not detect 3CB2 and GFAP costaining in P10 animals, although there were some examples of coreactivity in aged rats. Olig-2 staining did not overlap with any of the other markers.

Effect of age on niche architecture

In the SVZ, the NSC niche changes with the age of the animal (Conover and Shook, 2011). With this in mind, we examined the FT niche architecture as a function of age. Sections of FT were stained for Nestin, 3CB2, GFAP, and Olig-2 at stages P1 (n = 16) and P10 (n = 12) and in rats aged >P82 (n = 4). Additionally, the P5 FT was examined in whole mount. All markers remained present in the tissue throughout the entire age range. However, the architecture of the niche changed over time, as revealed by changes in the frequency of appearance, intensity, and morphology of stained cells (Fig. 7). The number of Nestin+ processes and the intensity of staining decreased with age (Fig. 7A,1-3). Furthermore, the morphology of the Nestin+ cells changed throughout development. In P1 rats, Nestin expression was observed in immature, radial glia-like processes that spanned the distance from the ependymal zone to the pial surface, and there was a higher density of processes at both the dorsal and ventral poles of the central canal ependymal and subependymal zones (Fig. 7A,1). By P10, the intensity of Nestin staining had decreased, especially in the ventral zone, and the strongest staining occurred in cell bodies in the region of the dorsal pole of the central canal (Figs. 4A,B, 7A,2). In aged rats, a small number of cells retained Nestin reactivity; however, these cells often lost contact with the pial surface while maintaining contact with the central canal (Fig. 7A,3). While age did not affect the morphology of radial 3CB2+ processes, it did affect their distribution (Fig. 7B). These processes contacted both the central canal and the pial surface of the FT in P1 rats (Fig. 7B,1), whereas processes in older rats did not fully retain these contacts (Figs. 6B-D, 7B,2 and 3). GFAP reactivity was observed in only a small number of radial processes in P1 pups (Fig. 7C,1). This staining increased with age, and by P10, GFAP+ processes had significantly increased in number and complexity of shape. Additionally, as noted earlier, these processes were more abundant in the ventral FT (Fig. 7C,2). In the adult, GFAP reactivity was abundant throughout the FT, and processes were highly branched (Fig. 7C,3). In contrast to the other markers, the pattern of Olig-2 reactivity remained consistent throughout development (Fig. 7D), with Olig-2+ cells dispersed throughout the FT, including both the ependymal and subependymal zones. We were interested in comparing the staining patterns for progenitors observed in the rat FT with those in the adult human FT. Transverse and longitudinal sections from autopsy specimens aged 51-81 years (n = 4) were examined for Nestin and GFAP immunoreactivity. Nestin+ cells were detected in the ependymal and subependymal zones as well as in more lateral locations (Fig. 8A).
GFAP+ cells were observed in the subependymal zone and were dispersed more laterally throughout the FT. Their processes projected both radially and longitudinally (Fig. 8B). When sections were examined for colocalization of GFAP and Nestin, we detected a small proportion of cells and processes in which the two markers colocalized (Fig. 8A,B,3). In contrast, this colocalization was never observed in the rat FT.

DISCUSSION

We find that the rat FT contains the same cell types as those found in the other CNS NSC niches. Each cell type has a unique distribution. The numbers and morphologies of these cells, along with the niche characteristics, change with the age of the animal. At P10 and younger, the GFAP+ and Nestin+ cells represent separate populations, with the GFAP+ cells located more ventrally and the Nestin+ cells biased dorsally. At this stage, there is a high degree of costaining for Nestin and 3CB2, suggesting that the Nestin+ cells are radial glia. Interestingly, the Nestin+ cells extend their processes across the entire diameter of the FT and maintain contact with the lumen of the central canal. This is reminiscent of NSCs that have been described in other CNS niches (Fuentealba et al., 2012). The overall organization of the FT niche (Fig. 9) is similar to that described for the spinal cord (Hamilton et al., 2009; Hugnot and Franzen, 2011; Marichal et al., 2012). This is not unreasonable, given that the FT is a vestigial form of the spinal cord and might be expected to have a corresponding organization (Streeter, 1919).

FT histology

The histology of the FT has been examined in many species using light microscopy, electron microscopy, immunocytochemistry, and confocal microscopy. This tissue is a mixture of longitudinally oriented fibrous structures interspersed with primitive SC elements, which consist of ependymal cells, glia, and neurons. The fibrous aspect results from an abundance of Type 1 collagen bundles and elastic fibers, and it is hypothesized that the elastic property of the FT provides a buffering capacity against stretching of the spinal cord during flexion of the spine (Fontes et al., 2006). The human FT was originally studied by Harmeier (1933) and by Tarlov (1938). Harmeier observed that the FT contained all the elements found in the spinal cord, including ependymal cells, neurons, and glia. In addition, he noted the presence of neuroblasts scattered throughout the tissue and a high density of small blood vessels. Tarlov confirmed these findings, and he also noted the presence of many small neurons, which he thought likely to be neuroblasts. In addition, he noted the absence of anterior horn cells; he observed astrocytes in close association with blood vessels; he noted the presence of oligodendrocytes; and he described amorphous clusters of cells. His summary diagram bears a striking resemblance to modern descriptions of the NSC niche (Tarlov, 1938). These human studies were extended by Choi et al. (1992) using electron microscopy and immunocytochemistry. They noted GFAP+ glial cells, NSE+ neurons, and Vimentin+ precursor cells. They also noted large clusters of peri-ependymal glial cells that formed acinar-like structures. Miller (1968) used electron microscopy to characterize the ependymal cells and the neuropil of the FT in both cats and monkeys. The frog FT has been studied by three groups (Gonzalez-Robles and Glusman, 1979; Chesler and Nicholson, 1985; Chvatal et al., 2001).
Using light and electron microscopy, Gonzalez-Robles and Glusman (1979) identified three regions of the FT: a central region that contains ependymal and subependymal cells, an intermediate zone where astrocytes predominate, and a peripheral zone that contains neuronal fibers and astrocyte processes. Chesler and Nicholson (1985) elaborated on this, describing a radial arrangement of the FT organized around the central canal. In addition, they noted the presence of TH+ CSF-contacting neurons abutting the central canal. Chvatal's group (2001) confirmed the presence of astrocytes and oligodendrocytes using immunocytochemistry for GFAP and Rip staining, respectively. They also identified neurons using electrophysiology followed by dye-filling (Chvatal et al., 2001). A similar organization of cellular elements around the central canal has also been observed in the FT of rabbits, guinea pigs, and rats by Nakayama (1976). George and colleagues conducted an extensive immunohistochemical study of both the normal and tethered human FT (George et al., 2003). They stained the FT for caudal neural tube developmental, neuroglial, neural crest, epithelial, and mesenchymal markers and confirmed the presence of an abundance of ependymal, neural, and glial tissue. Réthelyi's group studied the FT in the rat, where they observed that it represented a continuation of the spinal cord's periventricular gray matter (Rexed's Lamina X) accompanied by a loss of the dorsal and ventral columnar structure (Réthelyi et al., 2004; Boros et al., 2008). Using NeuN staining, they identified clusters of small neurons in close apposition to the ependymal cells and raised the possibility that they may represent neurons in an early phase of differentiation (Réthelyi et al., 2004). Subsequently, they distinguished three groups of neurons: a small group just dorsal to the central canal and two lateral clusters on either side of the central canal. Additionally, they found a radial arrangement of GFAP+ astrocytic processes that extended from the central canal to the pial surface. In a later study, Attia and Shehab (2010) conducted an immunohistochemical analysis that was consistent with the findings of Réthelyi's group, and more recently, Gaddam et al. (2012) reported the presence of Nestin+ cells in the FT from two postmortem human specimens.

Neural stem cell niches

NSC niches have previously been described in the SVZ (Alvarez-Buylla and Garcia-Verdugo, 2002), the subgranular zone of the dentate gyrus in the hippocampus (Seri et al., 2004), and the spinal cord (SC) (Hamilton et al., 2009; Hugnot and Franzen, 2011; Marichal et al., 2012). Here, we add a fourth location to the list: the FT. Although these four niches share many key features, their subcellular organization also contains some distinct elements. Previous studies have described the localization of cells that express GFAP, Nestin, and Olig-2, along with proliferating cells, in the various NSC niches. Unsurprisingly, given its developmental origins, the expression pattern of these markers in the FT niche is most similar to that of the previously described SC niche in adult animals. For example, proliferating cells were observed in both the ependymal and subependymal layers of these two niches, and the proliferating cells primarily expressed Nestin, with few GFAP+ proliferating cells being identified in either case (Hamilton et al., 2009).
Nestin+ cells are also similarly distributed in the SC and FT niches. In the SC niche, Nestin+ cells are located dorsally and have long, basally projecting processes (Hamilton et al., 2009). Similarly, in the FT, we observed a dorsal bias of Nestin+ cells that extend processes spanning the entire diameter of the FT. Additionally, Nestin expression appears to be downregulated in all regions of the SC by age P6 (Barry and McDermott, 2005), which corresponds to our observation in the FT that the intensity of Nestin staining decreased by P10 and was retained in only a small number of cells in aged rats. The localization of Olig-2+ and GFAP+ cells was similar among the SVZ, SC, and FT niches. In the SC stem cell niche, Olig-2+ cells are most often observed in the subependymal layer closely bordering ependymal cells, which also corresponds to the organization of the SVZ niche. Although we occasionally observed Olig-2+ nuclei in the ependymal zone, Olig-2+ cells are also primarily located outside the ependyma in the FT. GFAP+ cells are located subependymally in both the SVZ (Alvarez-Buylla and Garcia-Verdugo, 2002) and the SC niche (Hamilton et al., 2009). Although Hamilton et al. could not always identify the location of the GFAP+ cell bodies, they did note that GFAP+ fibers extended into the ependymal layer. Similarly, we observed GFAP+ cells that were located subependymally and extended processes that wind through the ependymal zone and ultimately contact the lumen of the central canal. There are some key differences between the architectures of the SC and FT niches. Hamilton et al. (2009) noted an asymmetric distribution of proliferating cells, with a higher concentration at the dorsal pole of the SC niche. In contrast, we observed EdU+ cells distributed uniformly throughout the entire FT, although in both cases the proliferating cells were primarily Nestin+. Additionally, while Hamilton et al. (2009) identified a subpopulation of Vimentin+ cells in the SC niche that coexpressed GFAP and Nestin, we observed a distinct segregation between the populations of cells that expressed the two markers. In the FT, GFAP+ immunoreactivity is restricted to the ventral half, and Nestin+ cells are primarily located in the dorsal half. The functional consequence of this segregation of cell populations remains unclear. Interestingly, Hamilton et al. (2009) noted that 51% of the BrdU-retaining cells remained proliferative after 21 days, as determined by double labeling with Ki67. We also found a significant number (30%) of double-labeled cells 7 days postinjection.

Figure 7. Age-related changes in progenitor cell marker expression in the rat FT. Nuclei are indicated by DAPI incorporation (blue) in all panels. Cross-sections with ventral on the left and dorsal on the right. Left: P1; middle: P10; right: aged rats >P82. A: Nestin (red) reactivity decreased markedly with age, especially after P10, and processes progressively lost their basal contacts with the pial surface of the FT. B: 3CB2 (green) expression was consistently observed in radial processes across the age range studied. C: GFAP (red) reactivity increased with age, and GFAP+ cell morphology changed from radial to highly branched. D: Olig-2 (green) expression remained unchanged. Scale bars = 20 µm in (1) and 10 µm in (2).

This study relied extensively on the use of markers to identify various cell types in the FT niche.
Marker analyses can sometimes be misleading because many antigens are expressed by multiple cell types, and the presence of a marker does not necessarily dictate the function of that cell. For example, GFAP is commonly used to identify astrocytes, but it is also expressed by adult NSCs, including the quiescent stem cell population (Type B cells) in the SVZ (Alvarez-Buylla and Garcia-Verdugo, 2002). Similarly, the nuclear transcription factor Olig-2 is involved in regulating the progenitor cell fate of both SC oligodendrocytes and motor neurons. Therefore, conclusive identification of a cell type using a single marker can be problematic. To combat this, we looked for colocalization of markers whenever possible. For example, the coexpression of Nestin and 3CB2 in a single cell suggests that these cells are most likely radial glial neural progenitors. There is still a great deal to be done in order to effectively compare the FT niche with other previously established neural stem cell niches. For example, we and others have noted Dlx2 expression in the FT (Varghese et al., 2009; Jha et al., 2013b). Because this marker is normally restricted to the forebrain's stem cell niche (Bulfone et al., 1993), it will be important to establish the anterior-posterior identity of the neurons produced in the FT. We are currently investigating this by examining HOX gene expression in the FT (Jessell, 2000). An additional level of complexity is introduced by our lack of understanding of the functionality of the FT in postnatal animals. In the SVZ stem cell niche, adult neurogenesis is known to occur in a stepwise fashion and to contribute to the continuous production of interneurons destined for the olfactory bulb. In that niche, Olig-2 is expressed by a subpopulation of cells that has been identified as transit-amplifying progenitors (Type C cells), which are named for their properties of proliferation and differentiation within a finite life span. In contrast, the fate of the NSCs in the SC and FT is still not established. Consequently, identifying distinct functional populations of cells based on markers alone is not possible. One possibility is that the FT cells are destined to migrate into the spinal cord and differentiate into motor neurons. In that case, Olig-2 might play a role in determining their fate as motor neurons. However, it is equally possible that these cells remain relatively quiescent and do not continuously generate new neurons, which would require an alternate explanation for Olig-2 expression. Further studies are needed to construct a functional map of the FT and to determine the fate of the cells contained within this structure before any marker-based subpopulations can be conclusively named. Along with the niche cell types we examined, the extracellular matrix (ECM) and the vasculature are both emerging as key components of the stem cell niche. These components not only contribute to the health of the niche cells by delivering nutrients and anchoring cells to the niche, but they can also influence stem cell fates through diffusible signals and/or direct contact. Therefore, careful characterization of the ECM and the surrounding vasculature in the FT will be a necessary component of a more complete description of the niche.

CONCLUSION

Here we analyzed the distribution of cells in the FT niche and identified many key features of the forebrain and SC niches that are also present in the FT.
Additionally, the FT niche has some unique organizational elements that set it apart from the other niches. Understanding the interactions and eventual fate of these FT cells in healthy animals may help to unlock their therapeutic potential for future applications involving cell replacement strategies.

Figure 9. Proposed model of the progenitor cell niche in the P10 rat FT (transverse plane). Proliferating cells (green) are located in all regions of the FT. Nestin+ cells (blue) are also labeled with EdU and exhibit radial morphology, with some of their nuclei located in the ependymal zone. GFAP+ cells (red) exhibit various morphologies ranging from radial to ramified. GFAP+ cells with complex morphology are located in the subependymal region and rarely contain EdU+ nuclei. Radial 3CB2+ fibers (brown) frequently colocalize with Nestin. Finally, Olig-2+ nuclei (yellow) can be found in both ependymal and subependymal regions, although they are rarely labeled with EdU in the subependymal zone.
// src/download/download.ts
import { DataUrlConfig, ResponseType } from '../models'
// tslint:disable-next-line: no-var-requires
const axios = require('axios')

export class Download {
  // Map the supported response types to their MIME types; fall back to JSON.
  private getMimeType(responseType: ResponseType): string {
    switch (responseType) {
      case 'application/json':
        return 'application/json'
      case 'image/jpeg':
        return 'image/jpeg'
      case 'video/mp4':
        return 'video/mp4'
      default:
        return 'application/json'
    }
  }

  downloadFileFromUrl(url: string, responseType: ResponseType, fileName: string) {
    const type = this.getMimeType(responseType)
    console.log(`Url: ${url}`)
    axios
      .request({
        url,
        crossDomain: true,
        method: 'GET',
        // JSON payloads come back as parsed objects; everything else as a Blob.
        responseType: responseType === 'application/json' ? 'json' : 'blob',
      })
      .then((response: any) => {
        const responseData =
          responseType === 'application/json'
            ? JSON.stringify(response.data, null, 2)
            : response.data
        const hrefUrl = window.URL.createObjectURL(new Blob([responseData], { type }))
        const link = document.createElement('a')
        link.href = hrefUrl
        link.download = fileName
        document.body.appendChild(link)
        link.click()
        document.body.removeChild(link)
        // Release the object URL so the Blob can be garbage-collected.
        window.URL.revokeObjectURL(hrefUrl)
      })
      .catch((error: any) => {
        console.error(`Download failed for ${url}:`, error)
      })
  }

  downloadFileFromCanvas(
    canvas: HTMLCanvasElement,
    fileName: string,
    dataUrlConfig: DataUrlConfig,
  ) {
    // Serialize the canvas to a data URL (e.g. image/jpeg) and trigger a download.
    const dataUrl = canvas.toDataURL(dataUrlConfig.type, dataUrlConfig.quality)
    this.downloadSingleFile(dataUrl, fileName)
  }

  private downloadSingleFile(uri: string, name: string) {
    // Create a temporary anchor element to trigger the browser download.
    const link = document.createElement('a')
    link.download = name
    link.href = uri
    document.body.appendChild(link)
    link.click()
    document.body.removeChild(link)
  }
}
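A brief usage sketch for the class above. The URL, file name, and quality values are illustrative placeholders rather than values from the original source, and the snippet assumes a browser environment (it touches document):

// Hypothetical usage; all concrete values below are placeholders.
import { Download } from './download/download'

const downloader = new Download()

// Fetch a JSON document and save it locally as "report.json".
downloader.downloadFileFromUrl('https://example.com/api/report', 'application/json', 'report.json')

// Save the contents of an existing <canvas> element as a JPEG.
const canvas = document.querySelector('canvas')
if (canvas) {
  downloader.downloadFileFromCanvas(canvas, 'snapshot.jpg', { type: 'image/jpeg', quality: 0.9 })
}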
package cpp.course.vo;

import cpp.course.po.CourseContent;

import java.util.ArrayList;
import java.util.List;

// Course chapter (content entry) that also carries its child entries.
public class CourseContentVo extends CourseContent {
    List<CourseContent> contents = new ArrayList<>();

    public List<CourseContent> getContents() {
        return contents;
    }

    public void setContents(List<CourseContent> contents) {
        this.contents = contents;
    }

    @Override
    public String toString() {
        return "CourseContentVo{" +
                "contents=" + contents +
                '}';
    }
}
Have you ever noticed some shady character mucking about in a subway station on the opposite platform, on the other side of the tracks, perhaps yelling profanities and acting belligerent, and thought to yourself, "Golly, I'm glad that ne'er-do-well isn't on my side! Over here I can enjoy my Lady Chatterley's Lover in peace"? Well, it seems the other side of the tracks isn't so very far away, as one unfortunate man discovered earlier this month at the 6 train station at Lexington Avenue and 77th Street. Police tell DNAinfo the unidentified 31-year-old victim got into "a stare down" with his alleged assailant while waiting for the 6 train on December 11th. The hoodlum, seen here in an image captured from a surveillance camera, was on the uptown side of the station, but he climbed down and walked across the trackbed to attack the man on the other side. Sounds like somebody can't handle losing a little staring contest. ABC 7 reports that the victim was taken to Columbia Presbyterian Hospital with a laceration to his head and bleeding in his brain. Police say the suspect is described as a 30- to 40-year-old black man, around six feet tall and 200 pounds. If you see him, DON'T MAKE EYE CONTACT. Call Crime Stoppers at (800) 577-TIPS (8477) or text tips to CRIMES (274637), then enter TIP577.
// NewRegion creates a track region by specifying its start corner, length,
// and width.
func NewRegion(track *Track, c1 Point, length, width phys.Meters) *Region {
	if c1.Dofs < 0 {
		panic(fmt.Sprintf("NewRegion: c1.Dofs=%v invalid; must be >= 0", c1.Dofs))
	}
	if c1.Dofs >= track.CenLen() {
		panic(fmt.Sprintf("NewRegion: c1.Dofs=%v invalid; must be < track.CenLen()=%v", c1.Dofs, track.CenLen()))
	}
	if width <= 0 {
		panic(fmt.Sprintf("NewRegion: width=%v invalid; must be >0", width))
	}
	if length <= 0 {
		panic(fmt.Sprintf("NewRegion: length=%v invalid; must be >0", length))
	}

	return &Region{
		c1:    c1,
		len:   length,
		width: width,
		track: track,
	}
}
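NewRegion enforces its preconditions by panicking, so a caller that wants to probe speculative inputs can translate the panic into an ordinary error with recover. A minimal sketch, using only the identifiers visible above plus fmt.Errorf:

// tryNewRegion converts NewRegion's panic-on-invalid-input contract into an
// error return. Track, Point, Region, and phys.Meters are the types from the
// excerpt above; nothing else is assumed.
func tryNewRegion(t *Track, c1 Point, length, width phys.Meters) (r *Region, err error) {
	defer func() {
		if p := recover(); p != nil {
			err = fmt.Errorf("invalid region: %v", p)
		}
	}()
	return NewRegion(t, c1, length, width), nil
}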
// For sorting the new population in descending order of fitness.
private class Priority implements Comparator<Node> {
    public int compare(Node n1, Node n2) {
        // Integer.compare avoids the overflow that plain subtraction
        // (num2 - num1) can produce for extreme fitness values.
        return Integer.compare(n2.fitness_value, n1.fitness_value);
    }
}
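A usage sketch from inside the enclosing class; the list name is hypothetical, and java.util.Collections is assumed to be imported:

// Hypothetical call site: sorts so the highest-fitness Node comes first.
Collections.sort(newPopulation, new Priority());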
async def wait_for(
    self,
    event_cls: type[EventT],
    /,
    *,
    check: Optional[Callable[[EventT], bool]] = None,
    timeout: Optional[float] = None,
) -> EventT:
    """Block until an event of type ``event_cls`` that satisfies ``check``
    is dispatched, or raise ``asyncio.TimeoutError`` after ``timeout`` seconds.
    """
    future = self._event_handler.add_waiter(event_cls, check=check)
    return await asyncio.wait_for(future, timeout=timeout)
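A usage sketch, assuming a client object that exposes this method and a hypothetical MessageEvent class (neither appears in the excerpt):

# Hypothetical usage, inside an async function: wait up to 30 seconds for a
# MessageEvent whose content is "ping"; raises asyncio.TimeoutError otherwise.
event = await client.wait_for(
    MessageEvent,
    check=lambda e: e.content == "ping",
    timeout=30,
)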
package org.pandapay;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import static org.springframework.test.util.AssertionErrors.assertTrue;

/**
 * Created by LANLI on 2015/12/18.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ActiveProfiles("unit-test")
@SpringApplicationConfiguration(classes = TestConfig.class)
public class EmbeddedDbTest {

    @Test
    public void testOutput() {
        assertTrue("test running", true);
    }
}
# Generated by Django 3.1.7 on 2021-04-13 14:32 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('ensembl_metadata', '0003_auto_20210325_1242'), ] operations = [ migrations.CreateModel( name='DatasetSource', fields=[ ('dataset_source_id', models.AutoField(primary_key=True, serialize=False)), ('type', models.CharField(choices=[('core', 'Core'), ('cdna', 'Cdna'), ('datafile', 'Datafile'), ('otherfeatures', 'Otherfeatures'), ('rnaseq', 'Rnaseq'), ('compara', 'Compara'), ('funcgen', 'Funcgen'), ('variation', 'Variation')], max_length=32)), ('name', models.CharField(max_length=255, unique=True)), ], options={ 'db_table': 'dataset_source', }, ), migrations.RemoveField( model_name='dataset', name='dataset_database', ), migrations.RemoveField( model_name='genomedataset', name='is_current', ), migrations.AddField( model_name='genomedataset', name='release', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='genome_datasets', to='ensembl_metadata.release'), preserve_default=False, ), migrations.AlterModelTable( name='genomedataset', table='genome_dataset', ), migrations.DeleteModel( name='DatasetDatabase', ), migrations.AddField( model_name='dataset', name='dataset_source', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='datasets', to='ensembl_metadata.datasetsource'), preserve_default=False, ), ]
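This auto-generated migration is applied with Django's standard migrate command; the app label comes from the dependencies list above:

python manage.py migrate ensembl_metadata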
/** * Copy the previous synchronized projects into a new collection. */ private Collection<ProjectVo> copyProjects() { Collection<ProjectVo> oldProjects = new ArrayList<>(); if(this.projects != null) { oldProjects.addAll(this.projects); } return oldProjects; }
package com.innorb.recipeapp.service; import java.util.ArrayList; import com.innorb.recipeapp.model.Recipe; import retrofit2.Call; import retrofit2.http.GET; public interface UdacityService { @GET(" ") Call<ArrayList<Recipe>> getUdacityService(); }
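A usage sketch showing how this interface would typically be wired up with Retrofit. The base URL is a hypothetical placeholder (the real one is not shown), and GsonConverterFactory assumes the retrofit2 converter-gson artifact is on the classpath:

// Hypothetical factory for the service above; all concrete values are placeholders.
public class UdacityServiceFactory {
    public static UdacityService create() {
        Retrofit retrofit = new Retrofit.Builder()
                .baseUrl("https://example.com/") // assumption: real base URL not shown
                .addConverterFactory(GsonConverterFactory.create())
                .build();
        return retrofit.create(UdacityService.class);
    }
}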
from tpg.program import Program import numpy as np import random from tpg.utils import flip from tpg.action_object import ActionObject """ Action Object has a program to produce a value for the action, program doesn't run if just a discrete action code. """ class ConfActionObject: def init_def(self, initParams=None, action = None): ''' Defer importing the Team class to avoid circular dependency. This may require refactoring to fix properly ''' from tpg.team import Team # The action is a team if isinstance(action, Team): self.teamAction = action self.actionCode = None #print("chose team action") return # The action is another action object if isinstance(action, ActionObject): self.actionCode = action.actionCode self.teamAction = action.teamAction return # An int means the action is an index into the action codes in initParams if isinstance(action, int): if "actionCodes" not in initParams: raise Exception('action codes not found in init params', initParams) try: self.actionCode = initParams["actionCodes"][action] self.teamAction = None except IndexError as err: ''' TODO log index error ''' print("Index error") return def init_real(self, initParams=None, action=None): ''' Defer importing the Team class to avoid circular dependency. This may require refactoring to fix properly ''' from tpg.team import Team if isinstance(action, Team): # The action is a team self.actionCode = None self.actionLength = None self.teamAction = action self.program = Program(initParams=initParams, maxProgramLength=initParams["initMaxActProgSize"], nOperations=initParams["nOperations"], nDestinations=initParams["nDestinations"], inputSize=initParams["inputSize"]) elif isinstance(action, ActionObject): # The action is another action object self.actionCode = action.actionCode self.actionLength = action.actionLength self.teamAction = action.teamAction self.program = Program(instructions=action.program.instructions, initParams=initParams) elif isinstance(action, int): # An int means the action is an index into the action codes in initParams if "actionCodes" not in initParams: raise Exception('action codes not found in init params', initParams) try: self.actionCode = initParams["actionCodes"][action] self.actionLength = initParams["actionLengths"][action] self.teamAction = None self.program = Program(initParams=initParams, maxProgramLength=initParams["initMaxActProgSize"], nOperations=initParams["nOperations"], nDestinations=initParams["nDestinations"], inputSize=initParams["inputSize"]) except IndexError as err: ''' TODO log index error ''' print("Index error") self.registers = np.zeros(max(initParams["nActRegisters"], initParams["nDestinations"])) """ Returns the action code, and if applicable corresponding real action. """ def getAction_def(self, state, visited, actVars=None, path_trace=None): if self.teamAction is not None: # action from team return self.teamAction.act(state, visited, actVars=actVars, path_trace=path_trace) else: # atomic action return self.actionCode """ Returns the action code, and if applicable corresponding real action(s). """ def getAction_real(self, state, visited, actVars=None, path_trace=None): if self.teamAction is not None: # action from team return self.teamAction.act(state, visited, actVars=actVars, path_trace=path_trace) else: # atomic action if self.actionLength == 0: return self.actionCode, None else: return self.actionCode, self.getRealAction(state, actVars=actVars) """ Gets the real action from a register. 
""" def getRealAction_real(self, state, actVars=None): Program.execute(state, self.registers, self.program.instructions[:,0], self.program.instructions[:,1], self.program.instructions[:,2], self.program.instructions[:,3]) return self.registers[:self.actionLength] """ Gets the real action from a register. With memory. """ def getRealAction_real_mem(self, state, actVars=None): Program.execute(state, self.registers, self.program.instructions[:,0], self.program.instructions[:,1], self.program.instructions[:,2], self.program.instructions[:,3], actVars["memMatrix"], actVars["memMatrix"].shape[0], actVars["memMatrix"].shape[1], Program.memWriteProbFunc) return self.registers[:self.actionLength] """ Returns true if the action is atomic, otherwise the action is a team. """ def isAtomic_def(self): return self.teamAction is None """ Change action to team or atomic action. """ def mutate_def(self, mutateParams, parentTeam, teams, pActAtom, learner_id): # mutate action if flip(pActAtom): # atomic ''' If we already have an action code make sure not to pick the same one. TODO handle case where there is only 1 action code. ''' if self.actionCode is not None: options = list(filter(lambda code: code != self.actionCode,mutateParams["actionCodes"])) else: options = mutateParams["actionCodes"] # let our current team know we won't be pointing to them anymore if not self.isAtomic(): #print("Learner {} switching from Team {} to atomic action".format(learner_id, self.teamAction.id)) self.teamAction.inLearners.remove(str(learner_id)) self.actionCode = random.choice(options) self.teamAction = None else: # team action selection_pool = [t for t in teams if t is not self.teamAction and t is not parentTeam] # If we have a valid set of options choose from them if len(selection_pool) > 0: # let our current team know we won't be pointing to them anymore oldTeam = None if not self.isAtomic(): oldTeam = self.teamAction self.teamAction.inLearners.remove(str(learner_id)) self.teamAction = random.choice(selection_pool) # Let the new team know we're pointing to them self.teamAction.inLearners.append(str(learner_id)) #if oldTeam != None: # print("Learner {} switched from Team {} to Team {}".format(learner_id, oldTeam.id, self.teamAction.id)) return self """ Change action to team or atomic action. """ def mutate_real(self, mutateParams, parentTeam, teams, pActAtom, learner_id): # first maybe mutate just program if self.actionLength > 0 and flip(0.5): self.program.mutate(mutateParams) # mutate action if flip(pActAtom): # atomic ''' If we already have an action code make sure not to pick the same one. TODO handle case where there is only 1 action code. 
''' if self.actionCode is not None: options = list(filter(lambda code: code != self.actionCode, mutateParams["actionCodes"])) else: options = mutateParams["actionCodes"] # let our current team know we won't be pointing to them anymore if not self.isAtomic(): #print("Learner {} switching from Team {} to atomic action".format(learner_id, self.teamAction.id)) self.teamAction.inLearners.remove(str(learner_id)) self.actionCode = random.choice(options) self.actionLength = mutateParams["actionLengths"][self.actionCode] self.teamAction = None else: # team action selection_pool = [t for t in teams if t is not self.teamAction and t is not parentTeam] # If we have a valid set of options choose from them if len(selection_pool) > 0: # let our current team know we won't be pointing to them anymore oldTeam = None if not self.isAtomic(): oldTeam = self.teamAction self.teamAction.inLearners.remove(str(learner_id)) self.teamAction = random.choice(selection_pool) # Let the new team know we're pointing to them self.teamAction.inLearners.append(str(learner_id)) #if oldTeam != None: # print("Learner {} switched from Team {} to Team {}".format(learner_id, oldTeam.id, self.teamAction.id)) return self
package s3

import (
	"github.com/mitchellh/goamz/aws"
	goamzs3 "github.com/mitchellh/goamz/s3"
)

// Client wraps a goamz S3 connection scoped to a single bucket.
type Client struct {
	s3     *goamzs3.S3
	bucket *goamzs3.Bucket
}

// New builds a Client for the given bucket, resolving the endpoint URL to a
// known AWS region (or to a custom region if the URL is not recognized).
func New(accessKey, secretKey, bucketName, endPointURL string) *Client {
	auth := aws.Auth{
		AccessKey: accessKey,
		SecretKey: secretKey,
	}
	region := getRegion(endPointURL)

	s3 := goamzs3.New(auth, region)
	bucket := s3.Bucket(bucketName)

	return &Client{
		s3:     s3,
		bucket: bucket,
	}
}

// getRegion matches the endpoint URL against the built-in AWS regions and
// falls back to a custom region definition.
func getRegion(endPointURL string) aws.Region {
	for _, region := range aws.Regions {
		if region.S3Endpoint == endPointURL {
			return region
		}
	}

	return aws.Region{
		Name:                 "custom",
		S3Endpoint:           endPointURL,
		S3LocationConstraint: true,
	}
}

// Store uploads fileContent to the given path in the bucket.
func (c *Client) Store(path string, fileContent []byte) error {
	return c.bucket.Put(path, fileContent, "", "")
}

// Delete removes the object at the given path.
func (c *Client) Delete(path string) error {
	return c.bucket.Del(path)
}

// List returns up to 100 versions stored under the given path prefix.
func (c *Client) List(path string) (Versions, error) {
	resp, err := c.bucket.List(path+"/", "", "", 100)
	if err != nil {
		return []Version{}, err
	}

	files := []Version{}
	for _, file := range resp.Contents {
		version, err := NewVersion(file)
		if err != nil {
			return Versions{}, err
		}
		files = append(files, version)
	}

	return files, nil
}

// Get downloads the object at the given path.
func (c *Client) Get(path string) ([]byte, error) {
	return c.bucket.Get(path)
}
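A usage sketch for the client. The import path, credentials, bucket, and endpoint are all illustrative placeholders:

package main

import (
	"fmt"
	"log"

	"example.com/project/s3" // assumption: module path of the package above
)

func main() {
	// All credential, bucket, and endpoint values below are placeholders.
	client := s3.New("ACCESS_KEY", "SECRET_KEY", "my-bucket", "https://s3.amazonaws.com")

	if err := client.Store("backups/example.json", []byte(`{"ok":true}`)); err != nil {
		log.Fatal(err)
	}

	versions, err := client.List("backups")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("found %d stored versions\n", len(versions))
}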
import importlib


def reloader(mod):
    """Re-import an already-imported module object in place."""
    importlib.reload(mod)
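Usage is a single call; the module name below is an illustrative placeholder:

# Hypothetical usage: pick up source edits to mymodule without restarting.
import mymodule

reloader(mymodule)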
// SetGlobalConfig imports settings from the global configuration
func (c *ConfigRequest) SetGlobalConfig(g *shared.GlobalConfig) {
	c.V1.Sys.Mlsa = g.V1.Mlsa
	c.V1.Sys.Service.ExternalFqdn = g.V1.Fqdn

	if logLevel := g.GetV1().GetLog().GetLevel().GetValue(); logLevel != "" {
		c.V1.Sys.Log.Level.Value = GlobalLogLevelToDexLevel(logLevel)
	}

	if g.GetV1().GetDisclosure().GetShow() != nil {
		c.V1.Sys.Disclosure.Show.Value = g.GetV1().GetDisclosure().GetShow().GetValue()

		if messageFilePath := g.GetV1().GetDisclosure().GetMessageFilePath().GetValue(); messageFilePath != "" {
			// The read error is discarded: an unreadable file yields an empty message.
			fileContent, _ := ioutil.ReadFile(messageFilePath)
			// Trim the trailing newline and escape double quotes so the message
			// embeds safely in the rendered configuration.
			message := strings.TrimSuffix(string(fileContent), "\n")
			message = strings.Replace(message, `"`, `\"`, -1)
			c.V1.Sys.Disclosure.DisclosureMessage.Value = message
		}
	}

	if g.GetV1().GetBanner().GetShow() != nil {
		c.V1.Sys.Banner.Show.Value = g.GetV1().GetBanner().GetShow().GetValue()

		if bannerMessage := g.GetV1().GetBanner().GetMessage().GetValue(); bannerMessage != "" {
			c.V1.Sys.Banner.Message.Value = bannerMessage
		}
		if textColor := g.GetV1().GetBanner().GetTextColor().GetValue(); textColor != "" {
			c.V1.Sys.Banner.TextColor.Value = textColor
		}
		if backgroundColor := g.GetV1().GetBanner().GetBackgroundColor().GetValue(); backgroundColor != "" {
			c.V1.Sys.Banner.BackgroundColor.Value = backgroundColor
		}
	}
}
Some Parametric Studies on Separation of Palladium from Perchloric Acid Medium by Radiolytic Reduction

Abstract

The radiolytic reduction of palladium ions to palladium metal in perchloric acid solutions has been studied with the aim of separating palladium from aqueous acidic waste. The fraction of Pd separated out as precipitate has been studied as a function of the initial Pd concentration and the strength of HClO4. Addition of t-butanol to the system has been found to cause a substantial enhancement in the amount of Pd precipitated as compared to that in its absence. At a given absorbed dose, the extent of Pd separated is found to increase with the concentration of HClO4 in the presence of t-butanol. However, the converse is true for radiolysis in the absence of t-butanol. The decrease in the extent of reduction of Pd(II) to Pd(0) has been found to be due to the increasing formation of chloride ions, which tend to form reduction-resistant chlorocomplexes of Pd. This is supported by the red-shifting of the absorption bands of Pd(ClO4)2 observed in the spectra of gamma-radiolysed solutions of Pd(II) at higher HClO4 concentrations. External addition of chloride ions to the aqueous Pd(II)/HClO4 system, even in the presence of t-butanol, has been found to cause substantial inhibition of the radiolytic reduction of Pd(II) owing to the formation of reduction-resistant complexes. Correspondingly, the addition of nitrate ions to the Pd(II)/HClO4/t-butanol system showed an inhibition effect at a much greater stoichiometric amount of nitrate.
/**
 * This class is used to score the plant. Plants are scored for how green they are after a specified
 * number of iterations.
 */
public class PlantScore implements ScoreFunction {

    /**
     * {@inheritDoc}
     */
    @Override
    public double calculateScore(final MLMethod algo) {
        DoubleArrayGenome genome = (DoubleArrayGenome) algo;

        PlantUniverse universe = new PlantUniverse();
        universe.reset();
        PlantPhysics physics = new PlantPhysics();
        PlantGrowth growth = new PlantGrowth();

        // Run the evaluation cycles: physics first, then genome-driven growth.
        for (int i = 0; i < PlantUniverse.EVALUATION_CYCLES; i++) {
            physics.runPhysics(universe);
            growth.runGrowth(universe, genome.getData());
        }

        // Count the amount of green: living cells at or below the ground line
        // contribute a flat 0.5, while cells above it contribute their leafyness.
        int count = 0;
        double sum = 0;
        for (int row = 0; row < PlantUniverse.UNIVERSE_HEIGHT; row++) {
            for (int col = 0; col < PlantUniverse.UNIVERSE_WIDTH; col++) {
                PlantUniverseCell cell = universe.getCell(row, col);
                if (cell.isAlive()) {
                    if (row >= PlantUniverse.GROUND_LINE) {
                        sum += 0.5;
                    } else {
                        sum += cell.getLeafyness();
                    }
                }
                count++;
            }
        }

        return sum / count;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean shouldMinimize() {
        // Greener plants score higher, so the score is maximized.
        return false;
    }
}
Interview With Tim Tuohy (October 2011)

Tim Tuohy edited DAREDEVIL for a short run at the end of Volume 1 and had the unenviable task of taking over from former editor Jaye Gardner just as the "Flying Blind" arc was underway. Here we discuss that oft-maligned arc, the final issue by Chichester and Weeks, and working at Marvel in the late 90's. This interview was conducted over several months, and Mr. Tuohy supplied many of the memos and images presented. Many thanks to Mr. Tuohy for this opportunity.

Kuljit Mithra: When I first think back to the time you were editing DAREDEVIL, all I can remember is Marvel's bankruptcy and the general malaise about the title itself... Joe Kelly had finished his last issue and rumors of cancellation were appearing... a deal with Event Comics was also rumored, and then Jaye Gardner left the book and then you were onboard with "Flying Blind"... can you describe what it was like at Marvel at that time, and how you got the DD editing job?

DAREDEVIL #376-379 interlocking covers by Cully Hamner & Jason Martin

Tim Tuohy: It was a rough time, no doubt. Malaise could also be combined with anger and frustration. You went to work every day pretty much not knowing what was going to happen once you walked through the door. There were daily updates on who was buying us, who was not buying us, who was making a new bid for us... it was draining. There were forces, internal and external, which were weighing heavily on everybody in the building. [Former Marvel inker] Bob Almond commented to me on how somber it was coming into the offices during those days. Added to the financial situation was a seemingly constant attack on the Marvel editors. Upper management, for whatever their reasons, felt that outsourcing titles was a good idea. I can truthfully appreciate getting Jim Lee and Rob Liefeld to come in to bring up the sales numbers for "Heroes Reborn". I get it. But I never understood why it was perceived that they could have done a better job at the day-to-day editing process than Marvel's own editors. And then once the Event Comics rumors started, well, you can just imagine how we were feeling at the time.

For reasons that were never made truly clear to me, I was brought into the publisher's office some time in January of 1998 and told that, "due to an editorial restructuring," I would be the editor of DAREDEVIL starting with issue #376. I'm going to include dates because, hopefully, they will help set the story straight about what really happened with "Flying Blind." Some people have said that I was given DAREDEVIL because Jaye Gardner was laid off at the time. Jaye and I were both laid off on the same day, October 24, 1998. The editorial reshuffling never made sense to me because there were already some pretty solid rumors about Quesada and Palmiotti coming in to take over the book. So giving me the book with only 5 issues left seemed strange.
On whatever day it was, there was a brief handover meeting between Jaye Gardner and me. During that meeting we spoke about what was going on, who his creative teams were, and what had been completed so far. It was that last part that caused me the most worry and the most problems. Jaye handed over to me the plots for the first two issues and a piece of art that was created as an ad for the storyline. That was it. The plot for issue #376 was faxed over on 12/01/1997 and the plot for #377 was faxed over on 1/08/1998. Mr. Lobdell was still working with Jaye on the second plot because on page 8 of the fax he makes a note to "Jay." I was also made aware of how late the book was.

A page from Scott Lobdell's plot for the second issue of "Flying Blind"

My first priority was to read the plots and see what the story was about. Despite what some people would like or believe, an editor's responsibility is to be more than a traffic manager. Marvel editors are responsible for some of the most well-known characters in pop culture. That was a responsibility that I took very seriously. So seriously that when I was an assistant editor under Don Daley, I ran afoul of a writer who had presented Don with a PUNISHER story and kept asking me if Don had read the story or not. The writer finally said that if Don wasn't going to read it then he/she would just change the character to Batman and submit it to DC. I said, "Then it's not a PUNISHER story and Don doesn't want it."

I read the plots and realized first and foremost that this should never have gone past the pitch stage. Writers throw out a lot of ideas. Some are good and some are not. I think that Mr. Lobdell has a lot of great ideas, but "Flying Blind" had too many things going on that I couldn't reconcile, not just as an editor but as a reader. After the events of #375, Matt wakes up in France, has undergone complete facial reconstruction plastic surgery, has had his brain re-wired to wipe his memory, speaks and understands French fluently, and, most shocking of all, has his sight restored. Finally, there is only a single S.H.I.E.L.D. agent who knows what is going on, and he gets killed at the end of the first issue. Is this agent a plastic surgeon/neurosurgeon? And this was when Matt's identity was still a secret. As an editor, I also had problems with the fact that Mr. Lobdell was using a villain, Electro, who was not approved by the proprietary editor, and a villain who was dead, The Matador. Also troubling was the fact that after the four issues were done, the story was done. DAREDEVIL was completely back to status quo. To me, this continued to push along the argument that Marvel's editors had no idea what they were doing and that outsourcing would make for better books.

I wrote notes, some of them admittedly quite harsh, on the two plots and attached them to a memo to the Editor in Chief dated 1/21/1998. The first sentence of that memo read, "Simply and as eloquently as I can put this, this is not a DAREDEVIL story." By that date, issue #376, which had a solicited on-sale date of 4/1/98, was exactly 10 weeks from shipping without a single penciled page of story.

Tuohy's notes on Scott Lobdell's plots, in an internal memo to editor-in-chief Bob Harras

In situations like this, an editor is supposed to have a completely generic inventory story that can be run. Jaye did not have one, and in a meeting with the EIC, probably on the 22nd, I was told to do the best that I could. I was very upset about the situation and asked that my name be taken off the book.
I was surprised and not a little bit shocked to be told that, if I took my name off the project, I would be fired. Hindsight being 20/20, I should not have written such harsh notes on a plot from a writer who was a personal friend of the EIC. It was also stupid of me to then expect any support from the EIC for his editor. The lack of support would continue throughout the work on the first issue. I had to go back to my office and get on the phone with Mr. Lobdell to change some things, and talk a lot to Cully. Through no fault of his own, he was under the gun and we had to figure out some way to get this book on the fast track artwise.

Kuljit Mithra: So this probably explains why you had to get Tom Morgan involved to help out with the arc, because it was already far behind. I also remember the arc was solicited for 4 issues, then I heard it was down to 3, then back to 4 again... I didn't realize things were a mess behind the scenes. Is this kind of politics something you had to endure for most of your time at Marvel? For the most part, people don't understand what editors do and how their decisions on what happens in the comic are sometimes not of their own choosing. With everything going on with Marvel and now this situation with DAREDEVIL, there must have been some conflicting thoughts about your job.

Solicitation for issue #376, from Marvel Vision

Tuohy: Before I talk about Tom, I have to say what a pro Cully was. He really was given the short end of the stick on this one. It was his idea to save time by drawing on boards that were smaller than the usual 11"x17" ones. To this day, I have a full set of copies of his pencil art for the first issue. Having Jason Martin inking was also a great time saver, but in the end, I knew that I had to get in another artist or else every issue would be late. Cully and I talked it over and he agreed with me that the best course of action was to get Scott to turn in all of the plots as soon as possible and give him the last issue. Getting Tom Morgan was a no-brainer. Tom pretty much saved my ass on DS9 #13 when I had to let the original penciler go. Tom stepped in and did an outstanding job under a tight deadline. I knew that he could do the same, if not better, on DAREDEVIL. I called him and laid out what was going on and he came onboard. While Cully was still working on #376, I had Tom start on #377.

I have no recollection whatsoever about the solicitation thing... 4 issues to 3 issues back to 4 issues. If I was to hazard a guess, I would say that it had more to do with Quesada and Palmiotti's schedule than mine. I was already making calls to get #380 in the works, having gotten the double-sized issue approved. As Cully was cranking out pages for #376 as fast as he could, I was constantly trying to get ahold of Mr. Lobdell to script the pages. He was not an easy man for me to get ahold of. It might have been because he was made aware of my displeasure over the whole situation. Only Mr. Lobdell can answer that. However, once I got those scripted pages, they had to be read, revised, and then faxed over to Comicraft. The amount of faxing that went on for this one 24-page story was insane. When all was said and done, on 3/17/98, I was able to send the book to the manufacturing department. From there, the book was sent to the color separator, proofs came back for approval, revisions were made, and then it was off to the printer and, finally, the stores.
Within the space of about six or seven weeks after getting a book that should have been sent out five weeks earlier, #376 was done. Mr. Lobdell came through and turned around the plots to #378 and #379. I had Cully do the covers for all four issues, and he was able to jump right into #379 while Tom had just started #378. On 3/19/98, I received a memo from the EIC telling me that DAREDEVIL #376 was going to ship two weeks late. As a consequence of Marvel's new policy regarding late-shipping books, I would be assessed my first "strike." Marvel had created this policy in an effort to stop the lateness of books leaving the offices. If an editor received three strikes, he/she was put on probation. A fourth could result in possible termination.

Bob Harras's memo to Tuohy about his "first strike".

Needless to say, I was furious about this. Furious is really an understatement. The memo was left on my chair when I went out to lunch! I immediately, and quite unprofessionally, fired off a reply memo citing every single instance of what went wrong with #376 and how I fixed it.

*Full Disclosure: Before I was laid off, I had received a total of 2 strikes. The first was DAREDEVIL #376. Say what you will; not my fault. The second was for Seeker 3000 #1. It was one week late because I hesitated to farm out work to multiple people when a freelancer didn't turn in work as promised.*

In his memo, the EIC cited that the cover was late. The same EIC that said the cover should be one of the first things done for solicitations. Well, by that logic, the cover should have been Jaye's responsibility. After the EIC and I had quite the shouting match, I completely washed my hands of the project. Mr. Lobdell could have written dog on people sex for all I cared. My sole purpose was to get those remaining issues out on time and looking good. I made sure that Cully, Jason, Tom, Scott Hanna, Comicraft, Christie Scheele, and even Mr. Lobdell knew that I would still be in their corner, but I was done fighting a fight that was rigged against me.

Tuohy replies to Harras's memo

I saw what was going on and put all of my energy into getting Dan Chichester and Lee Weeks for #380. If DAREDEVIL was going to be outsourced, I would at least give it a great sendoff!

To answer your question about the thoughts of my job, I always tell people this: "When I started at Marvel, I was in at 8:59am and the custodians had to kick me out." During my last 6 months I was in at 9am and out at 4:59pm. The place changed. It wasn't fun anymore.

Mithra: I'll come back to Chichester and Weeks, because I really liked that issue and it really was a good send off. I wanted to get your feelings on the whole Event Comics deal. You've already mentioned how you felt about outsourcing the comic, but what did you think of their work? I know that must be difficult to honestly answer, considering you were let go shortly after they took over the book.

Tuohy: Yeah, I was mad, but this was business. It was what it was and there was nothing I could do about it. There was nothing personal about it. Jimmy Palmiotti and I go way back to the days when I was an intern and then an assistant for Don Daley. I was Jimmy's background inker on some Punisher issues. I went to the 2010 DragonCon, my first con in 13 years, and Jimmy and I had a great reunion. He is one of the best people in the business. Many people forget that Don Daley, and technically me as well, gave Joe Quesada some of his first work at Marvel.
Joe did the cover to Punisher #62 and he drew issue #12 of Sleepwalker. Both of those were in 1992! My assistant, Julio Soto, and I helped them when they started and we wrote their DAREDEVIL gatefold covers for them before we got laid off. My only complaint, and this was after some time had passed, was the number of issues they got out. It took 16 months to get out 12 issues. DAREDEVIL #12 was what, four months late? I got a strike for being one week late and Joe gets made EIC for being 16 weeks late! I'm laughing as I write this. He has probably been one of the best EICs Marvel has had.

Mithra: Were you assigned other titles to work on before being laid off? Had there been any suggestions that DAREDEVIL and the other titles included in the Event Comics deal would return to Marvel sooner, rather than later? I remember that the initial news said it was for 12 issues.

Tuohy: I had two issues of the MICRONAUTS in the can. Yes, those MICRONAUTS! Shon Bury wrote a great story and the art was by Cary Nord and Dan Green. I was developing a science fiction line that would have piggybacked off of my relaunch of SEEKER 3000. I was obsessed with DEATHLOK. Bruce Canwell, who had written the great DC bookshelf Robin: The Gauntlet, and I had come up with a completely believable way to return the mind of Michael Collins back to his own body and allow Luther Manning to return as DEATHLOK. Sal Velluto did about three full pages of samples based on Bruce's treatment. They were beautiful. I wish I had those copies. There was another project called SPECTERS that Mitch Byrd had done samples for and Dan Abnett and Ian Edginton had written. Other than MICRONAUTS, the other project that I was really looking forward to was STRIKEFORCE: MORITURI! Julio and I went back, read and re-read all the old issues and had a pretty great pitch from Ian Edginton on it. We were very excited about it. We even thought about killing the characters the same way as was shown in one of the issues, with the editor and writer throwing darts at a board with the characters' names on it! I was the editor of the Conan books when I worked for Carl Potts and I had just finished my return to Conan with the three-issue tale CONAN: THE USURPER. The publisher, Shirrel Rhoades, thought it was a good idea to send me to lunch with Arthur Lieberman of Conan Properties to talk about keeping Conan at Marvel. I was very excited about doing more Conan books and wanted to revisit a proposal I had once made to have the classic "Red Nails" redone by Roy Thomas, but this time with John Romita, Jr. on art. There was also a four-issue follow-up to MEN IN BLACK: RETRIBUTION called MEN IN BLACK: ALIEN IN NEW YORK. Lowell Cunningham, the MIB creator, was going to write it and the art was going to be by Rod Whigham and Phil Moy. Julio and I also wanted to update the HANDBOOK TO THE MARVEL UNIVERSE. We were going to get all new art and keep one inker on the project. Walden Wong, if I'm not mistaken. The craziest thing about this time was that even though I was penalized for DAREDEVIL #376 shipping late, I was given the job of being a consulting editor on a cross-company project with Chaos Comics called "The Supernaturals." The artist on it was Ivan Reis. I was completely and utterly floored by this guy. At the same time, Julio showed me samples from a young artist named Ethan Van Sciver. I couldn't believe the amount of legitimate detail he put into his work. By "legitimate" I mean he wasn't just putting in lines all over everything because it looked cool.
He knew what he was doing. So, I kept watching Ivan just grow and grow on "The Supernaturals" and I got Ethan to do samples for a new POWER PACK. Around the second issue of "The Supernaturals," I took copies of Ivan and Ethan's work into the EIC's office. I wanted these two guys to be Marvel artists in the worst way possible. But the EIC looked at the pages and said that they'd never get out of the independents! Um, right! I was also working on a proposal to restart the entire Marvel Universe from scratch. I was told that was an idea that would never happen. I would have liked to get DAREDEVIL back. Just wasn't going to happen.

Mithra: Back to the final issue #380... you've already spoken about sending DD off on a high note... and you brought back Dan Chichester and Lee Weeks to do it. When had you begun planning this issue, and had you approached other creative teams to work on it?

Tuohy: What I remember is that I was angry. I was angry at a lot of things but mostly it was just anger directed at those who never gave Marvel editorial a chance. We could have done anything that Image or Event had done if we were given the opportunity. We were so hamstrung by so many conflicting things it was sickening. There were things we wanted to do in editorial but we just kept getting our ideas shot down, and then someone "in charge" would come in and say, "We're going to get such and such to breathe new life into our books." And it would be just what we wanted to do in-house! It wasn't that our characters needed new life; they needed good stories! The story is what mattered. In comics, good comics, you need the story and art to work together. To be on the same level. Too many times in the '90s, when I was there, it was just about the art. I wanted to send DAREDEVIL off with a bang. It had nothing to do with Joe or Jimmy. I wanted to show the powers-that-be that Marvel heroes didn't have to be outsourced. I don't remember anything about the solicitation thing that was going on, but I do remember talking to Julio and saying that we had to do something big after "Flying Blind." We went back and forth on what we liked about DAREDEVIL and who could make those things happen in one issue. We both came to the decision that if anyone could do what we asked, it was Dan Chichester and Lee Weeks. Once we really committed ourselves to them, it was my job to get on the phone and sell the idea to them. I probably called Dan first, gave him an idea of what was going on and why I wanted him so badly on this. He then probably told me to call Lee and talk to him and see if Lee would do it. If Lee would do it, then Dan would do it. I remember calling Lee and really having to work the phone! Lee really didn't want to go back to DAREDEVIL at that point. He was working for the WB animation department doing storyboards. I wouldn't be surprised if at one point I was literally on my knees begging him to do this one issue. He said that if Dan would do it, then he would do it. I'm serious. It was like a sitcom. I called back Dan; got him to agree; called back Lee with the news; he agreed and we were off. I want to say that I have these great memories of dealing with Dan and Lee, those crazy stories like all my previous answers had. Sorry. Dan was one of the most amazing writers I had ever had the chance to work with. It was his idea to do a "Pulp Fiction"-style, out-of-time-sequence story. It was timely and relevant in regards to things that were going on in the world. There are two things about Lee that I remember clearly.
The first, what a great artist he is. Really great. The second, trying to convince him that he was good enough. It was funny. I'd call him up every time his pages got delivered to the office. We'd talk about things and then, like clockwork, he'd start worrying about how he thought the art wasn't good enough. Julio and I were just amazed by every page, and he was worried. It was one of those great days to be an editor. Another great memory is Robert Campanella! What a great inker. As an inker myself, I was jealous of his skills. His brush and pen skills were incredible. And I kept Christie [Scheele] as the colorist. She was working with Ian Laughlin at the time and I had my good friend Mark Bernardo color the last few pages to make the deadline. Life was good until the issue came out. It got past three sets of proofreaders. My credit listed me as "Ediitor!" UGH! Oh, well. It was still one good story!

Mithra: We've talked about your work on DD, so now, 13 years later (yes, it makes me feel old too), where has your career taken you after the layoff from Marvel? You've also talked about the anger you had back then... does what happened still bother you?

Tuohy: Let me switch the order of your question for my answer. I'm not angry anymore. I have differing feelings of sadness and indifference which, at times, border on disgust and, strangely, relief. But I am definitely still "bothered" by what happened and I'm sure it's something that I will never get over. Marvel was a great experience for me. I learned a lot of things about myself and about others. All of the negative things that happened were balanced and even surpassed by many more positives. I met and have kept many friends from those days. I learned that how you deal with others is genuinely reflected back on you. Chuck Dixon said I was always one of the "Straight Shooters." Unfortunately, I did learn that there are people who will look at you and say one thing to your face and then stab you in the back as soon as you turn around. I learned that those in charge will surround themselves with people who agree with everything they say or do no matter what the situation is. I learned that there were people who looked out only for themselves while others like Glenn Greenberg, Mark Bernardo and I lived, ate and breathed Marvel. We came up the ladder only to have it pulled out from under us by cheap office politics. I am no innocent golden child who never did anything wrong. All the mistakes I made were personal in nature and hurt me. Drunkenly dancing across the tables in the Bullpen after a particularly insane lunch was a serious low point for me. However, I never, ever tried to undermine another editor (happened to me on more than one occasion), lie to a freelancer (I was accused of it because every other editor had lied to him before; this was by a colorist, of all people), or treat the Marvel brand as anything less than how great it was (one of our former rotating-door, golden-parachute presidents thought the way to make Marvel more popular was by creating a "Marvel Macarena" dance! Seriously!). There was an incident toward the end where another editor and I had a disagreement on the usage of one of my characters. This editor did not follow the procedure for using another editor's characters and my character was not being used correctly. It was pointed out to me by the EIC that my character was not really as important in the scheme of all things Marvel compared to the other editor's.
His line was, "C'mon, it's only Dracula." I never forgot it. That crystallized the inequalities that were rampant. Standards for each office were different based on who was in that office and what character(s) came out of that office. Issues of lateness were overlooked for certain titles while others bore the brunt of lame, thrown-together penalties. It really makes me proud to see people who I believed in doing well. Dan Abnett and Andy Lanning were two of Don's and my favorite writers. If you can believe it, Dan Slott had a time limit imposed on him when he came into the offices. The EIC felt that he was wasting editors' time with all of his ideas. He seemed to forget that ideas are where stories start. Ethan Van Sciver, Ivan Reis, Cary Nord, and Steve Lieber were all artists that I lobbied for who went on to win award after award. I knew they would be great. Greg Scott is another artist working for Marvel, DC and Boom! I hired him for Deep Space Nine and Star Trek Unlimited 15 years ago! Nick Dragotta was my intern!

As for where my career has taken me... out of comics... maybe! After I got laid off, I was pretty despondent. All I ever wanted to do with my life was make comics. When I got my first Marvel business card I handed it out to all of my family to show them I had accomplished what I dreamed about. I had an interview, I think it was in November 1998, at DC Comics with Georg Brewer but didn't get that job, and an interview with Wizard that I thought would go well with all my experience on MARVEL VISION. Not so much. So I took the advice of my forward-thinking ex-fiancée and went back to school to become a teacher. She had always said that my eagerness to interact with the new interns and assistant editors was a sign that I enjoyed sharing my knowledge with others. So I went back to school, finished my art degree and got my teacher certification, all the while making the Dean's List. I have been teaching art for 11 years now. I got married in 2006 and had a beautiful daughter in 2008. I do find myself drifting back towards comics. I swore off them for almost six months after I got laid off but that really was just stupid. I love them too much. Before we moved to Georgia, I did give DC another shot when two openings were made available. One of those openings was actually a position held by Jaye Gardner. Those positions were filled and I realized that my time behind the desk was up. I read lots of comics from the big two. I can't lie. I'm a superhero guy. It does make me long for the old days, though, when I see and read some of the stuff coming out today. I'm not really sure who they're written for anymore. More than once I've had to re-read a book that I just finished because I had no idea what just happened. The art of inking is slowly going away. That makes me sad. I am enjoying the B&W Marvel Essentials right now. Looking at those and imagining today's comics printed like that makes me laugh. They would be dead-lineweight coloring books. Mark Gruenwald said in an assistant editors' meeting that "Good comic art has to work both in black and white and in color." I don't see that anymore. I'm all for good coloring but bad coloring to cover bland art is just bad for everybody involved. This might seem corny or clichéd but I truly believe, with all my heart, that the Marvel Comics I knew, loved, and worked for as an unpaid intern for 8 months (getting coffee, making copies, erasing pages, learning the ropes) died when Mark Gruenwald died.
The air was literally sucked out of the offices and never came back. I still keep in touch with many people from my Marvel days. Facebook is great for that. Glenn Greenberg, Mark and Holly Bernardo, Glenn Herdling, Julio Soto, Tom and Lisa Rolston, all people that I worked with at Marvel, came to my wedding. My wife and I went to a Marvel reunion in 2009. Dan Chichester called her my "trophy wife." I had to live that down for a couple of weeks! I saw my old boss Don Daley for the first time in 10 years. Hands down, he is still the best boss I ever had. It was a great night. I have spent some time working with a writer named Nick Miceli that I taught with in Jersey City. Keep an eye out for him! He is going to have his first work printed in an Image anthology. Mark Powers and I keep talking about doing something together again. Who knows?

Mithra: We've covered quite a bit, and before we end the interview I just wanted to ask a few questions about MARVEL VISION. I guess the closest thing Marvel has to it now is MARVEL PREVIEWS and content on Marvel.com. When you worked on MARVEL VISION, what was your guiding force... did you want to make it more than just the solicits... because there were always some cool interviews and articles... sort of like how MARVEL AGE was.

Tuohy: It's funny that you mentioned MARVEL AGE. If it wasn't for MARVEL AGE, I might never have worked for Marvel at all. I was an avid MARVEL AGE reader. I wanted to know everything I could about everything Marvel. I wanted to know as much behind-the-scenes stuff as I could because I thought that was as interesting as the comics themselves. In issue #97 (I still have it) there was an "Item" looking for college interns. I thought about it. It was something that would require a lot of sacrifice. I was going to college part time and working full time for my uncle making good money. The internship was going to be completely unpaid and I'd still be going to school at night. I called the number (yes, the real Marvel Comics office phone number was printed in MARVEL AGE!) and left a message. I was on a ski trip when I found out that Marvel had called my job looking for me! I called back and spoke to Tom DeFalco's secretary, Mary McPherran. Mary was also in charge of the interns. We spoke and she told me that she actually had called twice looking for me. She gave me another call because she liked the professional way I left my message!

Marvel Age #97, and the letter column advertising for interns at Marvel

We talked for a bit and she scheduled an interview with me in December of 1990. I will always remember going into Marvel for the first time. There was this big Spider-Man on the wall and it was the first thing you saw when you exited the elevators. Mary met me at the door and we spoke at her desk. She thought that I would be a good intern for either Don Daley or Fabian Nicieza. She introduced me to Don as a meeting was just ending in Mark Gruenwald's office. Don interviewed me there, looked at my art portfolio, asked me some questions about comics and offered me the job. I thought about it for all of one second and accepted. That was the start of my Marvel career. I was now able to see comics getting produced first hand and not just from the pages of MARVEL AGE. It was great. I worked for Don and Kevin Kobasic for 6 months. After that, I was able to get a second internship at Marvel, despite my college not wanting me to, with Terry Kavanagh and Mark Powers.
I worked with them for two months before I got word that Kevin was going freelance and Don needed an assistant. Don and I talked about it and he interviewed some others before he settled on me. I worked with Don for about 3 years before I was asked by Carl Potts to join him with a promotion to Associate Editor. Don let me know that he would support my decision even if it meant leaving him. So I took the promotion but a few months later, on January 4, 1995, I was laid off due to "downsizing." I was really at a loss. I wasn't prepared for being unemployed. I had worked non-stop in one job or another since I was 16. I was able to secure some freelance inking. I learned while I was a freelancer just how bad some editors can be and I swore that if the chance ever came again to be an editor, I would never treat any of my freelancers like one editor treated me.

Sometime around May of 1995, I got a call from Jim Brennan in Marvel's advertising department. He and I had become friends and there was talk of Marvel starting a new fan magazine. The catch was that the entire magazine would be digitally created and produced; no old-style typesetting and paste-ups. Jim, his partner in the ad department, Vito Incorvaia, and I met and it was just meant to be. They wanted to hire me on the spot. I was really a bit brash in my negotiations with Marvel. I wanted to be hired back as a full editor with a salary as if I'd never been let go. Surprisingly, I got it. There were all sorts of crazy ideas, names and formats that we bounced around. I always remembered going to these focus groups and listening to what we were told the readers wanted. I really took in all of that and used some of it. What I did was take all of the inspirations that I had when I was reading about comics. I read David Anthony Kraft's COMICS INTERVIEW, Starlog's COMICS SCENE, which was edited by the great Robert Greenberger, and of course MARVEL AGE. I consciously didn't read or refer to Wizard because I didn't want what eventually became MARVEL VISION to be compared to it. WIZARD was its own entity with its own direction and fans. I wanted to tap into the built-in fan base that Marvel had. So after really fighting off the proposed name of Marvel Sponge, Vision was born. I wanted MARVEL VISION to be the be-all-end-all of Marvel stuff. I wanted to make new features but I also knew that some features of MARVEL AGE were too good not to use again. New things were Time Slip, which after Vision was canceled got its own one-shot, "The Coming of the Avengers," by Jim Krueger, Matt Smith and Steve Mitchell; and the short-lived Artist vs. Artist. All the interviews were, of course, new. One of the features I brought back from MARVEL AGE was Andy Mangels's Reel Marvel. He was one of the best writers I was ever able to work with. I enjoyed his work so much that once I learned of his love of Star Trek, I hired him and his then-writing partner Mike Martin when I got the Star Trek likeness books. Honestly, I hated putting the solicits in Vision. They were always the last thing and caused me the most headaches. A great guy named Kevin Tang had the thankless task of getting all of those solicits from all of the editors. He easily had one of the hardest jobs at Marvel. My favorite thing was the letters pages. I read each letter and answered every letter that we could fit. I made sure that all the other letter writers I wasn't able to answer in the letters page at least saw their names in print.
Julio and I made sure that we printed just as many letters that took Marvel to task as those that loved everything we did. Tom Brevoort said that no matter who we interviewed, he always read the Vision letters page first. I got in trouble a few times for some of my answers because I told the truth about what was going on. I believed, and still do, that the fans are smart and if you are honest with them, they'll be fans for life. A funny (I use that term loosely) MARVEL VISION story, and there were many, involved the EIC and, of all people, Mr. Lobdell. In issue #11, we did a long interview with the creators behind the new Ka-Zar. The EIC called me into his office to ask me why the word "and" was missing from a portion of the article. It was almost 3000 words and he called me out to point out a missing conjunction. He did this in front of Mr. Lobdell, who was sitting on the EIC's couch. When I accepted responsibility for my horribly egregious error, Mr. Lobdell actually commended me on being the only Marvel editor he had ever heard of taking responsibility for something!

Scott Lobdell contest from Marvel Vision

I'm sure that my inability to play the corporate game was part of, if not the primary reason for, my eventually getting laid off again in 1998. It was truly sad because after all I had gone through, my loyalty to Marvel, the company, was unwavering. Frankly, it was the bullshit that I just couldn't stand anymore.

Mithra: Seriously, someone wanted to call it Marvel Sponge? I don't know about you, but I enjoy buying back issues of the magazines you've mentioned, just to get a sense of the industry and learn from the various creators. I guess with the internet now, it's become easier to get information and contact people to get your voice heard. I think at the time you were at Marvel, they still weren't focusing on the web site; it was more the Marvel AOL Zone or whatever it was called. I remember there would be updates saying you were holding chat sessions. I get the feeling you enjoyed that interaction with fans.

Tuohy: Yeah, "Marvel: Sponge!" The idea was that the magazine was your way to absorb everything about Marvel. That was one of those focus group things that I just smiled and nodded at. I love those old magazines. I have a full set of the first volume of Comics Scene and a lot of the D.A.K. COMICS INTERVIEW magazines. I kept most of the ones with John Byrne interviews. He was a major influence. Batman has always been my favorite character but, like a lot of people, my life changed when I read X-MEN #s 141 & 142. I'll never be able to thank Terry Kavanagh enough for bringing me along to a meeting at John Byrne's house to discuss NAMOR. One of the coolest things about my life in comics, and I had many, involved Comics Scene. In issues 5 & 6 there was this great article/panel discussion about inking. The participants were Joe Rubinstein, Tom Palmer, Klaus Janson and Bob Layton. The artists were given a Hulk piece by Mike Zeck and each inked it. I learned about how inkers interpret pencilers' work, what their part in the creation puzzle is, and what their influences were. The interview happened in 1983.

The Comics Scene magazine and inking article

Fast forward to 1991: I'm an intern at Marvel and I get to be one of Joe Rubinstein's inking assistants on Infinity Gauntlet #6. Every filled-in black, star field and zip-a-tone is me.
Then I'm Don Daley's assistant editor and I get to work with Klaus Janson on PUNISHER: WAR ZONE, and finally, when I'm an editor, I get to hire Tom Palmer on DRACULA: LORD OF THE UNDEAD! I really was grateful for the opportunities I had at Marvel. I met so many great artists and writers whose work I loved. The icing on the cake was getting to work with these creators who had been so important to me. My wife and I just went to Dragon*Con for the second year in a row. I do miss the creators and fans. Looking back at those interviews I read with all those artists and writers, and how our paths eventually crossed, is priceless to me. The online thing was great. I enjoyed the chat sessions. They were another way for me to connect with the fans. The only thing that weirded me out was how many people kept asking "age/sex." I would keep typing, "Doesn't matter." Unfortunately, there came a point where I just had to cut my losses. That's why, when I got called down to Human Resources after Tom Palmer took Ralph Macchio, Glenn Greenberg and me to lunch at the Society of Illustrators, I knew what was coming down. I signed the paper acknowledging I was out of Marvel again without any hesitation.

Mithra: We've gone over quite a bit and I just wanted to thank you for this opportunity to discuss your work on DAREDEVIL and everything else at Marvel. So, in conclusion, have you been reading DAREDEVIL recently, and what do you think of the new direction?

Tuohy: I can truthfully say that I picked up the first issue of the new DAREDEVIL before this interview even started. Kind of weird how that happened! I picked it up because Mark Waid was writing it. I really enjoyed his run on Legion of Super-Heroes. If anyone could make me read DD again, it was him. I tried to read the stuff that followed after I left but my heart just wasn't in it. I was kind of annoyed by the renumbering, though. It bugs me. I was reading a perfectly great run on THE NEW AVENGERS and then that storyline ended and a month later, I guess, it was THE NEW, NEW AVENGERS with a brand new #1. Sorry, that's just the kind of marketing gimmick that got Marvel in trouble in the '90s. I don't get that. I kind of understood what John Byrne did when the Superman books restarted when he took over, but that didn't happen when he took over the FANTASTIC FOUR. I was/am crazy for the FF. I was able to pick up and enjoy the start of his run as if I was a new reader. Renumbering books may bring in a small number of new readers but probably also brings back the speculator. That's not where comics need to return. I still buy the majority of my comics from a shop in New Jersey called Rogue Comics. The owner and I have been friends for years. About two to three times a year he ships me a box filled with all of my pulled titles and the occasional Alien action figure. I have marathon reading nights after my wife and daughter are asleep. I'm looking forward to sitting down with a pile of Waid's DAREDEVILs and reading about Matt like when I was just a fan! Thank you for the interview. You're my first since I was interviewed for MARVEL VISION to talk about an issue of STAR TREK: UNLIMITED. I feel so special.

----------------------------------------------
(c) 2011 Kuljit Mithra & Tim Tuohy
Daredevil: The Man Without Fear
http://www.manwithoutfear.com
----------------------------------------------
# Copyright (C) 2006-2007 <NAME> <<EMAIL>>
# Copyright (C) 2012 <NAME> <<EMAIL>>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.

"""
Configuration file (aka ``ssh_config``) support.
"""

import fnmatch
import getpass
import os
import re
import shlex
import socket
from functools import partial

from .py3compat import StringIO

invoke, invoke_import_error = None, None
try:
    import invoke
except ImportError as e:
    invoke_import_error = e

from .ssh_exception import CouldNotCanonicalize, ConfigParseError


SSH_PORT = 22


class SSHConfig(object):
    """
    Representation of config information as stored in the format used by
    OpenSSH. Queries can be made via `lookup`. The format is described in
    OpenSSH's ``ssh_config`` man page. This class is provided primarily as a
    convenience to posix users (since the OpenSSH format is a de-facto
    standard on posix) but should work fine on Windows too.

    .. versionadded:: 1.6
    """

    SETTINGS_REGEX = re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)")

    # TODO: do a full scan of ssh.c & friends to make sure we're fully
    # compatible across the board, e.g. OpenSSH 8.1 added %n to ProxyCommand.
    TOKENS_BY_CONFIG_KEY = {
        "controlpath": ["%h", "%l", "%L", "%n", "%p", "%r", "%u"],
        "hostname": ["%h"],
        "identityfile": ["~", "%d", "%h", "%l", "%u", "%r"],
        "proxycommand": ["~", "%h", "%p", "%r"],
        # Doesn't seem worth making this 'special' for now, it will fit well
        # enough (no actual match-exec config key to be confused with).
        "match-exec": ["%d", "%h", "%L", "%l", "%n", "%p", "%r", "%u"],
    }

    def __init__(self):
        """
        Create a new OpenSSH config object.

        Note: the newer alternate constructors `from_path`, `from_file` and
        `from_text` are simpler to use, as they parse on instantiation. For
        example, instead of::

            config = SSHConfig()
            config.parse(open("some-path.config"))

        you could::

            config = SSHConfig.from_file(open("some-path.config"))
            # Or more directly:
            config = SSHConfig.from_path("some-path.config")
            # Or if you have arbitrary ssh_config text from some other source:
            config = SSHConfig.from_text("Host foo\\n\\tUser bar")
        """
        self._config = []

    @classmethod
    def from_text(cls, text):
        """
        Create a new, parsed `SSHConfig` from ``text`` string.

        .. versionadded:: 2.7
        """
        return cls.from_file(StringIO(text))

    @classmethod
    def from_path(cls, path):
        """
        Create a new, parsed `SSHConfig` from the file found at ``path``.

        .. versionadded:: 2.7
        """
        with open(path) as flo:
            return cls.from_file(flo)

    @classmethod
    def from_file(cls, flo):
        """
        Create a new, parsed `SSHConfig` from file-like object ``flo``.

        .. versionadded:: 2.7
        """
        obj = cls()
        obj.parse(flo)
        return obj

    def parse(self, file_obj):
        """
        Read an OpenSSH config from the given file object.

        :param file_obj: a file-like object to read the config file from
        """
        # Start out w/ implicit/anonymous global host-like block to hold
        # anything not contained by an explicit one.
        context = {"host": ["*"], "config": {}}
        for line in file_obj:
            # Strip any leading or trailing whitespace from the line.
            # Refer to https://github.com/paramiko/paramiko/issues/499
            line = line.strip()
            # Skip blanks, comments
            if not line or line.startswith("#"):
                continue

            # Parse line into key, value
            match = re.match(self.SETTINGS_REGEX, line)
            if not match:
                raise ConfigParseError("Unparsable line {}".format(line))
            key = match.group(1).lower()
            value = match.group(2)

            # Host keyword triggers switch to new block/context
            if key in ("host", "match"):
                self._config.append(context)
                context = {"config": {}}
                if key == "host":
                    # TODO 3.0: make these real objects or at least name this
                    # "hosts" to acknowledge it's an iterable. (Doing so prior
                    # to 3.0, despite it being a private API, feels bad -
                    # surely such an old codebase has folks actually relying
                    # on these keys.)
                    context["host"] = self._get_hosts(value)
                else:
                    context["matches"] = self._get_matches(value)
            # Special-case for noop ProxyCommands
            elif key == "proxycommand" and value.lower() == "none":
                # Store 'none' as None; prior to 3.x, it will get stripped out
                # at the end (for compatibility with issue #415). After 3.x,
                # it will simply not get stripped, leaving a nice explicit
                # marker.
                context["config"][key] = None
            # All other keywords get stored, directly or via append
            else:
                if value.startswith('"') and value.endswith('"'):
                    value = value[1:-1]

                # identityfile, localforward, remoteforward keys are special
                # cases, since they are allowed to be specified multiple times
                # and they should be tried in order of specification.
                if key in ["identityfile", "localforward", "remoteforward"]:
                    if key in context["config"]:
                        context["config"][key].append(value)
                    else:
                        context["config"][key] = [value]
                elif key not in context["config"]:
                    context["config"][key] = value
        # Store last 'open' block and we're done
        self._config.append(context)

    def lookup(self, hostname):
        """
        Return a dict (`SSHConfigDict`) of config options for a given
        hostname.

        The host-matching rules of OpenSSH's ``ssh_config`` man page are
        used: For each parameter, the first obtained value will be used. The
        configuration files contain sections separated by ``Host`` and/or
        ``Match`` specifications, and that section is only applied for hosts
        which match the given patterns or keywords.

        Since the first obtained value for each parameter is used, more host-
        specific declarations should be given near the beginning of the file,
        and general defaults at the end.

        The keys in the returned dict are all normalized to lowercase (look
        for ``"port"``, not ``"Port"``). The values are processed according
        to the rules for substitution variable expansion in ``ssh_config``.

        Finally, please see the docs for `SSHConfigDict` for deeper info on
        features such as optional type conversion methods, e.g.::

            conf = my_config.lookup('myhost')
            assert conf['passwordauthentication'] == 'yes'
            assert conf.as_bool('passwordauthentication') is True

        .. note::
            If there is no explicitly configured ``HostName`` value, it will
            be set to the being-looked-up hostname, which is as close as we
            can get to OpenSSH's behavior around that particular option.

        :param str hostname: the hostname to lookup

        .. versionchanged:: 2.5
            Returns `SSHConfigDict` objects instead of dict literals.
        .. versionchanged:: 2.7
            Added canonicalization support.
        .. versionchanged:: 2.7
            Added ``Match`` support.
        """
        # First pass
        options = self._lookup(hostname=hostname)
        # Inject HostName if it was not set (this used to be done incidentally
        # during tokenization, for some reason).
        if "hostname" not in options:
            options["hostname"] = hostname
        # Handle canonicalization
        canon = options.get("canonicalizehostname", None) in ("yes", "always")
        maxdots = int(options.get("canonicalizemaxdots", 1))
        if canon and hostname.count(".") <= maxdots:
            # NOTE: OpenSSH manpage does not explicitly state this, but its
            # implementation for CanonicalDomains is 'split on any
            # whitespace'.
            domains = options["canonicaldomains"].split()
            hostname = self.canonicalize(hostname, options, domains)
            # Overwrite HostName again here (this is also what OpenSSH does)
            options["hostname"] = hostname
            options = self._lookup(hostname, options, canonical=True)
        return options

    def _lookup(self, hostname, options=None, canonical=False):
        # Init
        if options is None:
            options = SSHConfigDict()
        # Iterate all stanzas, applying any that match, in turn (so that
        # things like Match can reference currently understood state)
        for context in self._config:
            if not (
                self._pattern_matches(context.get("host", []), hostname)
                or self._does_match(
                    context.get("matches", []), hostname, canonical, options
                )
            ):
                continue
            for key, value in context["config"].items():
                if key not in options:
                    # Create a copy of the original value,
                    # else it will reference the original list
                    # in self._config and update that value too
                    # when the extend() is being called.
                    options[key] = value[:] if value is not None else value
                elif key == "identityfile":
                    options[key].extend(
                        x for x in value if x not in options[key]
                    )
        # Expand variables in resulting values (besides 'Match exec' which was
        # already handled above)
        options = self._expand_variables(options, hostname)
        # TODO: remove in 3.x re #670
        if "proxycommand" in options and options["proxycommand"] is None:
            del options["proxycommand"]
        return options

    def canonicalize(self, hostname, options, domains):
        """
        Return canonicalized version of ``hostname``.

        :param str hostname: Target hostname.
        :param options: An `SSHConfigDict` from a previous lookup pass.
        :param domains: List of domains (e.g. ``["paramiko.org"]``).

        :returns: A canonicalized hostname if one was found, else ``None``.

        .. versionadded:: 2.7
        """
        found = False
        for domain in domains:
            candidate = "{}.{}".format(hostname, domain)
            family_specific = _addressfamily_host_lookup(candidate, options)
            if family_specific is not None:
                # TODO: would we want to dig deeper into other results? e.g.
                # to find something that satisfies PermittedCNAMEs when that
                # is implemented?
                found = family_specific[0]
            else:
                # TODO: what does ssh use here and is there a reason to use
                # that instead of gethostbyname?
                try:
                    found = socket.gethostbyname(candidate)
                except socket.gaierror:
                    pass
            if found:
                # TODO: follow CNAME (implied by found != candidate?) if
                # CanonicalizePermittedCNAMEs allows it
                return candidate
        # If we got here, it means canonicalization failed.
        # When CanonicalizeFallbackLocal is undefined or 'yes', we just spit
        # back the original hostname.
        if options.get("canonicalizefallbacklocal", "yes") == "yes":
            return hostname
        # And here, we failed AND fallback was set to a non-yes value, so we
        # need to get mad.
        raise CouldNotCanonicalize(hostname)

    def get_hostnames(self):
        """
        Return the set of literal hostnames defined in the SSH config (both
        explicit hostnames and wildcard entries).
        """
        hosts = set()
        for entry in self._config:
            hosts.update(entry["host"])
        return hosts

    def _pattern_matches(self, patterns, target):
        # Convenience auto-splitter if not already a list
        if hasattr(patterns, "split"):
            patterns = patterns.split(",")
        match = False
        for pattern in patterns:
            # Short-circuit if target matches a negated pattern
            if pattern.startswith("!") and fnmatch.fnmatch(
                target, pattern[1:]
            ):
                return False
            # Flag a match, but continue (in case of later negation) if
            # regular match occurs
            elif fnmatch.fnmatch(target, pattern):
                match = True
        return match

    # TODO 3.0: remove entirely (is now unused internally)
    def _allowed(self, hosts, hostname):
        return self._pattern_matches(hosts, hostname)

    def _does_match(self, match_list, target_hostname, canonical, options):
        matched = []
        candidates = match_list[:]
        local_username = getpass.getuser()
        while candidates:
            candidate = candidates.pop(0)
            passed = None
            # Obtain latest host/user value every loop, so later Match may
            # reference values assigned within a prior Match.
            configured_host = options.get("hostname", None)
            configured_user = options.get("user", None)
            type_, param = candidate["type"], candidate["param"]
            # Canonical is a hard pass/fail based on whether this is a
            # canonicalized re-lookup.
            if type_ == "canonical":
                if self._should_fail(canonical, candidate):
                    return False
            # The parse step ensures we only see this by itself or after
            # canonical, so it's also an easy hard pass. (No negation here as
            # that would be uh, pretty weird?)
            elif type_ == "all":
                return True
            # From here, we are testing various non-hard criteria,
            # short-circuiting only on fail
            elif type_ == "host":
                hostval = configured_host or target_hostname
                passed = self._pattern_matches(param, hostval)
            elif type_ == "originalhost":
                passed = self._pattern_matches(param, target_hostname)
            elif type_ == "user":
                user = configured_user or local_username
                passed = self._pattern_matches(param, user)
            elif type_ == "localuser":
                passed = self._pattern_matches(param, local_username)
            elif type_ == "exec":
                exec_cmd = self._tokenize(
                    options, target_hostname, "match-exec", param
                )
                # This is the laziest spot in which we can get mad about an
                # inability to import Invoke.
                if invoke is None:
                    raise invoke_import_error
                # Like OpenSSH, we 'redirect' stdout but let stderr bubble up
                passed = invoke.run(exec_cmd, hide="stdout", warn=True).ok
            # Tackle any 'passed, but was negated' results from above
            if passed is not None and self._should_fail(passed, candidate):
                return False
            # Made it all the way here? Everything matched!
            matched.append(candidate)
        # Did anything match? (To be treated as bool, usually.)
        return matched

    def _should_fail(self, would_pass, candidate):
        return would_pass if candidate["negate"] else not would_pass

    def _tokenize(self, config, target_hostname, key, value):
        """
        Tokenize a string based on current config/hostname data.

        :param config: Current config data.
        :param target_hostname: Original target connection hostname.
        :param key: Config key being tokenized (used to filter token list).
        :param value: Config value being tokenized.

        :returns: The tokenized version of the input ``value`` string.
        """
        allowed_tokens = self._allowed_tokens(key)
        # Short-circuit if no tokenization possible
        if not allowed_tokens:
            return value
        # Obtain potentially configured hostname, for use with %h.
        # Special-case where we are tokenizing the hostname itself, to avoid
        # replacing %h with a %h-bearing value, etc.
        configured_hostname = target_hostname
        if key != "hostname":
            configured_hostname = config.get("hostname", configured_hostname)
        # Ditto the rest of the source values
        if "port" in config:
            port = config["port"]
        else:
            port = SSH_PORT
        user = getpass.getuser()
        if "user" in config:
            remoteuser = config["user"]
        else:
            remoteuser = user
        local_hostname = socket.gethostname().split(".")[0]
        local_fqdn = LazyFqdn(config, local_hostname)
        homedir = os.path.expanduser("~")
        # The actual tokens!
        replacements = {
            # TODO: %%???
            # TODO: %C?
            "%d": homedir,
            "%h": configured_hostname,
            # TODO: %i?
            "%L": local_hostname,
            "%l": local_fqdn,
            # also this is pseudo buggy when not in Match exec mode so
            # document that. also WHY is that the case?? don't we do all of
            # this late?
            "%n": target_hostname,
            "%p": port,
            "%r": remoteuser,
            # TODO: %T? don't believe this is possible however
            "%u": user,
            "~": homedir,
        }
        # Apply each replacement that is allowed for this config key
        tokenized = value
        for find, replace in replacements.items():
            if find not in allowed_tokens:
                continue
            tokenized = tokenized.replace(find, str(replace))
        # TODO: log? eg that value -> tokenized
        return tokenized

    def _allowed_tokens(self, key):
        """
        Given config ``key``, return list of token strings to tokenize.

        .. note::
            This feels like it wants to eventually go away, but is used to
            preserve as-strict-as-possible compatibility with OpenSSH, which
            for whatever reason only applies some tokens to some config keys.
        """
        return self.TOKENS_BY_CONFIG_KEY.get(key, [])

    def _expand_variables(self, config, target_hostname):
        """
        Return a dict of config options with expanded substitutions for a
        given original & current target hostname.

        Please refer to :doc:`/api/config` for details.

        :param dict config: the currently parsed config
        :param str target_hostname: the hostname whose config is being looked
            up
        """
        for k in config:
            if config[k] is None:
                continue
            tokenizer = partial(self._tokenize, config, target_hostname, k)
            if isinstance(config[k], list):
                for i, value in enumerate(config[k]):
                    config[k][i] = tokenizer(value)
            else:
                config[k] = tokenizer(config[k])
        return config

    def _get_hosts(self, host):
        """
        Return a list of host_names from host value.
        """
        try:
            return shlex.split(host)
        except ValueError:
            raise ConfigParseError("Unparsable host {}".format(host))

    def _get_matches(self, match):
        """
        Parse a specific Match config line into a list-of-dicts for its
        values.

        Performs some parse-time validation as well.
        """
        matches = []
        tokens = shlex.split(match)
        while tokens:
            match = {"type": None, "param": None, "negate": False}
            type_ = tokens.pop(0)
            # Handle per-keyword negation
            if type_.startswith("!"):
                match["negate"] = True
                type_ = type_[1:]
            match["type"] = type_
            # all/canonical have no params (everything else does)
            if type_ in ("all", "canonical"):
                matches.append(match)
                continue
            if not tokens:
                raise ConfigParseError(
                    "Missing parameter to Match '{}' keyword".format(type_)
                )
            match["param"] = tokens.pop(0)
            matches.append(match)
        # Perform some (easier to do now than in the middle) validation that
        # is better handled here than at lookup time.
        keywords = [x["type"] for x in matches]
        if "all" in keywords:
            allowable = ("all", "canonical")
            ok, bad = (
                list(filter(lambda x: x in allowable, keywords)),
                list(filter(lambda x: x not in allowable, keywords)),
            )
            err = None
            if any(bad):
                err = "Match does not allow 'all' mixed with anything but 'canonical'"  # noqa
            elif "canonical" in ok and ok.index("canonical") > ok.index(
                "all"
            ):
                err = "Match does not allow 'all' before 'canonical'"
            if err is not None:
                raise ConfigParseError(err)
        return matches


def _addressfamily_host_lookup(hostname, options):
    """
    Try looking up ``hostname`` in an IPv4 or IPv6 specific manner.

    This is an odd duck due to needing use in two divergent use cases. It
    looks up ``AddressFamily`` in ``options`` and if it is ``inet`` or
    ``inet6``, this function uses `socket.getaddrinfo` to perform a
    family-specific lookup, returning the result if successful.

    In any other situation -- lookup failure, or ``AddressFamily`` being
    unspecified or ``any`` -- ``None`` is returned instead and the caller is
    expected to do something situation-appropriate like calling
    `socket.gethostbyname`.

    :param str hostname: Hostname to look up.
    :param options: `SSHConfigDict` instance w/ parsed options.
    :returns: ``getaddrinfo``-style tuples, or ``None``, depending.
    """
    address_family = options.get("addressfamily", "any").lower()
    if address_family == "any":
        return
    try:
        family = socket.AF_INET6
        if address_family == "inet":
            family = socket.AF_INET
        return socket.getaddrinfo(
            hostname,
            None,
            family,
            socket.SOCK_DGRAM,
            socket.IPPROTO_IP,
            socket.AI_CANONNAME,
        )
    except socket.gaierror:
        pass


class LazyFqdn(object):
    """
    Returns the host's fqdn on request as string.
    """

    def __init__(self, config, host=None):
        self.fqdn = None
        self.config = config
        self.host = host

    def __str__(self):
        if self.fqdn is None:
            #
            # If the SSH config contains AddressFamily, use that when
            # determining the local host's FQDN. Using socket.getfqdn() from
            # the standard library is the most general solution, but can
            # result in noticeable delays on some platforms when IPv6 is
            # misconfigured or not available, as it calls getaddrinfo with no
            # address family specified, so both IPv4 and IPv6 are checked.
            #

            # Handle specific option
            fqdn = None
            results = _addressfamily_host_lookup(self.host, self.config)
            if results is not None:
                for res in results:
                    af, socktype, proto, canonname, sa = res
                    if canonname and "." in canonname:
                        fqdn = canonname
                        break
            # Handle 'any' / unspecified / lookup failure
            if fqdn is None:
                fqdn = socket.getfqdn()
            # Cache
            self.fqdn = fqdn
        return self.fqdn


class SSHConfigDict(dict):
    """
    A dictionary wrapper/subclass for per-host configuration structures.

    This class introduces some usage niceties for consumers of `SSHConfig`,
    specifically around the issue of variable type conversions: normal value
    access yields strings, but there are now methods such as `as_bool` and
    `as_int` that yield casted values instead.

    For example, given the following ``ssh_config`` file snippet::

        Host foo.example.com
            PasswordAuthentication no
            Compression yes
            ServerAliveInterval 60

    the following code highlights how you can access the raw strings as well
    as usefully Python type-casted versions (recalling that keys are all
    normalized to lowercase first)::

        my_config = SSHConfig()
        my_config.parse(open('~/.ssh/config'))
        conf = my_config.lookup('foo.example.com')

        assert conf['passwordauthentication'] == 'no'
        assert conf.as_bool('passwordauthentication') is False
        assert conf['compression'] == 'yes'
        assert conf.as_bool('compression') is True
        assert conf['serveraliveinterval'] == '60'
        assert conf.as_int('serveraliveinterval') == 60

    .. versionadded:: 2.5
    """

    def __init__(self, *args, **kwargs):
        # Hey, guess what? Python 2's userdict is an old-style class!
        super(SSHConfigDict, self).__init__(*args, **kwargs)

    def as_bool(self, key):
        """
        Express given key's value as a boolean type.

        Typically, this is used for ``ssh_config``'s pseudo-boolean values
        which are either ``"yes"`` or ``"no"``. In such cases, ``"yes"``
        yields ``True`` and any other value becomes ``False``.

        .. note::
            If (for whatever reason) the stored value is already boolean in
            nature, it's simply returned.

        .. versionadded:: 2.5
        """
        val = self[key]
        if isinstance(val, bool):
            return val
        return val.lower() == "yes"

    def as_int(self, key):
        """
        Express given key's value as an integer, if possible.

        This method will raise ``ValueError`` or similar if the value is not
        int-appropriate, same as the builtin `int` type.

        .. versionadded:: 2.5
        """
        return int(self[key])
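A short usage sketch tying the module above together; the config text and host name below are purely illustrative and not taken from any real setup:

# Hedged usage sketch for the SSHConfig/SSHConfigDict classes defined above.
# The sample config and hostname are made up for illustration only.
sample = """
Host gateway.example.com
    Port 2222
    Compression yes
"""

config = SSHConfig.from_text(sample)
conf = config.lookup("gateway.example.com")
assert conf["hostname"] == "gateway.example.com"  # injected when HostName is unset
assert conf.as_int("port") == 2222
assert conf.as_bool("compression") is True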
// Upload the photo stored in this instance
public void uploadPhoto() {
    if (!SystemUtilities.isOnline(CloudCameraApplication.getAppContext())) {
        SystemUtilities.reportError(TAG,
                "Cannot upload photo without internet connection. Visit upload screen to retry.");
        this.setAborted(true);
        return;
    }
    final ParseFile picFile = this.getParseFile();
    picFile.saveInBackground(
            new SaveCallback() {
                @Override
                public void done(ParseException e) {
                    if (e == null) {
                        savePhoto(picFile);
                    } else {
                        PhotoUpload.this.showError(e, "Photo Upload Failure ");
                    }
                }
            },
            new ProgressCallback() {
                @Override
                public void done(Integer percentDone) {
                    PhotoUpload.this.setProgress(percentDone);
                }
            });
}
What do progressives want now? Obviously, they want a Democratic victory in 2016—a goal for which some, but not all, of them are willing to swallow their reservations about Hillary Clinton. But there’s a larger question. After six years of the Obama Administration—which has been, from the progressive perspective, a time of important advances on issues such as health care, gay rights, and environmental regulations, but of relatively little change in other areas—what should the policy goals be going forward? Presumably, Clinton will address this question in January, when she is expected to launch her campaign publicly.

In the meantime, Bernie Sanders, the Independent senator from Vermont, who, as my colleague Ryan Lizza recently wrote in the magazine, is also eyeing a 2016 run, is promoting a twelve-step “Economic Agenda for America,” which he outlined on the Senate floor Tuesday. While Sanders has about as much chance of becoming President as I have—i.e., none—he’s doing the Democratic Party, with which he caucuses, a service by issuing some specific proposals, many of which Democrats of all stripes can rally around.

As you might expect, given that the Sanders platform comes from about the only politician in the American legislature who openly describes himself as a socialist, it is overtly populist and takes aim at economic élites. On Twitter, where he maintains a busy presence, Sanders posted an image declaring, “We have a corporate establishment whose greed is destroying the economy, a political establishment which is beholden to billionaires and a media establishment which largely ignores the major issues facing working families. We need a political revolution.” During his speech in the Senate, he returned to the theme, posing the question: “Are we prepared to take on the enormous economic and political power of the billionaire class or do we continue to slide into economic and political oligarchy?”

By invoking the language of insurrection, Sanders was mimicking the tactics of right-wing Republicans, who are forever calling for a revolution in Washington. (What they really want, of course, is a counterrevolution.) But if you look closely at his proposals rather than how he packages them, it’s perfectly clear that he’s no Leninist intent on seizing the commanding heights of the economy. In many ways, he’s a traditional liberal Democrat, content to maintain the existing economic system but also eager to tilt it in a more egalitarian and green direction. In many areas, President Obama has already campaigned for versions of the policies that Sanders is putting forward, and few Clintonites would have any trouble endorsing them. The exceptions are his proposals to break up the big banks and move beyond Obamacare to a “Medicare-for-all” system of health care.

The heart of Sanders’s program is an effort to tackle wage stagnation and promote higher-paying jobs. To this end, he wants to invest a lot more public money—up to a trillion dollars—in infrastructure, to raise the minimum wage, and to make it easier for unions to organize and negotiate collective-bargaining agreements. In addition, he would seek to close the gender gap in pay, spend more on education (he doesn’t say how much), close corporate tax loopholes, and provide federal financing for the formation of worker-run coöperatives. With the exception of promoting coöperatives, a long-standing and laudable interest of Sanders, there is nothing out of the mainstream here.
Larry Summers, who worked for Bill Clinton and Barack Obama and whom some progressives regard as their bête noire, has long called for a big infrastructure program. Obama has repeatedly asked Congress to raise the minimum wage, close corporate loopholes, and pass a “card check” bill, which would make it easier for unions to organize. And, ever since 2009, when he signed the Lilly Ledbetter Fair Pay Act, Obama has made closing the gender gap in compensation one of his priorities.

Sanders also said that he wants to “transform energy systems away from fossil fuels to create jobs while beginning to reverse global warming and make the planet habitable for future generations.” But that is a general aspiration rather than a detailed policy prescription, and it’s one that President Obama and many other Democrats have also voiced. In the past, Sanders has sponsored legislation for a carbon tax, which was, of course, stillborn. Such a proposal goes beyond the Obama Administration’s endorsement of a cap-and-trade system, but both of these policies amount to attempts to force polluters to pay a price for emitting carbon. Nor should it be forgotten that, way back in 1993, President Bill Clinton proposed a B.T.U. tax.

Sanders and his progressive supporters may come into conflict with moderate Democrats on trade, health care, and Wall Street, however. In a Huffington Post article at the start of this week, Sanders wrote, “We must end our disastrous trade policies (NAFTA, CAFTA, PNTR with China, etc.) which enable corporate America to shut down plants in this country and move to China and other low-wage countries.” Such a statement goes well beyond Sanders’s pledge, in the press release posted to his Web site on Tuesday, to “reform trade policies that have shuttered more than 60,000 factories and cost more than 4.9 million decent-paying manufacturing jobs.” The Clinton-Obama wing of the Democratic Party, although it has adopted a somewhat more critical stance toward trade agreements—calling for them to include stricter labor laws, for example—still believes, on a fundamental level, that free trade is good for America. Likewise, Sanders’s proposal to replace the 2010 health-care reform with a public option, available to all—another policy he has supported for ages—is unlikely to gain much support from moderate Democrats who have spent years fighting with the Republicans over the Affordable Care Act.

Another potentially contentious area between progressives and moderates is the treatment of Wall Street. Progressives’ enthusiastic response, a couple of weeks ago, to Senator Elizabeth Warren’s grilling of Bill Dudley, the head of the New York Federal Reserve, demonstrated that even today, six years after the financial crisis of 2008, there’s nothing like a bit of banker-bashing to rile up the troops. Sanders, a long-standing critic of financiers and their enablers, is no slouch himself in this area. In his HuffPo article, he wrote, “Financial institutions cannot be an island unto themselves, standing as huge profit centers outside of the real economy.… They are too powerful to be reformed. They must be broken up.” As with other elements of Sanders’s program, this is not actually a very radical proposal. If you gathered the country’s twenty leading economists, a majority of them would probably agree that, in an ideal world, banks would be a lot smaller.
Given what happened in the run-up to 2008, there’s an obvious danger that, at some point, the big banks will go on another reckless lending binge, relying on the implicit promise of a bailout if they get into trouble. One way to tackle this problem would be to make the banks smaller, so that they could be allowed to fail, and the incentives they face would be very different.

Whereas a proposal to break up the banks is perfectly defensible on economic grounds, it could well prove politically toxic to a Democratic Party that, for all its ties to Main Street, remains heavily reliant on Wall Street for financial donations. The Clintons, in particular, have long-established links to firms like Goldman Sachs and Citigroup. In addition, their economic advisers have often endorsed the mercantilist argument that America needs gargantuan banks to compete with large foreign competitors. Right now, it seems unlikely that Hillary would repudiate this argument.

That doesn’t mean she’s inevitably headed toward a bitter confrontation with the left. The Sanders manifesto shows that there are many areas where progressives and moderates can come together, and it’s in these places, surely, where Hillary will pitch her tent. But the flashpoint, if there is one, is likely to be Wall Street.
def evaluate(di, le, po, out, query, syn):
    # Non-positional evaluation: helpers such as vectorize_query, get_posts,
    # get_candidates, get_window and get_final are assumed to be defined
    # elsewhere in this module.
    print("not positional")  # Python 3 print function (was a Python 2 print statement)
    q_vector = vectorize_query(di, len(le), query)
    postings = get_posts(di, po, syn)
    candidates = get_candidates(postings, le)
    window = get_window(candidates)
    return get_final(candidates, q_vector, window)
def _get_hostclass(self, group_name):
    # Drop the first and last underscore-separated tokens and rejoin the middle,
    # e.g. "asg_web_frontend_20200101" -> "web_frontend" (example format assumed).
    parts = group_name.split('_')[1:-1]
    return '_'.join(parts)
def testJ_GetCell50(self):
    g = Grid(0, -10, 1, 0, 1)
    i, j = g.getCell(.1, -9.1)
    self.assertEqual(i, 0)
    self.assertEqual(j, 0)
    i, j = g.getCell(.1, -.1)
    self.assertEqual(i, 0)
    self.assertEqual(j, 9)
    i, j = g.getCell(.1, 9.1)
    self.assertEqual(i, 0)
    self.assertEqual(j, 19)
package kedge_http

import (
	"crypto/tls"
	"net/http"

	"github.com/improbable-eng/kedge/pkg/http/tripperware"
	kedge_map "github.com/improbable-eng/kedge/pkg/map"
	"golang.org/x/net/http2"
)

// NewClient constructs new HTTP client that supports proxying by kedge.
// NOTE: No copy of parentTransport is done, so it will modify parent one.
func NewClient(mapper kedge_map.Mapper, clientTls *tls.Config, parentTransport *http.Transport) *http.Client {
	if clientTls != nil {
		parentTransport.TLSClientConfig = clientTls
		if err := http2.ConfigureTransport(parentTransport); err != nil {
			panic(err) // this should never happen, but let's not lose an error.
		}
	}

	return &http.Client{
		Transport: tripperware.WrapForMapping(mapper,
			tripperware.WrapForRouting(
				tripperware.DefaultWithTransport(parentTransport, clientTls),
			),
		),
	}
}
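A minimal usage sketch for the client above. This is a hedged example, not part of the kedge source: the import path assumed for this kedge_http package, the way the kedge_map.Mapper is obtained, and the URL are all placeholders.

package main

import (
	"log"
	"net/http"

	kedge_http "github.com/improbable-eng/kedge/pkg/http" // assumed import path for the package above
	kedge_map "github.com/improbable-eng/kedge/pkg/map"
)

func main() {
	// Placeholder: supply a real mapper built from your routing rules here.
	var mapper kedge_map.Mapper
	// nil clientTls leaves the parent transport's TLS configuration untouched.
	client := kedge_http.NewClient(mapper, nil, &http.Transport{})
	resp, err := client.Get("http://service.internal.example/healthz") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println("status:", resp.Status)
}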
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * This builder is opinionated:
 * 1. It assumes that false is a reasonable default for sslEnabled and corsEnabled.
 * 2. It assumes an empty list for enabledEndpoints is a reasonable default.
 * 3. It assumes "*" is a reasonable default for corsOrigin.
 * 4. Any "unset" objects get initialized to null.
 */
public abstract class RpcServerBuilder<T extends RpcServerBuilder<T>> {
    // package private for now.
    // can consider making these private and enforce access through accessors,
    // but I personally like to avoid the visual clutter of accessors
    String hostName;
    Integer port;

    boolean corsEnabled = false;
    String corsOrigin = "*";

    List<String> enabledEndpoints = new ArrayList<>();
    List<String> enabledMethods = new ArrayList<>();
    List<String> disabledMethods = new ArrayList<>();

    boolean sslEnabled = false;
    String sslCertPath;
    char[] sslCertPass;

    Integer workerPoolSize = null;
    Integer ioPoolSize = null;
    Integer requestQueueSize = null;
    boolean stuckThreadDetectorEnabled = false;

    public T setUrl(String hostName, int port) {
        this.hostName = Objects.requireNonNull(hostName);
        if (port < 1) throw new RuntimeException("Port number must be greater than 0.");
        if (port < 1024) System.out.println("Ports < 1024 are privileged and require sudo.");
        if (port > 65535) System.out.println("Ports > 65535 are not supported by OS.");
        this.port = port; // autoboxing
        return self();
    }

    public T enableCors() {
        this.corsEnabled = true;
        return self();
    }

    public T enableCorsWithOrigin(String origin) {
        this.corsEnabled = true;
        this.corsOrigin = Objects.requireNonNull(origin);
        return self();
    }

    public T enableEndpoints(List<String> enabledEndpoints) {
        // empty List is a valid input here.
        this.enabledEndpoints = Objects.requireNonNull(enabledEndpoints);
        return self();
    }

    public T enableMethods(List<String> enabledMethods) {
        // Empty List or null are valid input here.
        this.enabledMethods = Objects.requireNonNullElse(enabledMethods, new ArrayList<>());
        return self();
    }

    public T disableMethods(List<String> disabledMethods) {
        // Empty List or null are valid input here.
        this.disabledMethods = Objects.requireNonNullElse(disabledMethods, new ArrayList<>());
        return self();
    }

    public T enableSsl(String sslCertName, char[] sslCertPass) {
        this.sslEnabled = true;
        this.sslCertPath = Objects.requireNonNull(sslCertName);
        this.sslCertPass = Objects.requireNonNull(sslCertPass);
        return self();
    }

    public T setWorkerPoolSize(Integer workerPoolSize) {
        this.workerPoolSize = workerPoolSize;
        return self();
    }

    public T setIoPoolSize(Integer x) {
        this.ioPoolSize = x;
        return self();
    }

    public T setRequestQueueSize(Integer x) {
        this.requestQueueSize = x;
        return self();
    }

    public T setStuckThreadDetectorEnabled(boolean x) {
        this.stuckThreadDetectorEnabled = x;
        return self();
    }

    protected abstract RpcServer build();

    // Subclasses must override this method to return "this"
    protected abstract T self();
}
/**
 * This template adds to each test a parameter and an item of a sequence.
 * <p>
 * The tests are created from the parameters (each new test will have a
 * different parameter and adopt the parameter's name) and there will be many
 * rounds, each one named after the test combined with the string
 * representation of the sequence item.
 * <p>
 * The performances returned are the average of the performances over all the
 * items of the sequence, while intermediate performances are calculated on the
 * actual sequence item.
 * <p>
 * In this way it is possible to test different {@code Map}s (parameters)
 * with different sizes (sequence).
 * <p>
 * To create the name of the test use the static method
 * {@link #testName(String, Object) }.
 *
 * @author Francesco Illuminati
 */
public abstract class ParametrizedSequencePerformanceTemplate<P,S> extends SimplePerformanceTemplate {

    private final ProgressionConfigurator perfInstrumenter = new ProgressionConfigurator();

    public ParametrizedSequencePerformanceTemplate() {
        perfInstrumenter.setPrintOutStdDeviation(true);
    }

    /** It's the best choice to use with unit tests. */
    @Override
    public void testWithoutOutput() {
        perfInstrumenter.setPrintOutStdDeviation(false);
        executePerformanceTest(NullPerformanceConsumer.INSTANCE, NullPerformanceConsumer.INSTANCE);
    }

    /**
     * Configures the test. Please note that {@code ProgressionConfigurator}
     * has some sensible defaults.
     * <pre>
     * config.setBaseIterations(1_000)
     *       .setMaxStandardDeviation(5);
     * </pre>
     */
    public abstract void init(final ProgressionConfigurator config);

    /**
     * Adds named parameters to tests.
     * <pre>
     * parameters
     *     .addParameter(NAME_1, VALUE_1)
     *     .addParameter(NAME_2, VALUE_2)
     *     .addParameter(NAME_3, VALUE_3);
     * </pre>
     * @param parameters
     */
    public abstract void addParameters(final ParameterContainer<P> parameters);

    /**
     * Adds a sequence to tests.
     * <pre>
     * sequences.setSequence('x', 'y', 'z');
     * </pre>
     */
    public abstract void addSequence(final SequenceContainer<?, S> sequences);

    /**
     * Defines the test to be executed. The test will be injected with
     * parameters (creating brand-new tests that take the parameters' names)
     * and with a sequence item (creating different series of tests).
     * <p>
     * It <b>could</b> be possible to
     * define more than one test, but it would be complex to
     * match them with the right assertions (consumers). Use the
     * <i>fluent interface</i> approach if you need to do that: see
     * {@link com.fillumina.performance.PerformanceTimerFactory}.
     * In any case, each test defined will act in a totally independent way.
     * <p>
     * It's protected so you don't have to export its output type.
     *
     * @return the test to be executed.
     */
    protected abstract ParametrizedSequenceRunnable<P, S> getTest();

    /**
     * To discriminate between different tests use test's and parameter's names:
     * <pre>
     * assertion.forExecution(<b>TEST_NAME</b>)
     *     .assertPercentageFor(<b>PARAMETER_NAME</b>).sameAs(<b>PERCENTAGE</b>);
     * </pre>
     */
    public abstract void addAssertions(
            final AssertionSuiteBuilder assertionBuilder);

    /**
     * The assertions apply to each combination of test + sequence.
     */
    public abstract void addIntermediateAssertions(
            final PerformanceAssertion assertion);

    /** Called at the end of the execution, use for assertions.
*/ public void onAfterExecution( final Map<String, LoopPerformances> performanceMap) {} @Override public void executePerformanceTest( final PerformanceConsumer iterationConsumer, final PerformanceConsumer resultConsumer) { init(perfInstrumenter); perfInstrumenter.setIterationConsumer(iterationConsumer); final ParametrizedSequencePerformanceSuite<P,S> suite = perfInstrumenter.create() .instrumentedBy(new ParametrizedSequencePerformanceSuite<P,S>()); addParameters(suite); addSequence(suite); suite.addPerformanceConsumer(resultConsumer); final AssertionSuiteBuilder assertionBuilder = new AssertionSuiteBuilder(); addAssertions(assertionBuilder); suite.addPerformanceConsumer( assertionBuilder.getAssertPerformanceForExecutionSuite()); final AssertPerformance assertion = new AssertPerformance(); addIntermediateAssertions(assertion); suite.executeTest("test", getTest()) .use(assertion); onAfterExecution(suite.getTestLoopPerformances()); } /** * Helper to calculate the test name from the name of the test * and the name of the sequence item. */ public static String testName(final String name, final Object seqItem) { return ParametrizedSequencePerformanceSuite.createName(name, seqItem); } }
def makeit(n, k):
    # True if solving the first n problems fits in the available time:
    # the i-th problem takes 5*i minutes, so the total is 5*n*(n+1)/2,
    # and only 240 - k minutes are available.
    return (5 + 5 * n) * n / 2 <= (240 - k)


n, k = map(int, input().split())
start = 0
end = n
m = -1
# Binary search for the largest feasible number of problems.
while start <= end:
    a = (start + end) // 2
    if makeit(a, k):
        m = max(m, a)
        start = a + 1
    else:
        end = a - 1
print(m)
/** Enum variants that represent a symbol (e.g. `->`, `@`)

Each of them follows its [unicode name](http://xahlee.info/comp/unicode_index.html) */
pub mod Symbol {
    pub use super::Rule::TriggerAt as at;

    pub mod arrow {
        pub use crate::core::Rule::{
            TransitionTo as right,
            TransitionFrom as left,
            TransitionToggle as both,
        };
    }

    pub mod double_arrow {
        pub use crate::core::Rule::{
            LoopTo as right,
            LoopFrom as left,
        };
    }

    pub mod tail_arrow {
        pub use crate::core::Rule::{
            TransientLoopTo as right,
            TransientLoopFrom as left,
        };
    }
}
/** * The implementation of the interface {@link TraceStepIF} used by SMC. * * @author Wenhao Wu ([email protected]) */ public class TraceStep implements TraceStepIF<Integer> { /** * The transition related with <code>this</code> trace step. */ private String transition; /** * The final state of <code>this</code> trace-step */ private Integer finalState; /** * Construct an instance of {@link TraceStep} with given * <code>transition</code> and <code>finalState</code> * * @param transition * the transition related with <code>this</code> trace step. * @param finalState * the final state of <code>this</code> trace step. */ public TraceStep(String transition, Integer finalState) { this.transition = transition; this.finalState = finalState; } @Override public Integer getFinalState() { return this.finalState; } @Override public String toString() { StringBuilder sBuilder = new StringBuilder(); sBuilder.append("['"); sBuilder.append(transition); sBuilder.append("'=>State<"); sBuilder.append(finalState); sBuilder.append(">]"); return sBuilder.toString(); } }
// WithServerValidator validates fields; if all == true, errors for all invalid
// fields are returned, otherwise only the first error is returned.
func WithServerValidator(all bool) ServerOption {
	return newFuncServerOption(func(so *serverOptions) {
		so.interceptors = append(so.interceptors, validator.UnaryServerInterceptor(all))
	})
}
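A hedged wiring sketch for the option above. NewServer and the rest of the functional-options plumbing (serverOptions, newFuncServerOption) are assumed from context rather than shown here; only WithServerValidator comes from the snippet.

// Sketch, assuming a NewServer(...ServerOption) constructor exists in this package.
srv := NewServer(
	WithServerValidator(true), // true: return errors for all invalid fields, not just the first
)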
/**
 * Returns whether a method is called from a public method in the same class. Calls itself
 * recursively until no more calls are found. The method does not currently differentiate
 * between overloaded methods, since it only compares method names.
 *
 * @param allMethods      A list of MethodDeclarations of all methods in a class
 * @param methodToLookFor The method to check for calls from a public method in the same class
 *
 * @return True if the method is called from another public method in the same class
 */
@SuppressWarnings("PMD.LinguisticNaming")
public Feedback isMethodCalledFromPublic(
    List<MethodDeclaration> allMethods, MethodDeclaration methodToLookFor) {
    List<MethodCallExpr> publicCalls = new ArrayList<>();
    List<MethodCallExpr> privateCalls = new ArrayList<>();
    List<Feedback> childFeedbacks = new ArrayList<>();
    boolean result = false;
    for (MethodDeclaration current : allMethods) {
        current.findAll(MethodCallExpr.class).forEach(methodCallExpr -> {
            if (methodCallExpr.getChildNodes().get(0).toString().equals(
                methodToLookFor.getNameAsString())) {
                if (current.isPrivate()) {
                    privateCalls.add(methodCallExpr);
                } else {
                    publicCalls.add(methodCallExpr);
                }
            }
        });
    }
    result = !publicCalls.isEmpty();
    if (!result) {
        for (MethodCallExpr currentExpr : privateCalls) {
            Node node = currentExpr;
            while (!(node instanceof ClassOrInterfaceDeclaration) && !result) {
                if (node instanceof MethodDeclaration) {
                    result = !isMethodCalledFromPublic(allMethods, (MethodDeclaration) node)
                        .getIsError();
                    break;
                } else {
                    node = node.getParentNode().get();
                }
            }
        }
    }
    if (result) {
        return Feedback.getSuccessfulFeedback();
    } else {
        return Feedback.getFeedbackWithChildren(new FeedbackTrace(methodToLookFor), childFeedbacks);
    }
}
package com.riversoft.weixin.qy.contact;

import com.riversoft.weixin.qy.TestConfiguration;
import com.riversoft.weixin.qy.contact.user.Invitation;
import com.riversoft.weixin.qy.contact.user.ReadUser;
import com.riversoft.weixin.qy.contact.user.SimpleUser;
import org.junit.Assert;
import org.junit.Test;

import java.util.List;

/**
 * Created by exizhai on 10/3/2015.
 */
public class UsersTest {

    @Test
    public void testGet() {
        ReadUser user = Users.defaultUsers().get(getTestUser());
        Assert.assertNotNull(user);
    }

    @Test
    public void testSimpleList() {
        ReadUser user = Users.defaultUsers().get(getTestUser());
        List<SimpleUser> users = Users.defaultUsers().simpleList(user.getDepartment()[0], true, null);
        Assert.assertNotNull(users);
    }

    @Test
    public void testList() {
        ReadUser user = Users.defaultUsers().get(getTestUser());
        List<ReadUser> users = Users.defaultUsers().list(user.getDepartment()[0], true, null);
        Assert.assertNotNull(users);
    }

    @Test
    public void testInvite() {
        Invitation invitation = Users.defaultUsers().invite("smooth");
        Assert.assertNotNull(invitation);
    }

    public String getTestUser() {
        return TestConfiguration.getInstance().testUser();
    }
}
import { memo } from '../decorators/memo.decorator'
import { secret } from '../environment/secret'
import { gotService } from '../srv/got.service'
import { timeUtil } from '../util/time.util'

class SLApiService {
  @memo({
    ttl: 15000,
  })
  async getDepartures (siteId: string, timeWindow: number = 30): Promise<any> {
    const url = `http://api.sl.se/api2/realtimedeparturesV4.json`
    const r = await gotService.get(url, {
      query: {
        key: secret('SECRET_SL_REALTIME_API_KEY'),
        siteId,
        timeWindow,
        Bus: false,
        Train: false,
        Tram: false,
        Ship: false,
      },
    })
    return {
      fetchedAt: timeUtil.nowPretty(),
      ...r,
    }
  }
}

export const slApiService = new SLApiService()
#include <bits/stdc++.h>
using namespace std;
typedef long long int lli;
typedef unsigned long long int ulli;
#define M_PI_H (3.14159265358979323846 / 2) /* pi by two */
#define rep(i, start, end) for (int i = start; i < end; i++)
#define repr(i, start, end) for (int i = start; i >= end; i--) // INCLUSIVE

int main()
{
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);
    cout.tie(NULL);
    int testcases;
    cin >> testcases;
    while (testcases--)
    {
        lli n;
        cin >> n;
        vector<lli> a(n);
        rep(i, 0, n) cin >> a[i];
        // Group indices by the key a[i] - i; any two indices sharing a key form
        // a valid pair, so each key with count k contributes C(k, 2) pairs.
        map<lli, lli> mp;
        rep(i, 0, n)
        {
            mp[a[i] - i]++;
        }
        map<lli, lli>::iterator itr;
        lli ans = 0;
        for (itr = mp.begin(); itr != mp.end(); itr++)
        {
            lli kk = itr->second;
            ans += kk > 1 ? kk * (kk - 1) / 2 : 0;
        }
        cout << ans;
        cout << "\n";
    }
    return 0;
}
import {
    Table, Model, PrimaryKey, AutoIncrement, Column,
    ForeignKey, BelongsTo, DataType
} from "sequelize-typescript";
import { Person } from "../../person/models/Person";

@Table({
    timestamps: true,
    tableName: 'AddressPerson'
})
export class AddressPerson extends Model<AddressPerson> {

    @PrimaryKey
    @AutoIncrement
    @Column
    id: number;

    @Column(DataType.STRING(8))
    zipcode: string;

    @Column(DataType.STRING(10))
    number: number;

    @Column(DataType.STRING(80))
    complement: string;

    // @ForeignKey(() => Person)
    // @Column
    PersonId: number;

    static validate(addressPerson: AddressPerson): string {
        if (addressPerson.number == null) return "Invalid number."
        if (addressPerson.zipcode == null) return "Invalid zip code";
        if (addressPerson.PersonId == null) return 'Invalid person';
        return null;
    }
}
// locateBrackets locates matching bracket pairs according to BD16.
//
// This implementation uses a linked list instead of a stack, because, while
// elements are added at the front (like a push) they are not generally removed
// in atomic 'pop' operations, reducing the benefit of the stack archetype.
func (p *bracketPairer) locateBrackets(pairTypes []bracketType, pairValues []rune) {
	for i, index := range p.indexes {
		if pairTypes[index] == bpNone || p.codesIsolatedRun[i] != ON {
			continue
		}
		switch pairTypes[index] {
		case bpOpen:
			if p.openers.Len() == maxPairingDepth {
				p.openers.Init()
				return
			}
			p.openers.PushFront(i)
		case bpClose:
			count := 0
			for elem := p.openers.Front(); elem != nil; elem = elem.Next() {
				count++
				opener := elem.Value.(int)
				if p.matchOpener(pairValues, opener, i) {
					p.pairPositions = append(p.pairPositions, bracketPair{opener, i})
					for ; count > 0; count-- {
						p.openers.Remove(p.openers.Front())
					}
					break
				}
			}
			sort.Sort(p.pairPositions)
		}
	}
}
use std::{convert::TryFrom, path::PathBuf};

use backblaze_b2_async_plain::v2::{
    b2_download_file_by_id, AuthorizeAccountOk, DownloadParams, FileId,
};
use reqwest::Response;
use structopt::StructOpt;
use tokio::{
    fs::File,
    io::{self, AsyncWriteExt},
};

#[derive(StructOpt)]
#[structopt(
    name = "b2_download_file_by_id",
    about = "Calls b2_download_file_by_id and downloads the file"
)]
struct Params {
    #[structopt(short, long, env = "B2_AUTH_FILE")]
    /// file with the authentication data as created by the [b2_authorize_account] example with the --save option; by default ~/.b2_auth.yaml is checked
    auth_file: Option<String>,

    /// ID of the file to be downloaded
    file_id: String,

    #[structopt(long)]
    /// file to save data to; if not set, the download will be printed to standard out
    out: Option<String>,
}

async fn save_download_response<O>(r: &mut Response, output: &mut O)
where
    O: Unpin + AsyncWriteExt,
{
    while let Some(chunk) = r.chunk().await.expect("Failed during download") {
        output
            .write_all(&chunk)
            .await
            .expect("Failed while writing / printing downloaded data");
    }
}

#[tokio::main]
/// WARNING: this example uses blocking stdin/out without spawning a separate thread; this is generally a bad idea, but
/// it is done here to keep the example simple
async fn main() {
    let p = Params::from_args();

    let auth_data = {
        let auth_file = match p.auth_file {
            Some(path) => PathBuf::from(path),
            None => {
                let mut home = home::home_dir().expect("Could not get home directory. Please specify path where authentication data is stored using --auth-file");
                home.push(".b2_auth.yaml");
                home
            }
        };
        let file = std::fs::File::open(auth_file).expect("Could not open file with authentication data");
        let auth_data: AuthorizeAccountOk = serde_yaml::from_reader(file)
            .expect("Could not read authentication data from authentication data file");
        auth_data
    };

    let file_id = FileId::try_from(p.file_id).expect("Invalid file Id");
    let download_params = DownloadParams::builder().file_id(&file_id).build();
    let mut resp = b2_download_file_by_id(
        auth_data.download_url(),
        Some(auth_data.authorization_token()),
        &download_params,
    )
    .await;

    match resp {
        Ok(ref mut r) => {
            match p.out {
                None => save_download_response(r, &mut io::stdout()).await,
                Some(file_name) => {
                    let mut f = File::create(file_name)
                        .await
                        .expect("Could not create output file");
                    save_download_response(r, &mut f).await;
                }
            };
        }
        Err(e) => {
            panic!("Download error: {:#?}", e);
        }
    }
}
package monitoring

import (
	"fmt"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// Metrics is a thin interface on top of the prometheus API.
// As such there should be little logic in the implementation of these methods.
type Metrics interface {
	SetHeadTrackerCurrentHead(blockNumber float64, networkName, chainID, networkID string)
	SetFeedContractMetadata(chainID, contractAddress, feedID, contractStatus, contractType, feedName, feedPath, networkID, networkName, symbol string)
	SetFeedContractLinkBalance(balance float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetLinkAvailableForPayment(amount float64, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetFeedContractTransactionsSucceeded(numSucceeded float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetFeedContractTransactionsFailed(numFailed float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetNodeMetadata(chainID, networkID, networkName, oracleName, sender string)
	// Deprecated: use SetOffchainAggregatorAnswers
	SetOffchainAggregatorAnswersRaw(answer float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorAnswers(answer float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	IncOffchainAggregatorAnswersTotal(contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	// Deprecated: use SetOffchainAggregatorJuelsPerFeeCoin
	SetOffchainAggregatorJuelsPerFeeCoinRaw(juelsPerFeeCoin float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorJuelsPerFeeCoin(juelsPerFeeCoin float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorSubmissionReceivedValues(value float64, contractAddress, feedID, sender, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorJuelsPerFeeCoinReceivedValues(value float64, contractAddress, feedID, sender, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorAnswerStalled(isSet bool, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	SetOffchainAggregatorRoundID(aggregatorRoundID float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string)
	// Cleanup deletes all the metrics
	Cleanup(networkName, networkID, chainID, oracleName, sender, feedName, feedPath, symbol, contractType, contractStatus, contractAddress, feedID string)
	// Exposes the accumulated metrics to HTTP.
HTTPHandler() http.Handler } var ( headTrackerCurrentHead = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "head_tracker_current_head", Help: "Tracks the current block height that the monitoring instance has processed.", }, []string{"network_name", "chain_id", "network_id"}, ) feedContractMetadata = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "feed_contract_metadata", Help: "Exposes metadata for individual feeds. It should simply be set to 1, as the relevant info is in the labels.", }, []string{"chain_id", "contract_address", "feed_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name", "symbol"}, ) feedContractLinkBalance = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "feed_contract_link_balance", }, []string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) linkAvailableForPayment = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "link_available_for_payments", Help: "Reports the amount of link the contract can use to make payments to node operators. This may be different from the LINK balance of the contract since that can contain debt", }, []string{"feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) feedContractTransactionsSucceeded = promauto.NewCounterVec( prometheus.CounterOpts{ Name: "feed_contract_transactions_succeeded", }, []string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) feedContractTransactionsFailed = promauto.NewCounterVec( prometheus.CounterOpts{ Name: "feed_contract_transactions_failed", }, []string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) nodeMetadata = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "node_metadata", Help: "Exposes metadata for node operators. 
It should simply be set to 1, as the relevant info is in the labels.",
		},
		[]string{"chain_id", "network_id", "network_name", "oracle_name", "sender"},
	)
	offchainAggregatorAnswersRaw = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_answers_raw",
			Help: "Reports the latest answer for a contract.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorAnswers = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_answers",
			Help: "Reports the latest answer for a contract divided by the feed's Multiply parameter.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorAnswersTotal = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Name: "offchain_aggregator_answers_total",
			Help: "Bump this metric every time there is a transmission on chain.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorJuelsPerFeeCoinRaw = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_juels_per_fee_coin_raw",
			Help: "Reports the latest raw answer for juels/fee_coin.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorJuelsPerFeeCoin = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_juels_per_fee_coin",
			Help: "Reports the latest raw answer for juels/fee_coin divided by the feed's multiplier.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorSubmissionReceivedValues = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_submission_received_values",
			Help: "Report individual node observations for the latest transmission on chain. (Should be 1 time series per node per contract). The values are divided by the feed's multiplier config.",
		},
		[]string{"contract_address", "feed_id", "sender", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorJuelsPerFeeCoinReceivedValues = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_juels_per_fee_coin_received_values",
			Help: "Report individual node observations of the Juels/FeeCoin feeds at the latest transmission on chain. (Should be 1 time series per node per contract). The values are divided by the feed's multiplier config.",
		},
		[]string{"contract_address", "feed_id", "sender", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorAnswerStalled = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_answer_stalled",
			Help: "Set to 1 if the heartbeat interval has passed on a feed without a transmission. Set to 0 otherwise.",
		},
		[]string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"},
	)
	offchainAggregatorRoundID = promauto.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "offchain_aggregator_round_id",
			Help: "Sets the aggregator contract's round id, i.e. 
the number of observations stored by the feed", }, []string{"contract_address", "feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) ) func NewMetrics(log Logger) Metrics { return &defaultMetrics{log} } type defaultMetrics struct { log Logger } func (d *defaultMetrics) SetHeadTrackerCurrentHead(blockNumber float64, networkName, chainID, networkID string) { headTrackerCurrentHead.With(prometheus.Labels{ "network_name": networkName, "chain_id": chainID, "network_id": networkID, }).Set(blockNumber) } func (d *defaultMetrics) SetFeedContractMetadata(chainID, contractAddress, feedID, contractStatus, contractType, feedName, feedPath, networkID, networkName, symbol string) { feedContractMetadata.With(prometheus.Labels{ "chain_id": chainID, "contract_address": contractAddress, "feed_id": feedID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, "symbol": symbol, }).Set(1) } func (d *defaultMetrics) SetFeedContractLinkBalance(balance float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { feedContractLinkBalance.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(balance) } func (d *defaultMetrics) SetLinkAvailableForPayment(amount float64, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { linkAvailableForPayment.With(prometheus.Labels{ "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(amount) } func (d *defaultMetrics) SetFeedContractTransactionsSucceeded(numSucceeded float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { feedContractTransactionsSucceeded.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Add(numSucceeded) } func (d *defaultMetrics) SetFeedContractTransactionsFailed(numFailed float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { feedContractTransactionsFailed.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Add(numFailed) } func (d *defaultMetrics) SetNodeMetadata(chainID, networkID, networkName, oracleName, sender string) { nodeMetadata.With(prometheus.Labels{ "chain_id": chainID, "network_id": networkID, "network_name": networkName, "oracle_name": oracleName, "sender": sender, }).Set(1) } func (d *defaultMetrics) SetOffchainAggregatorAnswersRaw(answer float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorAnswersRaw.With(prometheus.Labels{ 
"contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(answer) } func (d *defaultMetrics) SetOffchainAggregatorAnswers(answer float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorAnswers.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(answer) } func (d *defaultMetrics) IncOffchainAggregatorAnswersTotal(contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorAnswersTotal.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Inc() } func (d *defaultMetrics) SetOffchainAggregatorJuelsPerFeeCoinRaw(juelsPerFeeCoin float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorJuelsPerFeeCoinRaw.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(juelsPerFeeCoin) } func (d *defaultMetrics) SetOffchainAggregatorJuelsPerFeeCoin(juelsPerFeeCoin float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorJuelsPerFeeCoin.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(juelsPerFeeCoin) } func (d *defaultMetrics) SetOffchainAggregatorSubmissionReceivedValues(value float64, contractAddress, feedID, sender, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorSubmissionReceivedValues.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "sender": sender, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(value) } func (d *defaultMetrics) SetOffchainAggregatorJuelsPerFeeCoinReceivedValues(value float64, contractAddress, feedID, sender, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) { offchainAggregatorJuelsPerFeeCoinReceivedValues.With(prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "sender": sender, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }).Set(value) } func (d *defaultMetrics) SetOffchainAggregatorAnswerStalled(isSet bool, contractAddress, feedID, chainID, contractStatus, 
contractType, feedName, feedPath, networkID, networkName string) {
	var value float64 = 0
	if isSet {
		value = 1
	}
	offchainAggregatorAnswerStalled.With(prometheus.Labels{
		"contract_address": contractAddress,
		"feed_id":          feedID,
		"chain_id":         chainID,
		"contract_status":  contractStatus,
		"contract_type":    contractType,
		"feed_name":        feedName,
		"feed_path":        feedPath,
		"network_id":       networkID,
		"network_name":     networkName,
	}).Set(value)
}

func (d *defaultMetrics) SetOffchainAggregatorRoundID(aggregatorRoundID float64, contractAddress, feedID, chainID, contractStatus, contractType, feedName, feedPath, networkID, networkName string) {
	offchainAggregatorRoundID.With(prometheus.Labels{
		"contract_address": contractAddress,
		"feed_id":          feedID,
		"chain_id":         chainID,
		"contract_status":  contractStatus,
		"contract_type":    contractType,
		"feed_name":        feedName,
		"feed_path":        feedPath,
		"network_id":       networkID,
		"network_name":     networkName,
	}).Set(aggregatorRoundID)
}

func (d *defaultMetrics) Cleanup(
	networkName, networkID, chainID, oracleName, sender string,
	feedName, feedPath, symbol, contractType, contractStatus string,
	contractAddress, feedID string,
) {
	for _, metric := range []struct {
		name   string
		vec    *prometheus.MetricVec
		labels prometheus.Labels
	}{
		{
			"head_tracker_current_head",
			headTrackerCurrentHead.MetricVec,
			prometheus.Labels{
				"network_name": networkName,
				"chain_id":     chainID,
				"network_id":   networkID,
			},
		},
		{
			"feed_contract_metadata",
			feedContractMetadata.MetricVec,
			prometheus.Labels{
				"chain_id":         chainID,
				"contract_address": contractAddress,
				"feed_id":          feedID,
				"contract_status":  contractStatus,
				"contract_type":    contractType,
				"feed_name":        feedName,
				"feed_path":        feedPath,
				"network_id":       networkID,
				"network_name":     networkName,
				"symbol":           symbol,
			},
		},
		{
			"feed_contract_link_balance",
			feedContractLinkBalance.MetricVec,
			prometheus.Labels{
				"contract_address": contractAddress,
				"feed_id":          feedID,
				"chain_id":         chainID,
				"contract_status":  contractStatus,
				"contract_type":    contractType,
				"feed_name":        feedName,
				"feed_path":        feedPath,
				"network_id":       networkID,
				"network_name":     networkName,
			},
		},
		{
			"link_available_for_payments",
			linkAvailableForPayment.MetricVec,
			prometheus.Labels{
				"feed_id":         feedID,
				"chain_id":        chainID,
				"contract_status": contractStatus,
				"contract_type":   contractType,
				"feed_name":       feedName,
				"feed_path":       feedPath,
				"network_id":      networkID,
				"network_name":    networkName,
			},
		},
		{
			"feed_contract_transactions_succeeded",
			feedContractTransactionsSucceeded.MetricVec,
			prometheus.Labels{
				"contract_address": contractAddress,
				"feed_id":          feedID,
				"chain_id":         chainID,
				"contract_status":  contractStatus,
				"contract_type":    contractType,
				"feed_name":        feedName,
				"feed_path":        feedPath,
				"network_id":       networkID,
				"network_name":     networkName,
			},
		},
		{
			"feed_contract_transactions_failed",
			feedContractTransactionsFailed.MetricVec,
			prometheus.Labels{
				"contract_address": contractAddress,
				"feed_id":          feedID,
				"chain_id":         chainID,
				"contract_status":  contractStatus,
				"contract_type":    contractType,
				"feed_name":        feedName,
				"feed_path":        feedPath,
				"network_id":       networkID,
				"network_name":     networkName,
			},
		},
		{
			"node_metadata",
			nodeMetadata.MetricVec,
			prometheus.Labels{
				"chain_id":     chainID,
				"network_id":   networkID,
				"network_name": networkName,
				"oracle_name":  oracleName,
				"sender":       sender,
			},
		},
		{
			"offchain_aggregator_answers_raw",
			offchainAggregatorAnswersRaw.MetricVec,
			prometheus.Labels{
				"contract_address": contractAddress,
				"feed_id":          feedID,
				"chain_id":         chainID,
				"contract_status":  contractStatus,
				"contract_type":    contractType,
				"feed_name":        feedName,
"feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_answers", offchainAggregatorAnswers.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_answers_total", offchainAggregatorAnswersTotal.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_juels_per_fee_coin_raw", offchainAggregatorJuelsPerFeeCoinRaw.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_juels_per_fee_coin", offchainAggregatorJuelsPerFeeCoin.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_submission_received_values", offchainAggregatorSubmissionReceivedValues.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "sender": sender, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_juels_per_fee_coin_received_values", offchainAggregatorJuelsPerFeeCoinReceivedValues.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "sender": sender, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_answer_stalled", offchainAggregatorAnswerStalled.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, { "offchain_aggregator_round_id", offchainAggregatorRoundID.MetricVec, prometheus.Labels{ "contract_address": contractAddress, "feed_id": feedID, "chain_id": chainID, "contract_status": contractStatus, "contract_type": contractType, "feed_name": feedName, "feed_path": feedPath, "network_id": networkID, "network_name": networkName, }, }, } { if !metric.vec.Delete(metric.labels) { errArgs := []interface{}{} for key, value := range metric.labels { errArgs = append(errArgs, key, value) } d.log.Errorw(fmt.Sprintf("unable to delete metric '%s'", metric.name), errArgs...) } } } func (d *defaultMetrics) HTTPHandler() http.Handler { return promhttp.Handler() }
def search_paths_from_description(desc):
    # Build a list of (prefix, directory) tuples to search, derived from the
    # description's package (or qualified name) plus any explicit search_paths.
    paths = []
    if desc.package:
        dirname = package_dirname(desc.package)
        paths.append(("", dirname))
    elif desc.qualified_name:
        dirname = package_dirname(package(desc.qualified_name))
        paths.append(("", dirname))
    if hasattr(desc, "search_paths"):
        paths.extend(desc.search_paths)
    return paths
#import "IFKShape.h" @interface IFKRegularPolygonShape : IFKShape @property (nonatomic, copy) NSNumber *inputCircumradius; @property (nonatomic, copy) CIVector *inputBorderRadiuses; @end
import {MongoMeasurable} from '../../../src/components/mongodb/MongoMeasurable';
import {MongoQuery} from '../../../src/components/mongodb/MongoQuery';

describe('A MongoMeasurable', () => {
    const mongoQueryMock = {
        constructor: {
            name: 'MongoQuery',
        },
    } as MongoQuery<unknown>;

    const mongoMeasurable = new MongoMeasurable(mongoQueryMock);

    describe('When executing successfully', () => {
        let result: unknown;
        let tags: string[];

        beforeEach(async () => {
            mongoQueryMock.execute = () => Promise.resolve({});
            result = await mongoMeasurable.execute();
            tags = mongoMeasurable.tags;
        });

        it('Has correct tags', () => {
            expect(tags).toContain('result:success');
            expect(tags).toContain('type:MongoQuery');
        });

        it('Can return the mongoQuery result', async () => {
            expect(result).toEqual({});
        });
    });

    describe('When executing unsuccessfully', () => {
        let error: unknown;
        const throwError = new Error();
        let tags: string[];

        beforeEach(async () => {
            mongoQueryMock.execute = () => Promise.reject(throwError);
            await mongoMeasurable.execute().catch((e) => {
                error = e;
            });
            tags = mongoMeasurable.tags;
        });

        it('Has correct tags', () => {
            expect(tags).toContain('result:failed');
            expect(tags).toContain('type:MongoQuery');
        });

        it('Throws the correct error', () => {
            expect(error).toBe(throwError);
        });
    });
});
def load_config(self, config_file=None): config_path = pkg_resources.resource_filename("snake", "data/config/snake.conf") try: with open(config_path, 'rb') as stream: base_config = yaml.safe_load(stream) self.snake_config.update(base_config) except Exception as err: print("Failed to parse base config file: %s" % err) sys.exit(1) if config_file: if not path.exists(config_file): print("Not a valid config_file: %s" % config_file) sys.exit(1) try: with open(config_file, 'rb') as stream: snake_config = yaml.safe_load(stream) self.snake_config.update(snake_config) except Exception as err: print("Failed to parse user config file: %s" % err) sys.exit(1) else: etc_conf = path.join(path.abspath(path.expanduser(constants.ETC_DIR)), 'snake.conf') if path.exists(etc_conf): try: etc_config = {} with open(etc_conf, 'rb') as stream: etc_config = yaml.safe_load(stream) self.snake_config.update(etc_config) except Exception as err: print("Failed to parse etc config file: %s" % err) sys.exit(1)
// ConstructionCombineResponse returns an error if // a *types.ConstructionCombineResponse does // not have a populated SignedTransaction. func ConstructionCombineResponse( response *types.ConstructionCombineResponse, ) error { if response == nil { return ErrConstructionCombineResponseIsNil } if len(response.SignedTransaction) == 0 { return ErrSignedTxEmpty } return nil }
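A hedged usage sketch: the response value below is fabricated for illustration (types is the rosetta-sdk-go types package that the validator's signature refers to); only the ConstructionCombineResponse function comes from the snippet above.

// Validate a combine response before trusting its signed transaction.
resp := &types.ConstructionCombineResponse{
	SignedTransaction: "0xdeadbeef", // placeholder payload
}
if err := ConstructionCombineResponse(resp); err != nil {
	log.Fatal(err) // e.g. ErrSignedTxEmpty when SignedTransaction is empty
}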
The Sandakan Magistrate's Court today slapped a foreigner with a three-year jail sentence for breaking into a restaurant and stealing RM1. Abu Huraira Razak, 27, who pleaded guilty before Magistrate Faelly Jeffrey Lanjungan, was also ordered to pay a fine of RM5,000 or serve 12 months in prison. The court also ordered that he be deported back to the Philippines upon completion of his sentence. Abu Huraira, who was arrested on Sept 15 after committing the theft, had arrived in Malaysia illegally only three days earlier. He had broken into a restaurant through the back door and stolen an ornamental coin box from the counter, which only contained RM1. A worker, who realised that someone had entered the premises, alerted the police. Abu Huraira was arrested as he attempted to flee the area. According to Berita Harian, the magistrate ordered that Abu Huraira, who was unrepresented, serve his sentence from the time of arrest. Abu Huraira was charged under Section 457 of the Penal Code, which deals with trespassing or housebreaking in order to commit an offence punishable with imprisonment.
/** * Frees all resources associated with this Mesh */ public void dispose() { if (meshes.get(graphics.getApp()) != null) meshes.get(graphics.getApp()).removeValue(this, true); vertices.dispose(); if (instances != null) instances.dispose(); indices.dispose(); }
def curveFromSelection(startFrame, endFrame, curveName='', cleanCurve=False):
    sel = cmds.ls(selection=True)
    if not sel:
        logger.warning("Nothing selected")
        return []

    createdCurves = []
    for obj in sel:
        newCurve = curveFromAnim(obj, startFrame, endFrame, curveName, cleanCurve)
        createdCurves.append(newCurve)

    return createdCurves
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ /** * Copyright (c) 2013, <NAME> All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of * the IIIA-CSIC nor the names of its contributors may be used to endorse or promote products derived from this software * without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package csic.iiia.ftl.argumentation.visualization.amail.panels; import java.awt.Dimension; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import javax.swing.BoxLayout; import javax.swing.JPanel; import javax.swing.JScrollPane; import csic.iiia.ftl.argumentation.core.Argument; import csic.iiia.ftl.argumentation.core.ArgumentAcceptability; import csic.iiia.ftl.argumentation.core.ArgumentationTree; import csic.iiia.ftl.base.core.FTKBase; import csic.iiia.ftl.base.core.Path; // TODO: Auto-generated Javadoc /** * The Class ArgumentationPanelMultiScrolled. * * @author santi */ public class ArgumentationPanelMultiScrolled extends ArgumentationPanel { /** The Constant serialVersionUID. */ private static final long serialVersionUID = -8863242484616901152L; /** The agent pane list. */ List<JScrollPane> agentPaneList = new LinkedList<JScrollPane>(); /** The agent pane content. */ List<ArgumentationStatePanel> agentPaneContent = new LinkedList<ArgumentationStatePanel>(); /** * Instantiates a new argumentation panel multi scrolled. * * @param a * the a * @param a_aa * the a_aa * @param dm * the dm * @param sp * the sp * @param dp * the dp * @param argumentIDs * the argument i ds */ public ArgumentationPanelMultiScrolled(List<AgentPanel> a, List<ArgumentAcceptability> a_aa, FTKBase dm, Path sp, Path dp, HashMap<Argument, String> argumentIDs) { apl.addAll(a); aal.addAll(a_aa); m_domainModel = dm; m_solutionPath = sp; m_descriptionPath = dp; // Set up the view, divided in tree (argumentation in the middle, agents on the sides): removeAll(); BoxLayout b1 = new BoxLayout(this, BoxLayout.Y_AXIS); setLayout(b1); int NAGENTS = apl.size(); for (int n = 0; n < NAGENTS; n++) { AgentPanel ap = apl.get(n); ap.setPreferredSize(new Dimension(getSize().width / (NAGENTS + 1), getSize().height)); ArgumentationStatePanel apc = (agentPaneContent.size() > n ? 
agentPaneContent.get(n) : null); if (apc == null) { apc = new ArgumentationStatePanel(this, dm, sp, dp, aal, argumentIDs); JScrollPane agentPane = new JScrollPane(apc); agentPane.setPreferredSize(new Dimension(getSize().width / (NAGENTS + 1), getSize().height)); agentPaneList.add(agentPane); agentPaneContent.add(apc); } } JPanel tmp1 = new JPanel(); add(tmp1); BoxLayout b2 = new BoxLayout(tmp1, BoxLayout.X_AXIS); tmp1.setLayout(b2); for (JScrollPane p : agentPaneList) tmp1.add(p); JPanel tmp2 = new JPanel(); add(tmp2); BoxLayout b3 = new BoxLayout(tmp2, BoxLayout.X_AXIS); tmp2.setLayout(b3); for (AgentPanel p : apl) tmp2.add(p); } /* * public ArgumentationPanelMultiScrolled(AgentPanel a1, AgentPanel a2, ArgumentAcceptability a_aa1, * ArgumentAcceptability a_aa2, FTKBase dm, Path sp, Path dp, HashMap<Argument,String> argumentIDs) { apl.add(a1); * apl.add(a2); aal.add(a_aa1); aal.add(a_aa2); m_domainModel = dm; m_solutionPath = sp; m_descriptionPath = dp; * * // Set up the view, divided in tree (argumentation in the middle, agents on the sides): removeAll(); BoxLayout b1 * = new BoxLayout(this,BoxLayout.Y_AXIS); setLayout(b1); * * int NAGENTS = apl.size(); for(int n = 0;n<NAGENTS;n++) { AgentPanel ap = apl.get(n); ap.setPreferredSize(new * Dimension(getSize().width/(NAGENTS+1),getSize().height)); ArgumentationStatePanel apc = * (agentPaneContent.size()>n ? agentPaneContent.get(n):null); if (apc==null) { apc = new * ArgumentationStatePanel(this, dm, sp, dp, aal,argumentIDs); JScrollPane agentPane = new JScrollPane(apc); * agentPane.setPreferredSize(new Dimension(getSize().width/(NAGENTS+1),getSize().height)); * agentPaneList.add(agentPane); agentPaneContent.add(apc); } } * * JPanel tmp1 = new JPanel(); add(tmp1); BoxLayout b2 = new BoxLayout(tmp1,BoxLayout.X_AXIS); tmp1.setLayout(b2); * for(JScrollPane p:agentPaneList) tmp1.add(p); * * JPanel tmp2 = new JPanel(); add(tmp2); BoxLayout b3 = new BoxLayout(tmp2,BoxLayout.X_AXIS); tmp2.setLayout(b3); * for(AgentPanel p:apl) tmp2.add(p); } */ /* * (non-Javadoc) * * @see * csic.iiia.ftl.argumentation.visualization.amail.panels.ArgumentationPanel#addTree(csic.iiia.ftl.argumentation * .core.ArgumentationTree, java.lang.String, boolean) */ public void addTree(ArgumentationTree at, String agent, boolean retracted) { trees.add(at); for (int n = 0; n < apl.size(); n++) { AgentPanel ap = apl.get(n); if (at.getRoot().m_agent.equals(ap.m_name)) { agentPaneContent.get(n).addTree(at, agent, retracted); break; } } } }
#include <iostream>
using namespace std;

int main() {
    // Uses of an array name
    // 1. Use the array name to compute the memory occupied by the array
    int arr[5] = {1, 2, 3, 4, 5};
    cout << "Size of each array element: " << sizeof(arr[0]) << endl;
    cout << "Total size of the array: " << sizeof(arr) << endl;
    cout << "Number of elements in the array: " << sizeof(arr) / sizeof(arr[0]) << endl;

    // 2. Use the array name to get the array's starting address
    // cout << "Array start address: " << arr << endl;
    cout << "Array start address: " << (long)arr << endl;
    // cout << "Address of the array's first element: " << &arr[0] << endl;
    cout << "Address of the array's first element: " << (long)&arr[0] << endl;
    cout << "Address of the array's second element: " << (long)&arr[1] << endl;
    return 0;
}
/** * Showcase for API to trigger message replay using string representation of a replication group * message Id * * @param service ready configured and connected service instance * @param queueToConsumeFrom queue to consume from messages * @param replicationGroupMessageIdToString string representation of a replication group message * Id */ public static void requestReplayFromReplicationGroupMessageIdAsString(MessagingService service, Queue queueToConsumeFrom, String replicationGroupMessageIdToString) { final ReplicationGroupMessageId restoredReplicationGroupMessageId = ReplicationGroupMessageId .of(replicationGroupMessageIdToString); final PersistentMessageReceiver receiver = service .createPersistentMessageReceiverBuilder() .withMessageReplay( ReplayStrategy.replicationGroupMessageIdBased(restoredReplicationGroupMessageId)) .build(queueToConsumeFrom) .start(); final InboundMessage message = receiver.receiveMessage(); }
A welcome hybrid of old and new school MMO mechanics The Elder Scrolls Online has a lot to prove. Not only does developer Zenimax have to placate the fans of old, and sell them on the idea of a subscription based MMO, but they also have to bring in fans who have never played the franchise if they want to sustain their model. As both an Elder Scrolls fan since Daggerfall and an avid MMO player, I was excited to get my hands on the game last week to see what it really had to offer, and if it would deliver on either front. After an extended period of play time, I think that in some ways, The Elder Scrolls Online functions as a traditional MMO that won't sway people who have disdain for the genre. But in other ways, it's doing a lot of things that may warrant a second look, and it's looking really good for Elder Scrolls and MMO fans alike.
The Elder Scrolls Online (PC, Mac)
Developer: Zenimax Online Studios
Publisher: Bethesda Softworks
Release: TBA 2013
This is how the game will work. After booting up Elder Scrolls Online for the first time, you'll have the option to select from the Daggerfall Covenant, Aldmeri Dominion, and the Ebonheart Pact factions. Each faction will have different playable races -- Bretons, Redguards, and Orcs for Daggerfall -- High Elves, Wood Elves, and Khajiit for the Aldmeri Dominion -- and Dark Elves, Nords, and Argonians for Ebonheart. All quests will take place within your respective region, with the option to initiate cross-region PvP. Bethesda revealed last week that after completing your region, you'll have the option to visit the other two with the same character. Of course, you could always create an alternate character and start in a new region as well. It's odd to segment content like that, but Zenimax claims that each region will contain around 120-150 hours of content, not including dungeons or PvP. For the purposes of the preview, we were able to play a Daggerfall Covenant character up to level six, so I created a Dragonknight Orc. It's a tank mage class, so I naturally gravitated toward it. There are only four classes in total (Dragonknight, Templar, Sorcerer and Nightblade), but Zenimax notes that they want to de-emphasize classes entirely. Like any Elder Scrolls game, it's mostly just a guide to start with -- you can progress past your template and create your own custom character. There are racial skill trees, guild skill trees, and even a teased vampire tree that Zenimax was almost hesitant to reveal. Abilities can be morphed as they progress through their trees, and if you equip a certain ability class, you'll earn more XP towards new ones than normal. Think of how most MMOs force you to commit to one tree -- this system allows you to at least have some freedom as it gives you a smaller amount even if you're not using it, which is neat. Although our group started in the first major area of the game, there is an instanced story based tutorial to help you learn the ropes. The Elder Scrolls Online is set roughly 1000 years before the events of most of the games. Your character is killed by the forces of Molag Bal, an ancient god on par with the ones you've encountered before on the plane of Oblivion. You wake up in Molag Bal's Oblivion, Coldharbour, and the game starts there. Similar to Guild Wars 2, you'll have a personal story spending the rest of the game getting your soul back from the forces of the Daedra.
Zenimax tried to convey that the game is mostly a solo experience, stating that "you are the hero" with the "option" to work with a "band of heroes" to save Tamriel. Essentially, they want you to treat it like a multiplayer Elder Scrolls game, which people have been clamoring for for a while. So how's the gameplay? Well, it's a mix of the combat systems found in Elder Scrolls IV and V, and old-school MMO thought. In many ways, Zenimax has kept a lot of the original Elder Scrolls spirit intact. You'll hold down the attack button (defaulted to left mouse) to initiate a heavy attack, and click repeatedly for a regular attack. The right mouse button enables defensive options like your shield or weapon parrying, and left-clicking while blocking triggers a charge move that can interrupt spells. Double-tapping movement keys to dodge is also a nice touch, as I used it multiple times to avoid AOE attacks. In the first thirty minutes or so before I gained any skills, combat felt a little boring, and I had a "this is it?" style reaction. But once I gained projectile-based skills, defensive abilities, and more, combat ramped up a bit and became more dynamic, like the Elder Scrolls franchise is known for. Zenimax confirmed that the game won't ship with native controller support, but it "shouldn't be too hard" to get the game mapped out with third-party software, like XPadder or Joy2Key. Spells are bound to your selection of hotkeys like a typical MMO, and work just like Skyrim in real time. You can sneak around like in the classic Bethesda games, sprint, and upon leveling up, you're able to choose between a health, magicka, or stamina upgrade. The locales look true to form for the series. As lead developer Matt Firor stated, there are going to be some fundamental differences compared to, say, Skyrim, but in the world of Elder Scrolls Online, "when you're in Windhelm you feel like you're in Windhelm." There is a "choice" system, and thankfully it isn't really ham-fisted. Instead of choosing from lazy, Fable-esque, snooze-inducing quandaries of "this is Good or Bad," Elder Scrolls Online is a bit more subtle, like the original Dragon Age or the Witcher series. For instance, in the time that I played, I had the option to save three potential crew members for a heist, and to choose whether or not a few other story characters lived or died along the way. Whether I killed those characters off or not influenced my story as early as a few hours in, as the dialog and interactions I had with other NPCs were directly influenced by my choices. From a design standpoint, the HUD was very, very clean, and I enjoyed the subtle design differences compared to most MMOs, like glowing NPCs to denote quest-givers rather than the typical question marks above their heads -- fully voiced quest NPCs also help bring up the presentation. There are Wayshrines just like in Guild Wars 2 for fast travel, but you have to pay more than a nominal fee to use them from any spot on the map. Zenimax stated during the presentation that it was their goal to bring two major groups together: MMO and Elder Scrolls players. The way they want to accomplish this is through story immersion, exploration-based content, a deep combat system, and choices that matter. Content-wise, you'll have your typical world quests as you're roaming about, in addition to hidden Easter eggs, PvP, crafting, dungeons, and guilds (although only the Fighters and Mages guilds are confirmed currently).
Both guilds have meta-games, like the ability to destroy spirit anchors and collect necromancy tomes for unique skills. There's also a full crafting system, including specializations for cooking, weaponsmithing, armorsmithing, alchemy, and enchanting. The system itself will support a "jack of all trades, master of one" mentality, allowing you to choose one major skill. Crafters will create some of the best items in the game -- not just consumables or throwaway items. Zenimax stated that they want crafting to be fun and not a grind -- brought about through experimentation, rather than making the same thing over and over to grind skill points. As an example, they showed off the cooking system, which consists of adding two ingredients, and then up to three additives. You can add more or fewer ingredients to increase the potency of consumables, or change the effect entirely. It's a pretty neat and open system that should allow a hefty amount of experimentation. In addition to our hands-on, Zenimax provided a live group dungeon run, showing off the four-person party system in the spider-themed Spindleclutch dungeon. The main mechanic they wanted to show off was the emphasis on synergy from not only players, but different enemies as well. For instance, a Foot Soldier may drop some oil, and call for a Fire Mage to "light it up" for an area-of-effect attack. Also, Necromancers could perform a ritual at any time to summon a giant spirit monster, sacrificing themselves in the process. The developers claim that these dynamic combat situations could happen at random, which forces you to think on the fly, and makes combat a little more exciting. A distinction was made with enemy groups in that every pull will not be a traditional "tank pulls group" affair so much as a dynamic experience. The blocking mechanic and other crowd-control-based abilities help other players fit into the scheme, and get into the action more. After asking the developers directly about how far they were willing to go with this, they noted that they will not be eliminating the trinity like Guild Wars 2 did; they're only steering fights away from the traditional view and increasing the options of non-tanks. What I really liked about the instance is the promise that each dungeon contains a story. In fact, the first time you run it, there's always a deep narrative involved with the dungeon itself. In this particular dungeon, quest NPCs were setting fire to webs as the team progressed through. Like typical MMO instances, mini-bosses were peppered in before the big finale. Although we didn't get to play it, Zenimax also showed off a live PvP test for the first time, showcasing the siege-style gameplay of the Cyrodiil zone. Siege weapons were on full display, destroying keep walls to take out different perimeters and access the keep in multiple ways. Battering rams can also help break down the front door as players try to raid a keep. Tiny arrow icons above enemy heads identify rivals easily, which was a nice touch that most large-scale MMO battles lack.
As you take structures along the way, you can travel from keep to keep with waypoints. Zenimax said that they don't use development tools or trainers to test PvP because they want to replicate the player experience. Like World vs. World in Guild Wars 2, PvP matches will accommodate a few hundred players. Add in their support for endgame content at launch, and it could be a pretty enticing package. As the developers at Zenimax were showing off the game, you could tell that they really cared about the history of the Elder Scrolls franchise. It was clear after playing it for an extended period of time that there will be lots of lore here to uncover; perhaps more than in any other Elder Scrolls game before it. Coupled with the confirmed first-person mode, a lot of TES fans should feel right at home with Online -- the only major problem is that Zenimax hasn't really demonstrated quite enough to justify the subscription fee for more than a few months. But from what I can tell so far, it would be an amazing few months indeed. For those of you who want to see the game for yourself, the beta will start later this month, and it will be playable at PAX East -- which will be the first ever public test of the game.
from flask_restful import Resource, reqparse

from models.tipo import TipoModel


class TipoController(Resource):

    def get(self, nombre):
        # Substring search on the description via SQLAlchemy's LIKE operator.
        resultado = TipoModel.query.filter(TipoModel.tipo_desc.like('%' + nombre + '%')).all()
        if resultado:
            resultadoFinal = []
            for tipo in resultado:
                resultadoFinal.append(tipo.retornar_json_con_nombre_local())
            return resultadoFinal
        else:
            return {'message': 'No se encontro ese tipo'}, 404

    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument('descripcion', type=str, required=True, help='Falta la descripcion')
        data = parser.parse_args()
        # Only insert when no row with this description exists yet.
        consulta = TipoModel.query.filter_by(tipo_desc=data['descripcion']).first()
        if not consulta:
            insercion = TipoModel(data['descripcion'])
            try:
                insercion.guardar_en_la_bd()
            except Exception:
                return {'message': 'Hubo un error al agregar en la base de datos'}, 500
            return {
                'message': 'Se agrego exitosamente el tipo en la base de datos',
                'content': insercion.retornar_json()
            }, 201
        return {
            'message': 'Ya hay un tipo creado con esa descripcion'
        }, 412
MELBOURNE City has pulled off another stunning coup, this time signing 100-time Irish international Damien Duff. But the real bonus for City fans is the fact that Duff, a two-time English Premier League winner with Chelsea, has signed a one-year deal under the club’s $2.55 million salary cap, as revealed by the Herald Sun last week. That means the Manchester City-owned club still has the capacity to sign a superstar international marquee player, with his wages to be paid outside the cap. Melbourne City coach John van ’t Schip said the Irishman, who captained his country at Euro 2012 and scored a goal in the 2002 World Cup, would bring a wealth of experience to his new team. “We’re excited to have a player of Damien’s quality and experience for this season,” van ’t Schip said. “We believe he can offer international-standard quality on the pitch and will also be a great professional example for our younger players. “His career speaks for itself. He has achieved almost everything at club level and has been a great player at an international level too. “He comes to us straight from the English Premier League with Fulham, so he has been playing at the highest level against top teams and top players. “When we met with Damien, he was excited about the opportunity here and keen to work with us to help make this project a success.” [Pictured: Damien Duff during his Chelsea days.] The club also announced young guns Stefan Mauk and Ben Garuccio had signed contract extensions. That City was able to fit such a highly credentialed player under the salary cap is understood to be due in part to the fact that it had set aside money to re-sign last year’s international marquee, Orlando Engelaar, on a reduced deal. Engelaar, a popular figure with the club’s fans despite missing the first half of last season with a broken leg, has returned to Europe. City has now signed 18 senior players for next season. It can sign another five, including two more foreign players. One of those will be the international marquee, who is expected to be yet another big name. An Australian marquee player also needs to be signed. Josh Kennedy and Mark Bresciano have both been contacted by the club. Spain’s all-time leading scorer David Villa last week signed a short-term deal that will see him play A-League games between October and December before reporting for pre-season training at sister club New York City FC in the United States. Duff has only ever played professionally in England, and since helping Blackburn regain a place in the English Premier League in 2001 the left-footed winger has played only one club game outside the country’s top flight. Duff scored 27 goals in 184 league games for Rovers in a combination of the EPL and Championship, before securing a dream move to Chelsea in 2003, where he would go on to score 14 goals in 81 league games across three seasons. While at Stamford Bridge Duff won back-to-back Premier League titles in 2004-05 and 2005-06 and also played crucial roles in the club’s European Champions League campaigns. He then spent three seasons at Newcastle (five goals in 69 league games), but after turning out for the Magpies just once after the club was relegated to the Championship in 2009 he was snapped up by Fulham, where he would go on to make 119 league appearances and score 15 goals. Mauk’s new deal ties him to City until the end of the 2015-16 season and Garuccio’s ensures he’ll be at the club until the end of 2016-17.
“Ben and Stef have worked hard for us and are two of the A-League’s exciting young players,” van ’t Schip said. “We hope these players will learn a lot from the likes of Damien both on and off the pitch about what it takes to perform at the highest level as a professional footballer.” City squad so far: James Brown, Connor Chapman, Damien Duff, Mate Dugandzic, Ben Garuccio, Jonatan Germano, Jason Hoffman, Nick Kalmar, Patrick Kisnorbo, Stefan Mauk, Jacob Melling, Aaron Mooy, Massimo Murdocca, Iain Ramsay, Andrew Redmayne, David Villa, Tando Velaphi, Robbie Wielaert, David Williams. Originally published as Duff joins Melbourne City within cap
I wrote this a while back, about the top 10 skills needed for a job, based on some LinkedIn research. I actually spent some time in Northampton, MA over the holidays after I wrote that post (was with friends), and one of my friends told me a point I had made in there resonated a lot: namely, the types of people who use LinkedIn are a much different animal than the broader workforce (or even understandable segments of the workforce). You have a lot of Silicon Valley types on there, you have a bunch of job-seekers and HR types, and real, actual data might be hard to chase down. For example: when I was working with McKesson in summer 2013, tons of people making $250K+ didn’t even have LinkedIn profiles. If you like your job and your salary and probably aren’t moving anytime soon, why would you be super active on LinkedIn? Most people don’t want to be “thought leaders.” They want to print money for themselves, bay-bee! (Or at least, you know, maybe find some purpose with their work.) Alright, well … here’s some new research and ideas about “What Employers Are Looking For.” All this stuff is always fraught because you never know the agenda of the company doing the research — and often you don’t even know the methodology — but it’s somewhat interesting to look at. Here’s the source, and here’s the first chart. It shouldn’t surprise you: [chart omitted] Not surprising that “engineering” (“We need people to build this shit!”) and “business” (“We need hard-chargers!”) would be 1-2, with Computer Science No. 3. That is not even remotely shocking. This isn’t either, but … [chart omitted] Like I said, this one ain’t surprising. But … “critical thinking” and “problem solving” aren’t really skills you need in most jobs. That implies that most jobs involve a degree of critical thinking and/or problem-solving. They don’t. Most jobs are digital paper-pushing at core. Those words are things that hiring managers, HR people, and senior executives say because they sound good. You know what they really want? People who toe the line, do their shit, won’t complain, and won’t try to get promoted a lot. That’s how the game works, bay-bee! It’s amazing that “teamwork” is No. 2 above. That’s another buzzword. You know the dirty secret about teamwork? People don’t even want to work in teams. “Professionalism” — don’t even get me started there. It’s important to a point, but everyone makes a huge fucking deal out of it, and people love to use it as a way to keep others down. (“He’s not professional enough. We can’t promote him!”) This happens even if work quality is good. The bottom line here is that the way employers think about hiring, talk about hiring, and actually feel about hiring are essentially three entirely different things.
#include <cstdio>
#include <algorithm>
using namespace std;

// Counts the number of ways to choose three elements (at distinct indices)
// whose sum equals the minimum possible a[0] + a[1] + a[2] after sorting.
int n, a[100010];

int main() {
    scanf("%d", &n);
    for (int i = 0; i < n; i++) scanf("%d", &a[i]);
    sort(a, a + n);
    if (a[0] == a[1]) {
        if (a[1] == a[2]) {
            // All three smallest values are equal: choose any 3 of the c copies.
            long long c = 0;
            for (int i = 0; i < n; i++) if (a[i] == a[1]) c++;
            printf("%lld\n", (c * (c - 1LL) * (c - 2LL)) / 6LL);
        } else {
            // The two smallest form a fixed pair: pick one of the c copies of a[2].
            long long c = 0;
            for (int i = 0; i < n; i++) if (a[i] == a[2]) c++;
            printf("%lld\n", c);
        }
    } else {
        if (a[1] == a[2]) {
            // a[0] is unique: choose any 2 of the c copies of a[1] == a[2].
            long long c = 0;
            for (int i = 0; i < n; i++) if (a[i] == a[2]) c++;
            printf("%lld\n", (c * (c - 1LL)) / 2LL);
        } else {
            // a[0] and a[1] are unique: pick one of the c copies of a[2].
            long long c = 0;
            for (int i = 0; i < n; i++) if (a[i] == a[2]) c++;
            printf("%lld\n", c);
        }
    }
    return 0;
}
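A quick worked example of the case analysis above (assuming the task is counting minimal-sum triples): for n = 6 and a = {1, 3, 3, 3, 5, 5}, sorting gives a[0] = 1 < a[1] = a[2] = 3, so the branch that counts pairs fires with c = 3 copies of the value 3, and the program prints 3 * 2 / 2 = 3.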
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.kerby.kerberos.kerb.type.fast; import org.apache.kerby.asn1.Asn1FieldInfo; import org.apache.kerby.asn1.EnumType; import org.apache.kerby.asn1.ExplicitField; import org.apache.kerby.kerberos.kerb.type.KrbSequenceType; import org.apache.kerby.kerberos.kerb.type.base.CheckSum; import org.apache.kerby.kerberos.kerb.type.base.EncryptedData; /** KrbFastArmoredReq ::= SEQUENCE { armor [0] KrbFastArmor OPTIONAL, -- Contains the armor that identifies the armor key. -- MUST be present in AS-REQ. req-checksum [1] Checksum, -- For AS, contains the checksum performed over the type -- KDC-REQ-BODY for the req-body field of the KDC-REQ -- structure; -- For TGS, contains the checksum performed over the type -- AP-REQ in the PA-TGS-REQ padata. -- The checksum key is the armor key, the checksum -- type is the required checksum type for the enctype of -- the armor key, and the key usage number is -- KEY_USAGE_FAST_REQ_CHKSUM. enc-fast-req [2] EncryptedData, -- KrbFastReq -- -- The encryption key is the armor key, and the key usage -- number is KEY_USAGE_FAST_ENC. } */ public class KrbFastArmoredReq extends KrbSequenceType { protected enum KrbFastArmoredReqField implements EnumType { ARMOR, REQ_CHECKSUM, ENC_FAST_REQ; @Override public int getValue() { return ordinal(); } @Override public String getName() { return name(); } } private KrbFastReq fastReq; static Asn1FieldInfo[] fieldInfos = new Asn1FieldInfo[] { new ExplicitField(KrbFastArmoredReqField.ARMOR, KrbFastArmor.class), new ExplicitField(KrbFastArmoredReqField.REQ_CHECKSUM, CheckSum.class), new ExplicitField(KrbFastArmoredReqField.ENC_FAST_REQ, EncryptedData.class), }; public KrbFastArmoredReq() { super(fieldInfos); } public KrbFastArmor getArmor() { return getFieldAs(KrbFastArmoredReqField.ARMOR, KrbFastArmor.class); } public void setArmor(KrbFastArmor armor) { setFieldAs(KrbFastArmoredReqField.ARMOR, armor); } public CheckSum getReqChecksum() { return getFieldAs(KrbFastArmoredReqField.REQ_CHECKSUM, CheckSum.class); } public void setReqChecksum(CheckSum checkSum) { setFieldAs(KrbFastArmoredReqField.REQ_CHECKSUM, checkSum); } public KrbFastReq getFastReq() { return fastReq; } public void setFastReq(KrbFastReq fastReq) { this.fastReq = fastReq; } public EncryptedData getEncryptedFastReq() { return getFieldAs(KrbFastArmoredReqField.ENC_FAST_REQ, EncryptedData.class); } public void setEncryptedFastReq(EncryptedData encFastReq) { setFieldAs(KrbFastArmoredReqField.ENC_FAST_REQ, encFastReq); } }
/**
 * Verifies that the given user image id exists for the given user.
 * Throws a NOT_ACCEPTABLE error if the image does not exist.
 *
 * @param imageId the id of the image.
 * @param userId the id of the user who owns the image.
 * @param currentUser the user performing the request, used for error logging.
 * @return the matching UserImage.
 * @throws ResponseStatusException an HTTP error with code and error message.
 */
private UserImage verifyUserImageId(Integer imageId, Integer userId, User currentUser) throws ResponseStatusException {
    Optional<UserImage> image = userImageRepository.findImageByIdAndUserId(imageId, userId);
    if (image.isEmpty()) {
        String errorMessage = String.format("User (id: %d) attempted to delete a non-existent image with image id %d for user with id %d.", currentUser.getId(), imageId, userId);
        logger.error(errorMessage);
        throw new ResponseStatusException(HttpStatus.NOT_ACCEPTABLE, HTTP_NOT_ACCEPTABLE_MESSAGE);
    }
    return image.get();
}
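A hypothetical call site for the helper above; the method name deleteUserImage and the repository delete call are illustrative placeholders, not taken from the original codebase.

public void deleteUserImage(Integer imageId, Integer userId, User currentUser) {
    // Throws ResponseStatusException (406) if the image is missing, so the
    // delete below only runs for images that exist and belong to the user.
    UserImage image = verifyUserImageId(imageId, userId, currentUser);
    userImageRepository.delete(image);  // hypothetical repository method
}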
Two weeks ago, I announced the Bad Code Offsets project. It's a way to undo the bad code other people have written without actually replacing the bad code. Much like carbon offsets, money used to buy Bad Code Offsets goes towards open-source projects which not only produce good code, but produce software that helps developers build good software. And when I say the money goes towards these projects, I mean all of it. 100%. We pay for all expenses — PayPal fees, material costs, postage, etc. — out of pocket. But speaking of money, I'm happy to announce that we've raised $3,630.50 and are mailing out checks to the appropriate projects today. (Why Inedo? We're still in the process of starting an official not-for-profit. TRWTF is how much work that takes to do!) There are currently five projects supported, two of which — Drupal and FreeBSD — were added as a result of your feedback. The donations break down as follows, with Drupal and FreeBSD getting revenue from those who said "Let the Alliance decide" where their contribution goes.

Apache .... $803.00
jQuery .... $1,500.50
Postgres .. $288.50
Drupal .... $288.50
FreeBSD ... $500.00

Now if you do the math, you'll notice that the sum of the checks ($3,380.50) is $250.00 less than the cash received. There's a good reason for that.

Announcing The $500 Good Code Grant

Obviously, we're not expecting to change the world with $500. We'd just like to start or help something that maybe — just maybe — could make the world of code a better place. Here's how the grant works. Tell us how your free and open source project prevents bad code from being created and show us how $500 would make a real difference in your project — or — propose a new, free and open source project and show us how $500 would help you get it started. Will it work? I think it could. The folks like John Resig who dedicate their nights and weekends to creating projects like jQuery have it hard enough developing their project code. Then they have to recruit other developers, write unit tests, create documentation, and so on. And then they have to pay real money, out of their own pocket, for things like hosting, logos, and hardware. Imagine that: all that meta-work and expense just to build something that'll be free?! I think the least we can do is help with the expenses, and that's what we, The Alliance for Code Excellence, hope grants like this can achieve.

Join the Alliance for Code Excellence

When we chose the name the Alliance for Code Excellence, it was somewhat tongue-in-cheek, to fit with the satirically pompous copy on the Bad Code Offsets site. Well that, and Jeff already called dibs on the superhero theme with the whole League of (web) Justice thing. That said, we don't want to come across as actually being pompous. Because we're not. We're simply a group of colleagues who want to improve the state of the software development industry, whether it's through Bad Code Offsets, Good Code Grants, or whatever. And we're not even exclusive; in fact, we'd love for you to join us. The Alliance for Code Excellence is a not-for-profit venture that's 100% volunteer driven. But we do have some real costs, such as legal fees (a 501(c)3 is not quite DIY), materials costs (printing Bad Code Offset certificates), postage (especially for the overseas orders), and so on. The costs are paid by the Alliance, not through Bad Code Offsets. So if you like what we do, feel free to become a member. A one-year membership is $50 and, as a welcoming gift, you'll receive a black T-shirt with our logo on it.
Don't get me wrong: $50 isn't chump change and, quite honestly, you can buy several cool T-shirts for that. But it's not about the shirt, it's about joining the team and doing what we do: spending hard-earned money and precious time to build a better tomorrow, one line of code at a time. If you purchased Bad Code Offsets, you should be receiving your certificate(s) in the mail in the next few weeks. We're a bit behind, but will get them out as soon as possible.
// // Copyright 2019 <NAME> <mateusz at loskot dot net> // // Distributed under the Boost Software License, Version 1.0 // See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt // #include <boost/gil.hpp> #include <boost/mp11.hpp> #include <boost/core/lightweight_test.hpp> #include <memory> #include <random> #include <type_traits> #include <vector> #include "core/channel/test_fixture.hpp" #include "core/image/test_fixture.hpp" #include "core/pixel/test_fixture.hpp" namespace boost { namespace gil { namespace test { namespace fixture { template <typename Pixel> struct pixel_array { using iterator = Pixel*; #ifdef NDEBUG constexpr static std::size_t default_x_size = 256; constexpr static std::size_t default_y_size = 128; #else constexpr static std::size_t default_x_size = 16; constexpr static std::size_t default_y_size = 8; #endif pixel_array(std::size_t x_size = default_x_size, std::size_t y_size = default_y_size) : pixels_(new Pixel[x_size * y_size]) , x_size_(x_size) , y_size_(y_size) {} auto begin() -> iterator { return pixels_.get(); } auto end() -> iterator { return pixels_.get() + x_size_ * y_size_; } private: std::unique_ptr<Pixel[]> pixels_; std::size_t x_size_; std::size_t y_size_; }; }}}} namespace gil = boost::gil; namespace fixture = boost::gil::test::fixture; struct fill_with_pixel_integer_types { template <typename Pixel> void operator()(Pixel const &) { using pixel_t = Pixel; auto min_pixel = fixture::pixel_generator<pixel_t>::min(); auto max_pixel = fixture::pixel_generator<pixel_t>::max(); auto rnd_pixel = fixture::pixel_generator<pixel_t>::random(); for (auto const &fill_pixel : {min_pixel, max_pixel, rnd_pixel}) { fixture::pixel_array<pixel_t> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (pixel_t const &p : pixels) BOOST_TEST(p == fill_pixel); } } static void run() { boost::mp11::mp_for_each<fixture::pixel_integer_types>(fill_with_pixel_integer_types{}); } }; struct fill_with_pixel_float_types { template <typename Pixel> void operator()(Pixel const &) { using pixel_t = Pixel; auto min_pixel = fixture::pixel_generator<pixel_t>::min(); auto max_pixel = fixture::pixel_generator<pixel_t>::max(); for (auto const &fill_pixel : {min_pixel, max_pixel}) { fixture::pixel_array<Pixel> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (Pixel const &p : pixels) BOOST_TEST(p == fill_pixel); } } static void run() { boost::mp11::mp_for_each<fixture::pixel_float_types>(fill_with_pixel_float_types{}); } }; void test_fill_with_packed_pixel_gray3() { auto min_pixel = fixture::packed_pixel_gray3{0}; auto mid_pixel = fixture::packed_pixel_gray3{3}; auto max_pixel = fixture::packed_pixel_gray3{7}; for (auto const& fill_pixel : {min_pixel, max_pixel, mid_pixel} ) { fixture::pixel_array<fixture::packed_pixel_gray3> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (fixture::packed_pixel_gray3 const& p : pixels) { BOOST_TEST(p == fill_pixel); BOOST_TEST((int)get_color(p, gil::gray_color_t()) == (int)get_color(fill_pixel, gil::gray_color_t())); } } } void test_fill_with_packed_pixel_bgr121() { auto min_pixel = fixture::packed_pixel_bgr121{0}; auto mid_pixel = fixture::packed_pixel_bgr121{8}; auto max_pixel = fixture::packed_pixel_bgr121{17}; for (auto const& fill_pixel : {min_pixel, max_pixel, mid_pixel} ) { fixture::pixel_array<fixture::packed_pixel_bgr121> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (fixture::packed_pixel_bgr121 const& p 
: pixels) { BOOST_TEST(p == fill_pixel); BOOST_TEST((int)get_color(p, gil::red_t()) == (int)get_color(fill_pixel, gil::red_t())); BOOST_TEST((int)get_color(p, gil::green_t()) == (int)get_color(fill_pixel, gil::green_t())); BOOST_TEST((int)get_color(p, gil::blue_t()) == (int)get_color(fill_pixel, gil::blue_t())); } } } void test_fill_with_packed_pixel_rgb535() { fixture::packed_pixel_rgb535 min_pixel(0, 0, 0); fixture::packed_pixel_rgb535 mid_pixel(15, 3, 15); fixture::packed_pixel_rgb535 max_pixel(31, 7, 31); for (auto const& fill_pixel : {min_pixel, max_pixel, mid_pixel} ) { fixture::pixel_array<fixture::packed_pixel_rgb535> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (fixture::packed_pixel_rgb535 const& p : pixels) { BOOST_TEST(p == fill_pixel); BOOST_TEST((int)get_color(p, gil::red_t()) == (int)get_color(fill_pixel, gil::red_t())); BOOST_TEST((int)get_color(p, gil::green_t()) == (int)get_color(fill_pixel, gil::green_t())); BOOST_TEST((int)get_color(p, gil::blue_t()) == (int)get_color(fill_pixel, gil::blue_t())); } } } void test_bit_aligned_pixel_bgr232() { fixture::bit_aligned_pixel_bgr232 min_pixel(0, 0, 0); fixture::bit_aligned_pixel_bgr232 mid_pixel(1, 4, 2); fixture::bit_aligned_pixel_bgr232 max_pixel(3, 7, 3); for (auto const& fill_pixel : {min_pixel, max_pixel, mid_pixel} ) { fixture::pixel_array<fixture::bit_aligned_pixel_bgr232> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (fixture::bit_aligned_pixel_bgr232 const& p : pixels) { BOOST_TEST(p == fill_pixel); BOOST_TEST((int)get_color(p, gil::red_t()) == (int)get_color(fill_pixel, gil::red_t())); BOOST_TEST((int)get_color(p, gil::green_t()) == (int)get_color(fill_pixel, gil::green_t())); BOOST_TEST((int)get_color(p, gil::blue_t()) == (int)get_color(fill_pixel, gil::blue_t())); } } } void test_bit_aligned_pixel_rgb567() { fixture::bit_aligned_pixel_rgb567 min_pixel(0, 0, 0); fixture::bit_aligned_pixel_rgb567 mid_pixel(15, 31, 63); fixture::bit_aligned_pixel_rgb567 max_pixel(31, 63, 127); for (auto const& fill_pixel : {min_pixel, max_pixel, mid_pixel} ) { fixture::pixel_array<fixture::bit_aligned_pixel_rgb567> pixels; std::uninitialized_fill(pixels.begin(), pixels.end(), fill_pixel); for (fixture::bit_aligned_pixel_rgb567 const& p : pixels) { BOOST_TEST(p == fill_pixel); BOOST_TEST((int)get_color(p, gil::red_t()) == (int)get_color(fill_pixel, gil::red_t())); BOOST_TEST((int)get_color(p, gil::green_t()) == (int)get_color(fill_pixel, gil::green_t())); BOOST_TEST((int)get_color(p, gil::blue_t()) == (int)get_color(fill_pixel, gil::blue_t())); } } } int main() { fill_with_pixel_integer_types::run(); fill_with_pixel_float_types::run(); test_fill_with_packed_pixel_gray3(); test_fill_with_packed_pixel_bgr121(); test_fill_with_packed_pixel_rgb535(); test_bit_aligned_pixel_bgr232(); test_bit_aligned_pixel_rgb567(); return ::boost::report_errors(); }
// import components
import { Header } from './components/header/header';
import { Footer } from './components/footer/footer';

// import styles
import './index.scss';

export class Index {
    // Parameter properties so the injected components are actually stored.
    constructor(
        private readonly header: Header,
        private readonly footer: Footer,
    ) {}
}

// Instantiate with component instances rather than the class references;
// this assumes Header and Footer take no required constructor arguments.
new Index(new Header(), new Footer());
import re


def encodings_l10n(input_files, output, base):
    # Extract encoding display names into gettext template entries.
    output = open(output, 'w')
    reg = re.compile(r'Encoding [\w-]+\s+[\w-]+\s+"([\w \-\(\)]+)"\s+[\w-]+\s+(fixed|variable)\s+\w+.*')
    input = open(input_files[0])
    for lineno, line in enumerate(input.readlines()):
        if not line.startswith('Encoding'):
            continue
        match = reg.match(line)
        if match:
            # relativePath is assumed to be defined elsewhere in this module.
            output.write('#: %s:%d\nmsgid "%s"\nmsgstr ""\n\n' %
                         (relativePath(input_files[0], base), lineno + 1, match.groups()[0]))
        else:
            print("Error: Unable to handle line:")
            print(line)
    input.close()
    output.close()
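To make the regular expression concrete: a hypothetical input line of the shape it expects (the actual field layout of the encodings file may differ) is

Encoding iso8859-1 iso8859-1 "ISO-8859-1" latin1 fixed inputenc

which would produce a PO template entry such as

#: lib/encodings:42
msgid "ISO-8859-1"
msgstr ""

where 42 stands in for the real line number.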
#!/usr/bin/env python3 # # makers.py """ Functions for creating imports, modules and packages. """ # # Copyright © 2020 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib import re from types import FunctionType, ModuleType from typing import Any, Iterable, List # 3rd party import clr # type: ignore import isort # type: ignore from autoflake import fix_code # type: ignore from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList # this package from dotnet_stub_builder.type_conversion import Converter from dotnet_stub_builder.utils import ( SYSTEM_MODULES, dedup, get_child_attrs, get_signature, is_dunder, isort_config, make_property ) clr.AddReference("System") # 3rd party import System # type: ignore # noqa: E402 __all__ = ["make_imports", "make_module", "make_package", "walk_attrs"] def make_imports(current_module_name: str) -> List[str]: """ Create the imports for the top of a stub file. :param current_module_name: The name of the current module. :return: """ imports = [ "from __future__ import annotations", '', "from enum import Enum", "from typing import Any, Dict, List, Type", '', ] for m in SYSTEM_MODULES: if m != current_module_name: imports.append(f"import {m}") return imports def make_module( name: str, module: ModuleType, attr_list: Iterable[str] = (), first_party_imports: Iterable[str] = (), converter=Converter() ) -> bool: """ Create type stubs for a module. :param name: The name of the module. :param module: The module object. :param attr_list: A list of attributes to create stubs for. :param first_party_imports: A list of first-party imports to include at the top of the file. """ buf = StringList() path = name.split('.') stubs_dir = PathPlus(f"{path[0]}-stubs") stubs_dir.maybe_make() (stubs_dir / '/'.join(x for x in path[1:-1])).maybe_make(parents=True) stub_file = stubs_dir / '/'.join(x for x in path[1:-1]) / f"{path[-1]}.pyi" import_name = name.replace(".__init__", '') for imp in (*make_imports(name), *first_party_imports): imp = re.sub( fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)\.([A-Za-z_]+)", r"from .\1.\2 import \3", imp ) imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)", r"from .\1 import \2", imp) imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)", r"from . 
import \1", imp) imp = re.sub(fr"import {import_name}$", '', imp) buf.append(imp) if import_name != "System.ComponentModel": if import_name == "System": buf.append("from .ComponentModel import MarshalByValueComponent") else: buf.append("from System.ComponentModel import MarshalByValueComponent") for attr_name in dedup(attr_list): stub_code = walk_attrs(module, attr_name, converter=converter) stub_code = stub_code.replace(f": {import_name}.", ": ") stub_code = stub_code.replace(f" -> {import_name}.", " -> ") stub_code = stub_code.replace(f"[{import_name}.", '[') stub_code = stub_code.replace("System.Collections.Generic.IDictionary[System.String,System.String]", "Any") buf.blankline(ensure_single=True) buf.blankline() buf.append(stub_code) sorted_code = isort.code(str(buf), config=isort_config) sans_unneeded_imports = fix_code( sorted_code, additional_imports=None, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys=False, remove_unused_variables=False, ignore_init_module_imports=False, ) stub_file.write_text(sans_unneeded_imports) return True def make_package( name: str, module: ModuleType, attr_list: Iterable[str] = (), converter=Converter(), ) -> bool: """ Create type stubs for a package. :param name: The name of the package. :param module: The module object. :param attr_list: A list of attributes to create stubs for. """ path = name.split('.') stubs_dir = PathPlus(f"{path[0]}-stubs") (stubs_dir / '/'.join(x for x in path[1:])).maybe_make(parents=True) return make_module(f"{name}.__init__", module, attr_list, converter=converter) def walk_attrs(module: ModuleType, attr_name, converter=Converter()) -> str: """ Create stubs for given class, including all attributes. :param module: :param attr_name: :param converter: :return: """ buf = StringList(convert_indents=True) buf.indent_type = " " if not is_dunder(attr_name): obj = getattr(module, attr_name) # TODO: case where obj is not a class if not isinstance(obj, FunctionType): bases = [] for base in obj.__bases__: if base not in {System.Object, object}: if base.__name__ in converter.type_mapping: bases.append(converter.type_mapping[base.__name__]) else: bases.append(base.__name__) bases = list(filter(lambda x: x is not Any, bases)) if bases: buf.append(f"class {attr_name}({', '.join(bases)}):\n") else: buf.append(f"class {attr_name}:\n") for child_attr_name in get_child_attrs(obj): try: child_obj = getattr(obj, child_attr_name) except TypeError as e: if str(e) in { "instance property must be accessed through a class instance", "property cannot be read", }: make_property(buf, child_attr_name) continue elif str(e) == "instance attribute must be accessed through a class instance": print(f"{e.__class__.__name__}: '{e}' occurred for {attr_name}.{child_attr_name}") continue else: raise e # TODO: if isinstance(child_obj, FunctionType): return_type, arguments = get_signature(child_obj, child_attr_name, converter) with buf.with_indent_size(buf.indent_size + 1): if arguments is not None and arguments: signature = [] for idx, argument in enumerate(arguments.split(", ")): signature.append(f"{'_' * (idx + 1)}: {converter.convert_type(argument)}") line = f"def {child_attr_name}(self, {', '.join(signature)}) -> {return_type}: ..."
if len(line) > 88: buf.blankline(ensure_single=True) buf.append(f"def {child_attr_name}(") with buf.with_indent_size(buf.indent_size + 2): buf.append("self,") for line in signature: buf.append(f"{line},") buf.append(f") -> {return_type}: ...\n") else: buf.append(line) elif arguments is None: buf.append(f"def {child_attr_name}(self, *args, **kwargs) -> {return_type}: ...") elif not arguments: # i.e. takes no arguments buf.append(f"def {child_attr_name}(self) -> {return_type}: ...") buf.blankline(ensure_single=True) return str(buf) return ''
/** * Default implementation of {@link Neo4jClient}. Uses the Neo4j Java driver to connect to and interact with the * database. * * @author Gerrit Meier * @author Michael J. Simons * @since 6.0 */ final class DefaultNeo4jClient implements Neo4jClient { private final Driver driver; private final TypeSystem typeSystem; private @Nullable final DatabaseSelectionProvider databaseSelectionProvider; private @Nullable final UserSelectionProvider userSelectionProvider; private final ConversionService conversionService; private final Neo4jPersistenceExceptionTranslator persistenceExceptionTranslator = new Neo4jPersistenceExceptionTranslator(); // Basically a local bookmark manager private final Set<Bookmark> bookmarks = new HashSet<>(); private final ReentrantReadWriteLock bookmarksLock = new ReentrantReadWriteLock(); DefaultNeo4jClient(Builder builder) { this.driver = builder.driver; this.typeSystem = driver.defaultTypeSystem(); this.databaseSelectionProvider = builder.databaseSelectionProvider; this.userSelectionProvider = builder.userSelectionProvider; this.conversionService = new DefaultConversionService(); new Neo4jConversions().registerConvertersIn((ConverterRegistry) conversionService); } @Override public QueryRunner getQueryRunner(DatabaseSelection databaseSelection, UserSelection impersonatedUser) { QueryRunner queryRunner = Neo4jTransactionManager.retrieveTransaction(driver, databaseSelection, impersonatedUser); Collection<Bookmark> lastBookmarks = Collections.emptySet(); if (queryRunner == null) { ReentrantReadWriteLock.ReadLock lock = bookmarksLock.readLock(); try { lock.lock(); lastBookmarks = new HashSet<>(bookmarks); queryRunner = driver.session(Neo4jTransactionUtils.sessionConfig(false, lastBookmarks, databaseSelection, impersonatedUser)); } finally { lock.unlock(); } } return new DelegatingQueryRunner(queryRunner, lastBookmarks, (usedBookmarks, newBookmark) -> { ReentrantReadWriteLock.WriteLock lock = bookmarksLock.writeLock(); try { lock.lock(); bookmarks.removeAll(usedBookmarks); bookmarks.add(newBookmark); } finally { lock.unlock(); } }); } private static class DelegatingQueryRunner implements QueryRunner { private final QueryRunner delegate; private final Collection<Bookmark> usedBookmarks; private final BiConsumer<Collection<Bookmark>, Bookmark> newBookmarkConsumer; private DelegatingQueryRunner(QueryRunner delegate, Collection<Bookmark> lastBookmarks, BiConsumer<Collection<Bookmark>, Bookmark> newBookmarkConsumer) { this.delegate = delegate; this.usedBookmarks = lastBookmarks; this.newBookmarkConsumer = newBookmarkConsumer; } @Override public void close() throws Exception { // We're only going to close sessions we have acquired inside the client, not something that // has been retrieved from the tx manager. 
if (this.delegate instanceof Session) { Session session = (Session) this.delegate; session.close(); this.newBookmarkConsumer.accept(usedBookmarks, session.lastBookmark()); } } @Override public Result run(String s, Value value) { return delegate.run(s, value); } @Override public Result run(String s, Map<String, Object> map) { return delegate.run(s, map); } @Override public Result run(String s, Record record) { return delegate.run(s, record); } @Override public Result run(String s) { return delegate.run(s); } @Override public Result run(Query query) { return delegate.run(query); } } // Below are all the implementations (methods and classes) as defined by the contracts of Neo4jClient @Override public UnboundRunnableSpec query(String cypher) { return query(() -> cypher); } @Override public UnboundRunnableSpec query(Supplier<String> cypherSupplier) { return new DefaultRunnableSpec(cypherSupplier); } @Override public <T> OngoingDelegation<T> delegateTo(Function<QueryRunner, Optional<T>> callback) { return new DefaultRunnableDelegation<>(callback); } @Override @Nullable public DatabaseSelectionProvider getDatabaseSelectionProvider() { return databaseSelectionProvider; } /** * Basically a holder of a cypher template supplier and a set of named parameters. Its main purpose is to orchestrate * the running of things with a bit of logging. */ static class RunnableStatement { RunnableStatement(Supplier<String> cypherSupplier) { this(cypherSupplier, new NamedParameters()); } RunnableStatement(Supplier<String> cypherSupplier, NamedParameters parameters) { this.cypherSupplier = cypherSupplier; this.parameters = parameters; } private final Supplier<String> cypherSupplier; private final NamedParameters parameters; protected final Result runWith(QueryRunner statementRunner) { String statementTemplate = cypherSupplier.get(); if (cypherLog.isDebugEnabled()) { cypherLog.debug(() -> String.format("Executing:%s%s", System.lineSeparator(), statementTemplate)); if (cypherLog.isTraceEnabled() && !parameters.isEmpty()) { cypherLog.trace(() -> String.format("with parameters:%s%s", System.lineSeparator(), parameters)); } } return statementRunner.run(statementTemplate, parameters.get()); } } /** * Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original * exception if the conversion failed. This allows safe re-throwing of the return value. * * @param ex the exception to translate * @param exceptionTranslator the {@link PersistenceExceptionTranslator} to be used for translation * @return Any translated exception */ private static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, PersistenceExceptionTranslator exceptionTranslator) { RuntimeException resolved = exceptionTranslator.translateExceptionIfPossible(ex); return resolved == null ?
ex : resolved; } private DatabaseSelection resolveTargetDatabaseName(@Nullable String parameterTargetDatabase) { String value = Neo4jClient.verifyDatabaseName(parameterTargetDatabase); if (value != null) { return DatabaseSelection.byName(value); } if (databaseSelectionProvider != null) { return databaseSelectionProvider.getDatabaseSelection(); } return DatabaseSelectionProvider.getDefaultSelectionProvider().getDatabaseSelection(); } private UserSelection resolveUser(@Nullable String userName) { if (StringUtils.hasText(userName)) { return UserSelection.impersonate(userName); } if (userSelectionProvider != null) { return userSelectionProvider.getUserSelection(); } return UserSelectionProvider.getDefaultSelectionProvider().getUserSelection(); } class DefaultRunnableSpec implements UnboundRunnableSpec, RunnableSpecBoundToDatabaseAndUser { private final RunnableStatement runnableStatement; private DatabaseSelection databaseSelection; private UserSelection userSelection; DefaultRunnableSpec(Supplier<String> cypherSupplier) { this.databaseSelection = resolveTargetDatabaseName(null); this.userSelection = resolveUser(null); this.runnableStatement = new RunnableStatement(cypherSupplier); } @Override public RunnableSpecBoundToDatabase in(String targetDatabase) { this.databaseSelection = resolveTargetDatabaseName(targetDatabase); return new DefaultRunnableSpecBoundToDatabase(); } @Override public RunnableSpecBoundToUser asUser(String asUser) { this.userSelection = resolveUser(asUser); return new DefaultRunnableSpecBoundToUser(); } @Override public <T> OngoingBindSpec<T, RunnableSpec> bind(T value) { return new DefaultOngoingBindSpec<>(value); } @Override public RunnableSpec bindAll(Map<String, Object> newParameters) { this.runnableStatement.parameters.addAll(newParameters); return this; } @Override public <T> MappingSpec<T> fetchAs(Class<T> targetClass) { return new DefaultRecordFetchSpec<>(databaseSelection, userSelection, runnableStatement, new SingleValueMappingFunction<>(conversionService, targetClass)); } @Override public RecordFetchSpec<Map<String, Object>> fetch() { return new DefaultRecordFetchSpec<>(databaseSelection, userSelection, runnableStatement, (t, r) -> r.asMap()); } @Override public ResultSummary run() { try (QueryRunner statementRunner = getQueryRunner(databaseSelection, userSelection)) { Result result = runnableStatement.runWith(statementRunner); return ResultSummaries.process(result.consume()); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, persistenceExceptionTranslator); } catch (Exception e) { throw new RuntimeException(e); } } class DefaultOngoingBindSpec<T> implements OngoingBindSpec<T, RunnableSpec> { @Nullable private final T value; DefaultOngoingBindSpec(@Nullable T value) { this.value = value; } @Override public RunnableSpec to(String name) { DefaultRunnableSpec.this.runnableStatement.parameters.add(name, value); return DefaultRunnableSpec.this; } @Override public RunnableSpec with(Function<T, Map<String, Object>> binder) { Assert.notNull(binder, "Binder is required."); return bindAll(binder.apply(value)); } } class DefaultRunnableSpecBoundToDatabase implements RunnableSpecBoundToDatabase { @Override public RunnableSpecBoundToDatabaseAndUser asUser(String aUser) { DefaultRunnableSpec.this.userSelection = resolveUser(aUser); return DefaultRunnableSpec.this; } @Override public <T> MappingSpec<T> fetchAs(Class<T> targetClass) { return DefaultRunnableSpec.this.fetchAs(targetClass); } @Override public RecordFetchSpec<Map<String, Object>> fetch() { 
return DefaultRunnableSpec.this.fetch(); } @Override public ResultSummary run() { return DefaultRunnableSpec.this.run(); } @Override public <T> OngoingBindSpec<T, RunnableSpec> bind(T value) { return DefaultRunnableSpec.this.bind(value); } @Override public RunnableSpec bindAll(Map<String, Object> parameters) { return DefaultRunnableSpec.this.bindAll(parameters); } } class DefaultRunnableSpecBoundToUser implements RunnableSpecBoundToUser { @Override public RunnableSpecBoundToDatabaseAndUser in(String aDatabase) { DefaultRunnableSpec.this.databaseSelection = resolveTargetDatabaseName(aDatabase); return DefaultRunnableSpec.this; } @Override public <T> MappingSpec<T> fetchAs(Class<T> targetClass) { return DefaultRunnableSpec.this.fetchAs(targetClass); } @Override public RecordFetchSpec<Map<String, Object>> fetch() { return DefaultRunnableSpec.this.fetch(); } @Override public ResultSummary run() { return DefaultRunnableSpec.this.run(); } @Override public <T> OngoingBindSpec<T, RunnableSpec> bind(T value) { return DefaultRunnableSpec.this.bind(value); } @Override public RunnableSpec bindAll(Map<String, Object> parameters) { return DefaultRunnableSpec.this.bindAll(parameters); } } } class DefaultRecordFetchSpec<T> implements RecordFetchSpec<T>, MappingSpec<T> { private final DatabaseSelection databaseSelection; @Nullable private final UserSelection impersonatedUser; private final RunnableStatement runnableStatement; private BiFunction<TypeSystem, Record, T> mappingFunction; DefaultRecordFetchSpec(DatabaseSelection databaseSelection, @Nullable UserSelection impersonatedUser, RunnableStatement runnableStatement, BiFunction<TypeSystem, Record, T> mappingFunction) { this.databaseSelection = databaseSelection; this.impersonatedUser = impersonatedUser; this.runnableStatement = runnableStatement; this.mappingFunction = mappingFunction; } @Override public RecordFetchSpec<T> mappedBy( @SuppressWarnings("HiddenField") BiFunction<TypeSystem, Record, T> mappingFunction) { this.mappingFunction = new DelegatingMappingFunctionWithNullCheck<>(mappingFunction); return this; } @Override public Optional<T> one() { try (QueryRunner statementRunner = getQueryRunner(this.databaseSelection, this.impersonatedUser)) { Result result = runnableStatement.runWith(statementRunner); Optional<T> optionalValue = result.hasNext() ? 
Optional.ofNullable(mappingFunction.apply(typeSystem, result.single())) : Optional.empty(); ResultSummaries.process(result.consume()); return optionalValue; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, persistenceExceptionTranslator); } catch (Exception e) { throw new RuntimeException(e); } } @Override public Optional<T> first() { try (QueryRunner statementRunner = getQueryRunner(this.databaseSelection, this.impersonatedUser)) { Result result = runnableStatement.runWith(statementRunner); Optional<T> optionalValue = result.stream().map(partialMappingFunction(typeSystem)).findFirst(); ResultSummaries.process(result.consume()); return optionalValue; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, persistenceExceptionTranslator); } catch (Exception e) { throw new RuntimeException(e); } } @Override public Collection<T> all() { try (QueryRunner statementRunner = getQueryRunner(this.databaseSelection, this.impersonatedUser)) { Result result = runnableStatement.runWith(statementRunner); Collection<T> values = result.stream().map(partialMappingFunction(typeSystem)).collect(Collectors.toList()); ResultSummaries.process(result.consume()); return values; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, persistenceExceptionTranslator); } catch (Exception e) { throw new RuntimeException(e); } } /** * @param typeSystem The actual type system * @return The partially evaluated mapping function */ private Function<Record, T> partialMappingFunction(TypeSystem typeSystem) { return r -> mappingFunction.apply(typeSystem, r); } } class DefaultRunnableDelegation<T> implements RunnableDelegation<T>, OngoingDelegation<T> { private DatabaseSelection databaseSelection; @Nullable private UserSelection impersonatedUser; private final Function<QueryRunner, Optional<T>> callback; DefaultRunnableDelegation(Function<QueryRunner, Optional<T>> callback) { this.callback = callback; this.databaseSelection = resolveTargetDatabaseName(null); this.impersonatedUser = resolveUser(null); } @Override public RunnableDelegation<T> in(@Nullable String targetDatabase) { this.databaseSelection = resolveTargetDatabaseName(targetDatabase); return this; } @Override public Optional<T> run() { try (QueryRunner queryRunner = getQueryRunner(databaseSelection, this.impersonatedUser)) { return callback.apply(queryRunner); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, persistenceExceptionTranslator); } catch (Exception e) { throw new RuntimeException(e); } } } }
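A minimal usage sketch of the fluent API implemented above; the Cypher string is illustrative, but every call in the chain (query, fetchAs, one) corresponds to a method visible in this class.

// Counts the nodes in the database selected by the client's configured
// DatabaseSelectionProvider; one() runs the query and maps the single value.
static Optional<Long> countNodes(Neo4jClient client) {
    return client.query("MATCH (n) RETURN count(n)")
            .fetchAs(Long.class)
            .one();
}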
/** * @author Alessandro Caiani and Antoine Godin * * Generates the network of balance sheet connections among the selected populations for the selected assets. Each NxN matrix is represented * as one 1x(N+2) row, where the first two fields (with keys "-1" and "-2") contain (1) the number of agents (i.e. N) and (2) the selection of * assets (asset ids separated by "|"). Each remaining cell of the row starts with the agent's population id, followed by one "|"-separated * block per issuer, each block holding the issuer's agent id and an "&"-separated list of asset ids (and values, when weighted). */ public class FlowNetworkComputer extends AbstractMicroComputer implements MicroMultipleVariablesComputer { private int[] populationsId; private int[] assetsId; private boolean weighted; private Object fileNamePrefix; private String fileName; private String fileNameExtension; private CSVWriter csvWriter; private boolean firstRound = true; private int fileNumber; //TODO: MAKE SURE THAT THE ASSET ISSUER IS IN THE POPULATIONS YOU WANT TO SEE public FlowNetworkComputer(){} /** * @param populationsId * @param assetsId * @param weighted * @param fileName */ public FlowNetworkComputer(int[] populationsId, int[] assetsId, boolean weighted, String fileName) { super(); this.populationsId = populationsId; this.assetsId = assetsId; this.weighted = weighted; this.fileName = fileName; } /* (non-Javadoc) * @see jmab.report.MicroMultipleVariablesComputer#computeVariables(jmab.simulations.MacroSimulation) */ @Override public Map<Long, Double> computeVariables(MacroSimulation sim) { MacroPopulation macroPop = (MacroPopulation) sim.getPopulation(); TreeMap<Long,String> result=new TreeMap<Long,String>(); ArrayList<Integer> populations = new ArrayList<Integer>(); for(int populationId:populationsId) populations.add(populationId); int popSize = 0; for(int populationId:populationsId){ Population pop = macroPop.getPopulation(populationId); popSize+=pop.getSize(); for (Agent i:pop.getAgents()){ String entry = ""; MacroAgent agent=(MacroAgent) i; entry = entry.concat(String.valueOf(populationId)); if (!agent.isDead()){ TreeMap<Long,TreeMap<Integer, Double>> agentList=new TreeMap<Long,TreeMap<Integer, Double>>(); for(int assetId:assetsId){ List<Item> assets=agent.getItemsStockMatrix(true, assetId); for (Item j:assets){ if(j.getAge()==0){ long liabId; if(j instanceof AbstractGood){ AbstractGood good = (AbstractGood) j; liabId = good.getProducer().getAgentId(); }else{ liabId = j.getLiabilityHolder().getAgentId(); } TreeMap<Integer,Double> liabEntry; if(agentList.containsKey(liabId)) liabEntry = agentList.remove(liabId); else liabEntry = new TreeMap<Integer,Double>(); double val = j.getValue(); if(liabEntry.containsKey(assetId)) val+=liabEntry.remove(assetId); liabEntry.put(assetId, val); agentList.put(liabId, liabEntry); } } } Set<Long> orderedKeys = agentList.keySet(); for(Long key:orderedKeys){ TreeMap<Integer,Double> liabEntry = agentList.get(key); entry = entry.concat("|"); entry = entry.concat(String.valueOf(key)); Set<Integer> orderedAssetsKeys = liabEntry.keySet(); for(Integer assetKey:orderedAssetsKeys){ entry = entry.concat("&"); entry = entry.concat(String.valueOf(assetKey)); if(weighted){ entry = entry.concat("&"); entry = entry.concat(String.valueOf(liabEntry.get(assetKey))); } } } } else{ entry = entry.concat("|"); entry = entry.concat(String.valueOf(Double.NaN)); } result.put(agent.getAgentId(), entry); } } result.put((long) -1, String.valueOf(popSize)); String entry = String.valueOf(sim.getRound()); for(int assetId:assetsId){ entry = 
entry.concat("|"); entry = entry.concat(String.valueOf(assetId)); } result.put((long) -2, entry); Set<Long> orderedKeys = result.keySet(); if(firstRound){ for(Long key:orderedKeys){ csvWriter.newData(key); } csvWriter.endRecord(); firstRound=false; } for(Long key:orderedKeys){ csvWriter.newData(result.get(key)); } csvWriter.endRecord(); return new TreeMap<Long,Double>(); } /** * @return the populationsId */ public int[] getPopulationsId() { return populationsId; } /** * @param populationsId the populationsId to set */ public void setPopulationsId(int[] populationsId) { this.populationsId = populationsId; } /** * @return the marketId */ public int[] getAssetsId() { return assetsId; } /** * @param assetsId the marketId to set */ public void setAssetsId(int[] assetsId) { this.assetsId = assetsId; } /** * @return the fileName */ public String getFileName() { return fileName; } /** * @param fileName the fileName to set */ public void setFileName(String fileName) { this.fileName = fileName; } /** * @return the weighted */ public boolean isWeighted() { return weighted; } /** * @param weighted the weighted to set */ public void setWeighted(boolean weighted) { this.weighted = weighted; } /** * @return the fileNamePrefix */ public Object getFileNamePrefix() { return fileNamePrefix; } /** * @param fileNamePrefix the fileNamePrefix to set */ public void setFileNamePrefix(Object fileNamePrefix) { this.fileNamePrefix = fileNamePrefix; } /** * @return the fileNameExtension */ public String getFileNameExtension() { return fileNameExtension; } /** * @param fileNameExtension the fileNameExtension to set */ public void setFileNameExtension(String fileNameExtension) { this.fileNameExtension = fileNameExtension; } @Override public void dispose(){ if (csvWriter != null) { csvWriter.close(); } fileNumber++; } @Override public void initialise(){ try { csvWriter = new CSVWriter(new FileOutputStream(this.fileNamePrefix + fileName + getNumberingSuffix() + fileNameExtension),','); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private String getNumberingSuffix() { return Integer.toString(fileNumber + 1); } }
/**
 * Runs through the log removing segments whose last-modified time exceeds the
 * configured retention period; returns the number of segments deleted.
 */
private int cleanupExpiredSegments(final Log log) {
	final long startMs = time.milliseconds();
	return log.deleteOldSegments(new Predicate<LogSegment>() {
		@Override
		public boolean apply(LogSegment segment) {
			// A segment is expired once its age exceeds the retention window.
			return startMs - segment.lastModified() > log.config.retentionMs;
		}
	});
}
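On Java 8+ the same retention check reads more naturally as a lambda. The sketch below is a hypothetical, self-contained rendition of the deletion loop; Segment, lastModifiedMs and the retention constant are illustrative stand-ins for the log abstractions above, not the real types.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Predicate;

public class RetentionDemo {
	/** Minimal stand-in for a log segment: a name and a last-modified timestamp. */
	record Segment(String name, long lastModifiedMs) {}

	/** Removes and counts every segment matching the predicate. */
	static int deleteOldSegments(List<Segment> segments, Predicate<Segment> expired) {
		int deleted = 0;
		for (Iterator<Segment> it = segments.iterator(); it.hasNext(); ) {
			if (expired.test(it.next())) {
				it.remove();
				deleted++;
			}
		}
		return deleted;
	}

	public static void main(String[] args) {
		long retentionMs = 7L * 24 * 60 * 60 * 1000; // one-week retention, illustrative
		long now = System.currentTimeMillis();
		List<Segment> segments = new ArrayList<>(List.of(
				new Segment("00000000.log", now - 10L * 24 * 60 * 60 * 1000), // expired
				new Segment("00001024.log", now - 1L * 24 * 60 * 60 * 1000))); // fresh
		int removed = deleteOldSegments(segments,
				s -> now - s.lastModifiedMs() > retentionMs);
		System.out.println(removed + " segment(s) deleted"); // prints: 1 segment(s) deleted
	}
}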
On joint maximum-likelihood estimation of PCR efficiency and initial amount of target

We consider the problem of estimating the unknown parameters of the real-time polymerase chain reaction (RT-PCR) from noisy observations. The joint maximum-likelihood (ML) estimator of the RT-PCR efficiency and the initial number of DNA target molecules is derived. The mean-square error performance of the estimator is studied via simulations. The simulation results indicate that the proposed estimator significantly outperforms a competing technique.
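The abstract states the estimation problem without reproducing the model. A standard formulation consistent with it, assumed here rather than taken from the paper, treats amplification as exponential growth observed in additive noise, with $x_0$ the initial number of target molecules and $p \in (0,1]$ the per-cycle efficiency:

% Assumed RT-PCR observation model and joint estimator (not quoted from the paper):
\begin{align}
  y_n &= x_0\,(1+p)^{n} + w_n, \qquad n = 1,\dots,N, \\
  (\hat{x}_0,\hat{p}) &= \operatorname*{arg\,min}_{x_0,\,p}\;
      \sum_{n=1}^{N} \bigl( y_n - x_0\,(1+p)^{n} \bigr)^{2}.
\end{align}

Under i.i.d. Gaussian noise $w_n$, this least-squares fit coincides with the joint ML estimator the abstract refers to.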
import asyncio
from functools import partial

from Crypto import Random
from Crypto.Cipher import AES
from Crypto.Hash import SHA256
from Crypto.Util import Counter


async def async_encrypt_attachment(data: AsyncDataT) -> _EncryptedReturnT:
    """Encrypt data with AES-CTR, yielding ciphertext chunks followed by a
    final dict holding the key, IV and SHA-256 digest needed for decryption.
    AsyncDataT, _EncryptedReturnT, async_generator_from_data and
    _get_decryption_info_dict are defined elsewhere in this module."""
    key = Random.new().read(32)  # 256-bit AES key
    iv = Random.new().read(8)    # 64-bit random prefix for the CTR block counter

    ctr = Counter.new(64, prefix=iv, initial_value=0)
    cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
    sha256 = SHA256.new()

    loop = asyncio.get_event_loop()

    async for chunk in async_generator_from_data(data):
        # Offload the CPU-bound encryption and hashing to the default
        # executor so large attachments do not block the event loop.
        update_crypt = partial(cipher.encrypt, chunk)
        crypt_chunk = await loop.run_in_executor(None, update_crypt)

        update_hash = partial(sha256.update, crypt_chunk)
        await loop.run_in_executor(None, update_hash)

        yield crypt_chunk

    yield _get_decryption_info_dict(key, iv, sha256)