v. Aston Villa

608 – Attempted Arsenal passes in the entire match
307 – Attempted Arsenal passes in the second half
226 – Attempted Aston Villa passes in the entire match
152 – Completed passes by Aston Villa full time
187 – Completed passes by Song, Sagna, and Arteta
7 – Arsenal players who completed more than 90% of their attempted passes (Sagna, Ramsey, Santos, Ox, Arteta, Song, Djourou)
247 – Attempted Arsenal passes in the Aston Villa final third
103 – Attempted Aston Villa passes in the Arsenal final third
49 – Attempted passes in the Aston Villa final third by Tomas Rosicky
41 – Successful passes in the Aston Villa final third by Tomas Rosicky*
59 – Successful passes by Tomas Rosicky
70 – Percent of Tomas Rosicky's successful passes that were in the Aston Villa final third
27 – Successful passes in the Aston Villa final third by Bacary Sagna
27 – Attempted passes in the Aston Villa final third by Bacary Sagna
9 – Successful passes in the Arsenal final third by Marc Albrighton**
14 – Key Passes by Arsenal
2 – Key Passes by Aston Villa
3 – Successful take-ons by Alex Oxlade-Chamberlain (of 4 attempted)*
2 – Successful take-ons by Carlos Cuellar (of 2 attempted)**
19 – Arsenal shots
8 – Arsenal shots on goal
3 – Aston Villa shots
0 – Aston Villa shots on goal
1 – Blocks by Djourou to prevent what probably would have been an Aston Villa shot on goal² but that still doesn't count as an Aston Villa shot on goal no matter how much you think it should, or how crazy that seems to you, or how much you think it makes stats meaningless
5 – Yellow cards given to Aston Villa
44 – League matches since the opposition team was given 5 yellow cards in an Arsenal match (Arsenal 2-1 Everton, 1 February 2011)
9 – League matches since Arsenal were given 5 yellow cards (Arsenal 1-2 Man U, 22 January 2012)
17 – Different goalscorers in the League for Arsenal this season¹
2 – Career Arsenal goals, Gael Clichy
2 – Career Arsenal goals, Kieran Gibbs
264 – Career Arsenal appearances, Gael Clichy
66 – Career Arsenal appearances, Kieran Gibbs
28, 25 – Premier League matches since Robin van Persie went two consecutive games without scoring (A-Newcastle, H-Liverpool; H-Swansea, A-Blackburn)
10 – Assists by Alex Song for Arsenal in all competitions
17 – Career Arsenal assists by Alex Song
3 – Tackles by Alex Song v. Aston Villa (of 3 attempted)

Just how good have Arsenal been in this 8-match unbeaten run? Some of you may remember a post from March 15th on 7amkickoff where I took the Arsenal season, split it in half, and compiled the averages for goals scored (GFA), goals allowed (GAA), shots per game by Arsenal (SPG), shots on goal per game (SOG), the opposition's shots and shots on goal (OSPG, OSOG), and then the ratios of goals per shot on goal (GPSOG) and the percent of overall shots that Arsenal score and allow the opposition to score (CONV). Just for fun, today I compiled the Arsenal season again, then split off the first 22 matches and the last 8-match unbeaten run.

That, folks, is what an unbeaten run looks like: allowing less than a goal a game, scoring nearly three, converting 43% of our shots on goal, keeping the opposition to just 8 shots per game, and allowing them to convert a stingy 9% of those shots. Arsenal are working very hard right now and playing some great football both offensively and defensively.

*Leads all players
**Leads just his team
1. United are second with 16 different goal scorers
2. See the comments in Everton 0-1 Arsenal: By the Numbers
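For readers who want to reproduce these split averages, here is a minimal Python sketch of the arithmetic the post describes, using made-up match numbers (the real per-match data is not reproduced here):

# Hypothetical sketch of the ratios described above: GFA (goals-for average),
# SOG (shots on goal per game), GPSOG (goals per shot on goal) and CONV
# (goals per overall shot). Numbers below are placeholders, not the real data.
matches = [  # (goals_for, goals_against, shots, shots_on_goal)
    (3, 0, 19, 8),
    (2, 1, 15, 6),
]
n = len(matches)
gfa = sum(m[0] for m in matches) / n
sog = sum(m[3] for m in matches) / n
gpsog = sum(m[0] for m in matches) / sum(m[3] for m in matches)
conv = sum(m[0] for m in matches) / sum(m[2] for m in matches)
print(f"GFA {gfa:.2f}, SOG {sog:.2f}, GPSOG {gpsog:.0%}, CONV {conv:.0%}")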
// Imports below are assumed for a Spring Data JPA project; Entity and Identifier
// are project-local types expected in the same package.
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

import javax.persistence.MappedSuperclass;
import javax.persistence.Transient;

import org.springframework.context.ApplicationEvent;
import org.springframework.data.domain.AfterDomainEventPublication;
import org.springframework.data.domain.DomainEvents;
import org.springframework.lang.NonNull;

/**
 * Base class for Aggregate Root entities as they are described in DDD. It exposes a
 * {@link #register(ApplicationEvent)} method allowing subclasses to register events to be published as soon as the
 * aggregate is saved via calls to {@link org.springframework.data.repository.CrudRepository#save(Object)}.
 *
 * @author Vincent Nadoll
 * @see <a href="https://docs.spring.io/spring-data/jpa/docs/current/reference/html/#core.domain-events">Publishing
 * Events from Aggregate Roots</a>
 */
@MappedSuperclass
public abstract class AggregateRoot<ID extends Identifier> extends Entity<ID> {

    private final transient Collection<ApplicationEvent> events = new ArrayList<>();

    protected final <T extends ApplicationEvent> T register(@NonNull T event) {
        events.add(event);
        return event;
    }

    @Transient
    @DomainEvents
    @SuppressWarnings("unused")
    Collection<ApplicationEvent> getEvents() {
        return Collections.unmodifiableCollection(events);
    }

    @AfterDomainEventPublication
    @SuppressWarnings("unused")
    void wipeEvents() {
        events.clear();
    }
}
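The collect-then-publish pattern in that class is framework-agnostic: events accumulate on the aggregate, the persistence layer drains them at save time, and a callback clears them afterwards. A hypothetical Python sketch of the same idea (all names invented for illustration):

# Hypothetical, framework-agnostic sketch of the aggregate-root event pattern.
class AggregateRoot:
    def __init__(self):
        self._events = []

    def register(self, event):
        self._events.append(event)
        return event

    def events(self):           # read by the "repository" when saving
        return tuple(self._events)

    def wipe_events(self):      # called after the events were published
        self._events.clear()

class Order(AggregateRoot):
    def place(self):
        self.register("OrderPlaced")

order = Order()
order.place()
print(order.events())   # ('OrderPlaced',)
order.wipe_events()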
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"strings"
)

type Token int

const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	WS

	// Literals
	IDENT // fields, table name

	// Misc characters
	ASTERISK // *
	COMMA    // ,

	// Keywords
	SELECT
	FROM
)

func isWhitespace(ch rune) bool { return ch == ' ' || ch == '\t' || ch == '\n' }

func isLetter(ch rune) bool { return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') }

func isDigit(ch rune) bool { return ch >= '0' && ch <= '9' }

var eof = rune(0)

// Scanner represents a lexical scanner.
type Scanner struct {
	r *bufio.Reader
}

func NewScanner(r io.Reader) *Scanner {
	return &Scanner{r: bufio.NewReader(r)}
}

func (s *Scanner) read() rune {
	ch, _, err := s.r.ReadRune()
	if err != nil {
		return eof
	}
	return ch
}

func (s *Scanner) unread() { _ = s.r.UnreadRune() }

// Scan returns the next token and literal value.
func (s *Scanner) Scan() (tok Token, lit string) {
	// Read the next rune.
	ch := s.read()

	// If we see whitespace then consume all contiguous whitespace;
	// if we see a letter then consume it as an ident or reserved word.
	if isWhitespace(ch) {
		s.unread()
		return s.scanWhitespace()
	} else if isLetter(ch) {
		s.unread()
		return s.scanIdent()
	}

	// Otherwise read the individual character.
	switch ch {
	case eof:
		return EOF, ""
	case '*':
		return ASTERISK, string(ch)
	case ',':
		return COMMA, string(ch)
	}

	return ILLEGAL, string(ch)
}

func (s *Scanner) scanWhitespace() (tok Token, lit string) {
	// Create a buffer and read the current character into it.
	var buf bytes.Buffer
	buf.WriteRune(s.read())

	// Read every subsequent whitespace character into the buffer.
	// Non-whitespace characters and EOF will cause the loop to exit.
	for {
		if ch := s.read(); ch == eof {
			break
		} else if !isWhitespace(ch) {
			s.unread()
			break
		} else {
			buf.WriteRune(ch)
		}
	}

	return WS, buf.String()
}

// scanIdent consumes the current rune and all following ident runes.
func (s *Scanner) scanIdent() (tok Token, lit string) {
	// Create a buffer and read the current character into it.
	var buf bytes.Buffer
	buf.WriteRune(s.read())

	// Read every following ident rune into the buffer.
	// Non-ident runes and EOF will cause the loop to exit.
	for {
		if ch := s.read(); ch == eof {
			break
		} else if !isLetter(ch) && !isDigit(ch) && ch != '_' {
			s.unread()
			break
		} else {
			buf.WriteRune(ch)
		}
	}

	// If the string matches a keyword then return that keyword.
	switch strings.ToUpper(buf.String()) {
	case "SELECT":
		return SELECT, buf.String()
	case "FROM":
		return FROM, buf.String()
	}

	// Otherwise return as a regular identifier.
	return IDENT, buf.String()
}

// Parser represents a parser with a one-token lookahead buffer.
type Parser struct {
	s   *Scanner
	buf struct {
		tok Token  // last read token
		lit string // last read literal
		n   int    // buffer size (max=1)
	}
}

func NewParser(r io.Reader) *Parser {
	return &Parser{s: NewScanner(r)}
}

// scan returns the next token from the underlying scanner.
// If a token has been unscanned then read that instead.
func (p *Parser) scan() (tok Token, lit string) {
	// If we have a token on the buffer, then return it.
	if p.buf.n != 0 {
		p.buf.n = 0
		return p.buf.tok, p.buf.lit
	}

	// Otherwise read the next token from the scanner.
	tok, lit = p.s.Scan()

	// Save it to the buffer in case we unscan later.
	p.buf.tok, p.buf.lit = tok, lit

	return
}

// unscan pushes the previously read token back onto the buffer.
func (p *Parser) unscan() { p.buf.n = 1 }

// scanIgnoreWhitespace scans the next non-whitespace token.
func (p *Parser) scanIgnoreWhitespace() (tok Token, lit string) {
	tok, lit = p.scan()
	if tok == WS {
		tok, lit = p.scan()
	}
	return
}

// Parse parses a SQL SELECT statement.
func (p *Parser) Parse() (*SelectStatement, error) {
	stmt := &SelectStatement{}

	// First token should be the "SELECT" keyword.
	if tok, lit := p.scanIgnoreWhitespace(); tok != SELECT {
		return nil, fmt.Errorf("found %q, expected SELECT", lit)
	}

	for {
		// Read a field.
		tok, lit := p.scanIgnoreWhitespace()
		if tok != IDENT && tok != ASTERISK {
			return nil, fmt.Errorf("found %q, expected field", lit)
		}
		stmt.Fields = append(stmt.Fields, lit)

		// If the next token is not a comma then break the loop.
		if tok, _ := p.scanIgnoreWhitespace(); tok != COMMA {
			p.unscan()
			break
		}
	}

	// Next we should see the "FROM" keyword.
	if tok, lit := p.scanIgnoreWhitespace(); tok != FROM {
		return nil, fmt.Errorf("found %q, expected FROM", lit)
	}

	// Finally we should read the table name.
	tok, lit := p.scanIgnoreWhitespace()
	if tok != IDENT {
		return nil, fmt.Errorf("found %q, expected table name", lit)
	}
	stmt.TableName = lit

	return stmt, nil
}

// SelectStatement represents a SQL SELECT statement.
type SelectStatement struct {
	Fields    []string
	TableName string
}

func main() {
	p := NewParser(strings.NewReader("select name, age from things"))
	stmt, err := p.Parse()
	fmt.Printf("Got stmt %v with error: %v", stmt, err)
}
#include "./interface/abstractexecuter.h"

AbstractExecuter::AbstractExecuter(QObject *parent) : QObject(parent) {
}

AbstractExecuter::~AbstractExecuter() {
}
/**
 * This implements getTime() for the national types. It lives
 * here so it can be shared between all the national types.
 *
 * @exception StandardException thrown on failure to convert
 */
protected Time nationalGetTime(Calendar cal) throws StandardException {
    if (isNull())
        return null;
    SQLTime internalTime = new SQLTime(getString(), false, getLocaleFinder(), cal);
    return internalTime.getTime(cal);
}
// Update changes the resource snapshot held by the management server, which
// updates connected clients as required.
func (s *ManagementServer) Update(opts UpdateOptions) error {
	s.version++

	var listeners []types.Resource
	for _, l := range opts.Listeners {
		listeners = append(listeners, l)
	}

	snapshot := v3cache.NewSnapshot(strconv.Itoa(s.version), nil, nil, nil, listeners, nil, nil)
	if err := snapshot.Consistent(); err != nil {
		return fmt.Errorf("failed to create new resource snapshot: %v", err)
	}
	logger.Infof("Created new resource snapshot...")

	if err := s.cache.SetSnapshot(opts.NodeID, snapshot); err != nil {
		return fmt.Errorf("failed to update resource snapshot in management server: %v", err)
	}
	logger.Infof("Updated snapshot cache with resource snapshot...")
	return nil
}
/**
 * {@inheritDoc}
 *
 * @throws CTKException if it was not able to export the molecule into the desired output format
 */
@Override
public String convertMolecule(AbstractMolecule container, StType type) throws CTKException {
    String result = null;
    Molecule molecule = (Molecule) container.getMolecule();
    switch (type) {
    case SMILES:
        try {
            result = molecule2SMILES(molecule);
        } catch (MolExportException e) {
            throw new CTKException("unable to export molecule to SMILES!", e);
        }
        break;
    case MOLFILE:
        try {
            result = molecule2MolFile(molecule);
        } catch (MolExportException e) {
            throw new CTKException("unable to export molecule to molfile!", e);
        }
        break;
    default:
        break;
    }
    return result;
}
/// Diff0 compression: run `diff0_compress_i64` first, then keep the result of a
/// second `diff0_compress_u32` pass only if it actually decreases the final size.
pub fn diff0_compress(diffs: Vec<i64>) -> Result<Vec<u8>, Error> {
    let data = diff0_compress_i64(diffs)?;
    let first_compressed_out_size = data.len();
    let data_2nd_compressed = diff0_compress_u32(data.clone())?;
    // `data` holds u32 words, so its byte size is 4 * first_compressed_out_size.
    if data_2nd_compressed.len() < std::mem::size_of::<u32>() * first_compressed_out_size {
        Ok(data_2nd_compressed)
    } else {
        // convert Vec<u32> to Vec<u8>
        Ok(u32_to_u8(&data))
    }
}
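The idea behind this kind of "diff0" delta coding is that successive differences are usually small and therefore compress better than raw values, and decoding is just a running sum. A minimal Python sketch of the encode/decode round trip (illustrative only; the Rust helpers above also pack the deltas into words, which this sketch omits):

# Minimal sketch of delta (diff0) encoding: store the first value plus
# successive differences; decoding accumulates them back.
def diff0_encode(values):
    return [values[0]] + [b - a for a, b in zip(values, values[1:])]

def diff0_decode(diffs):
    out = [diffs[0]]
    for d in diffs[1:]:
        out.append(out[-1] + d)
    return out

vals = [100, 101, 103, 106, 110]
enc = diff0_encode(vals)            # [100, 1, 2, 3, 4]
assert diff0_decode(enc) == vals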
#
#
#      0=================================0
#      |    Kernel Point Convolutions    |
#      0=================================0
#
#
# ----------------------------------------------------------------------------
#
#      Callable script to start a training on ModelNet40 dataset
#
# ----------------------------------------------------------------------------
#
#      Hugues THOMAS - 06/03/2020
#


# ----------------------------------------------------------------------------
#
#           Imports and global variables
#       \**********************************/
#

# Common libs
import signal
import os
import time
import numpy as np
import sys
import torch

# Dataset
from datasets.SemanticKitti import *
from torch.utils.data import DataLoader

from utils.config import Config
from utils.tester import ModelTester
from models.architectures import KPCNN, KPFCNN

np.random.seed(0)
torch.manual_seed(0)
torch.cuda.manual_seed_all(0)


# ----------------------------------------------------------------------------
#
#           Main Call
#       \***************/
#

def model_choice(chosen_log):

    ###########################
    # Call the test initializer
    ###########################

    # Automatically retrieve the last trained model
    if chosen_log in ['last_ModelNet40', 'last_ShapeNetPart', 'last_S3DIS']:

        # Dataset name
        test_dataset = '_'.join(chosen_log.split('_')[1:])

        # List all training logs
        logs = np.sort([os.path.join('results', f) for f in os.listdir('results') if f.startswith('Log')])

        # Find the last log of the asked dataset
        for log in logs[::-1]:
            log_config = Config()
            log_config.load(log)
            if log_config.dataset.startswith(test_dataset):
                chosen_log = log
                break

        if chosen_log in ['last_ModelNet40', 'last_ShapeNetPart', 'last_S3DIS']:
            raise ValueError('No log of the dataset "' + test_dataset + '" found')

    # Check if log exists
    if not os.path.exists(chosen_log):
        raise ValueError('The given log does not exist: ' + chosen_log)

    return chosen_log


# ----------------------------------------------------------------------------
#
#           Main Call
#       \***************/
#

if __name__ == '__main__':

    ###############################
    # Choose the model to visualize
    ###############################

    #   Here you can choose which model you want to test with the variable test_model.
    #   Here are the possible values:
    #
    #   > 'last_XXX': Automatically retrieve the last trained model on dataset XXX
    #   > '(old_)results/Log_YYYY-MM-DD_HH-MM-SS': Directly provide the path of a trained model

    chosen_log = 'results/Log_2020-10-06_16-51-05'  # => ModelNet40

    # Choose the index of the checkpoint to load OR None if you want to load the current checkpoint
    chkp_idx = None

    # Choose to test on validation or test split
    on_val = True

    # Deal with 'last_XXXXXX' choices
    chosen_log = model_choice(chosen_log)

    ############################
    # Initialize the environment
    ############################

    # Set which gpu is going to be used
    GPU_ID = '0'
    if torch.cuda.device_count() > 1:
        GPU_ID = '0, 1'

    ###############
    # Previous chkp
    ###############

    # Find all checkpoints in the chosen training folder
    chkp_path = os.path.join(chosen_log, 'checkpoints')
    chkps = [f for f in os.listdir(chkp_path) if f[:4] == 'chkp']

    # Find which snapshot to restore
    if chkp_idx is None:
        chosen_chkp = 'current_chkp.tar'
    else:
        chosen_chkp = np.sort(chkps)[chkp_idx]
    chosen_chkp = os.path.join(chosen_log, 'checkpoints', chosen_chkp)

    # Initialize configuration class
    config = Config()
    config.load(chosen_log)

    ##################################
    # Change model parameters for test
    ##################################

    # Change parameters for the test here. For example, you can stop augmenting the input data.
    config.global_fet = False
    config.validation_size = 200
    config.input_threads = 16
    config.n_frames = 4
    config.n_test_frames = 4  # it should be smaller than config.n_frames
    if config.n_frames < config.n_test_frames:
        config.n_frames = config.n_test_frames
    config.big_gpu = True
    config.dataset_task = '4d_panoptic'
    # config.sampling = 'density'
    config.sampling = 'importance'
    config.decay_sampling = 'None'
    config.stride = 1
    config.first_subsampling_dl = 0.061

    ##############
    # Prepare Data
    ##############

    print()
    print('Data Preparation')
    print('****************')

    if on_val:
        set = 'validation'
    else:
        set = 'test'

    # Initiate dataset
    if config.dataset.startswith('ModelNet40'):
        test_dataset = ModelNet40Dataset(config, train=False)
        test_sampler = ModelNet40Sampler(test_dataset)
        collate_fn = ModelNet40Collate
    elif config.dataset == 'S3DIS':
        test_dataset = S3DISDataset(config, set='validation', use_potentials=True)
        test_sampler = S3DISSampler(test_dataset)
        collate_fn = S3DISCollate
    elif config.dataset == 'SemanticKitti':
        test_dataset = SemanticKittiDataset(config, set=set,
                                            balance_classes=False,
                                            seqential_batch=True)
        test_sampler = SemanticKittiSampler(test_dataset)
        collate_fn = SemanticKittiCollate
    else:
        raise ValueError('Unsupported dataset : ' + config.dataset)

    # Data loader
    test_loader = DataLoader(test_dataset,
                             batch_size=1,
                             sampler=test_sampler,
                             collate_fn=collate_fn,
                             num_workers=0,  # config.input_threads,
                             pin_memory=True)

    # Calibrate samplers
    test_sampler.calibration(test_loader, verbose=True)

    print('\nModel Preparation')
    print('*****************')

    # Define network model
    t1 = time.time()
    if config.dataset_task == 'classification':
        net = KPCNN(config)
    elif config.dataset_task in ['cloud_segmentation', 'slam_segmentation']:
        net = KPFCNN(config, test_dataset.label_values, test_dataset.ignored_labels)
    else:
        raise ValueError('Unsupported dataset_task for testing: ' + config.dataset_task)

    # Define a visualizer class
    tester = ModelTester(net, chkp_path=chosen_chkp)
    print('Done in {:.1f}s\n'.format(time.time() - t1))

    print('\nStart test')
    print('**********\n')

    config.dataset_task = '4d_panoptic'

    # Testing
    if config.dataset_task == 'classification':
        a = 1 / 0  # placeholder guard: classification testing is not implemented in this script
    elif config.dataset_task == 'cloud_segmentation':
        tester.cloud_segmentation_test(net, test_loader, config)
    elif config.dataset_task == 'slam_segmentation':
        tester.slam_segmentation_test(net, test_loader, config)
    elif config.dataset_task == '4d_panoptic':
        tester.panoptic_4d_test(net, test_loader, config)
    else:
        raise ValueError('Unsupported dataset_task for testing: ' + config.dataset_task)
package handler

import (
	"context"
	"fmt"
	"sort"

	"github.com/golang/protobuf/ptypes"
	"github.com/google/wire"

	proto "github.com/ops-cn/go-devops/proto/admin"
	"github.com/ops-cn/go-devops/proto/unified"

	"github.com/ops-cn/go-devops/admin/app/model"
	"github.com/ops-cn/go-devops/common/auth"
	"github.com/ops-cn/go-devops/common/errors"
	"github.com/ops-cn/go-devops/common/schema"
	"github.com/ops-cn/go-devops/common/util"
)

// LoginSet injects Login
var LoginSet = wire.NewSet(wire.Struct(new(Login), "*"))

// Login manages login
type Login struct {
	Auth            auth.Auther
	UserModel       model.IUser
	UserRoleModel   model.IUserRole
	RoleModel       model.IRole
	RoleMenuModel   model.IRoleMenu
	MenuModel       model.IMenu
	MenuActionModel model.IMenuAction
}

// Verify validates login credentials
func (loginMgr *Login) Verify(ctx context.Context, req *proto.LoginParam, res *unified.Response) error {
	// Check whether this is the root user
	root := schema.GetRootUser()
	user := &proto.User{}
	if req.UserName == root.UserName && root.Password == req.Password {
		util.StructCopy(user, root)
		fmt.Println(user)
		res.Items, _ = ptypes.MarshalAny(user)
		return nil
	}
	fmt.Println(user)

	result, err := loginMgr.UserModel.Query(ctx, schema.UserQueryParam{
		UserName: req.UserName,
	})
	if err != nil {
		return err
	} else if len(result.Data) == 0 {
		return errors.ErrInvalidUserName
	}

	item := result.Data[0]
	if item.Password != util.SHA1HashString(req.Password) {
		return errors.ErrInvalidPassword
	} else if item.Status != 1 {
		return errors.ErrUserDisable
	}
	util.StructCopy(user, item)
	res.Items, err = ptypes.MarshalAny(user)
	return err
}

/*func (loginService *Login) CheckAndGetUser(ctx context.Context, req *proto.UserLoginInfo, res *unified.Response) error {
	item, err := loginService.UserModel.Get(ctx, req.UserID)
	if err != nil {
		return err
	} else if item == nil {
		return errors.ErrInvalidUser
	} else if item.Status != 1 {
		return errors.ErrUserDisable
	}
	user := &proto.User{}
	util.StructCopy(user, item)
	res.Items, _ = ptypes.MarshalAny(user)
	return nil
}*/

func (loginMgr *Login) checkAndGetUser(ctx context.Context, userID string) (*schema.User, error) {
	user, err := loginMgr.UserModel.Get(ctx, userID)
	if err != nil {
		return nil, err
	} else if user == nil {
		return nil, errors.ErrInvalidUser
	} else if user.Status != 1 {
		return nil, errors.ErrUserDisable
	}
	return user, nil
}

// GetLoginInfo returns the current user's login information
func (loginMgr *Login) GetLoginInfo(ctx context.Context, req *proto.UserLoginInfo, res *unified.Response) error {
	if isRoot := schema.CheckIsRootUser(ctx, req.UserID); isRoot {
		root := schema.GetRootUser()
		loginInfo := &proto.UserLoginInfo{
			UserName: root.UserName,
			RealName: root.RealName,
		}
		res.Items, _ = ptypes.MarshalAny(loginInfo)
		return nil
	}

	user, err := loginMgr.checkAndGetUser(ctx, req.UserID)
	if err != nil {
		return err
	}

	info := &proto.UserLoginInfo{
		UserID:   user.ID,
		UserName: user.UserName,
		RealName: user.RealName,
	}

	userRoleResult, err := loginMgr.UserRoleModel.Query(ctx, schema.UserRoleQueryParam{
		UserID: req.UserID,
	})
	if err != nil {
		return err
	}

	if roleIDs := userRoleResult.Data.ToRoleIDs(); len(roleIDs) > 0 {
		roleResult, err := loginMgr.RoleModel.Query(ctx, schema.RoleQueryParam{
			IDs:    roleIDs,
			Status: 1,
		})
		if err != nil {
			return err
		}
		var roles []*proto.Role
		for _, v := range roleResult.Data {
			role := &proto.Role{}
			util.StructCopy(role, v)
			roles = append(roles, role)
		}
		info.Roles = roles
	}

	res.Items, err = ptypes.MarshalAny(info)
	return err
}

// QueryUserMenuTree queries the current user's permitted menu tree
func (loginMgr *Login) QueryUserMenuTree(ctx context.Context, req *proto.UserLoginInfo, res *unified.Response) error {
	var menuTrees schema.MenuTrees
	isRoot := schema.CheckIsRootUser(ctx, req.UserID)
	// For the root user, query the full tree of visible menus
	if isRoot {
		result, err := loginMgr.MenuModel.Query(ctx, schema.MenuQueryParam{
			Status: 1,
		}, schema.MenuQueryOptions{
			OrderFields: schema.NewOrderFields(schema.NewOrderField("sequence", schema.OrderByDESC)),
		})
		if err != nil {
			return err
		}
		menuActionResult, err := loginMgr.MenuActionModel.Query(ctx, schema.MenuActionQueryParam{})
		if err != nil {
			return err
		}
		menuTrees = result.Data.FillMenuAction(menuActionResult.Data.ToMenuIDMap()).ToTree()
	} else {
		userRoleResult, err := loginMgr.UserRoleModel.Query(ctx, schema.UserRoleQueryParam{
			UserID: req.UserID,
		})
		if err != nil {
			return err
		} else if len(userRoleResult.Data) == 0 {
			return errors.ErrNoPerm
		}

		roleMenuResult, err := loginMgr.RoleMenuModel.Query(ctx, schema.RoleMenuQueryParam{
			RoleIDs: userRoleResult.Data.ToRoleIDs(),
		})
		if err != nil {
			return err
		} else if len(roleMenuResult.Data) == 0 {
			return errors.ErrNoPerm
		}

		menuResult, err := loginMgr.MenuModel.Query(ctx, schema.MenuQueryParam{
			IDs:    roleMenuResult.Data.ToMenuIDs(),
			Status: 1,
		})
		if err != nil {
			return err
		} else if len(menuResult.Data) == 0 {
			return errors.ErrNoPerm
		}

		mData := menuResult.Data.ToMap()
		var qIDs []string
		for _, pid := range menuResult.Data.SplitParentIDs() {
			if _, ok := mData[pid]; !ok {
				qIDs = append(qIDs, pid)
			}
		}

		if len(qIDs) > 0 {
			pmenuResult, err := loginMgr.MenuModel.Query(ctx, schema.MenuQueryParam{
				IDs: menuResult.Data.SplitParentIDs(),
			})
			if err != nil {
				return err
			}
			menuResult.Data = append(menuResult.Data, pmenuResult.Data...)
		}

		sort.Sort(menuResult.Data)
		menuActionResult, err := loginMgr.MenuActionModel.Query(ctx, schema.MenuActionQueryParam{
			IDs: roleMenuResult.Data.ToActionIDs(),
		})
		if err != nil {
			return err
		}
		menuTrees = menuResult.Data.FillMenuAction(menuActionResult.Data.ToMenuIDMap()).ToTree()
	}

	mTreesPB := &proto.MenuTrees{}
	for _, v := range menuTrees {
		mTree := &proto.MenuTree{}
		util.StructCopy(mTree, v)
		if v.Actions != nil {
			for _, action := range v.Actions {
				mAction := &proto.MenuAction{}
				util.StructCopy(mAction, action)
				mTree.Actions = append(mTree.Actions, mAction)
			}
		}
		if v.Children != nil {
			for _, child := range *v.Children {
				tree := &proto.MenuTree{}
				util.StructCopy(tree, child)
				mTree.Children = append(mTree.Children, tree)
			}
		}
		mTreesPB.MenuTree = append(mTreesPB.MenuTree, mTree)
	}
	res.Items, _ = ptypes.MarshalAny(mTreesPB)
	return nil
}

// UpdatePassword updates the current user's login password
func (loginMgr *Login) UpdatePassword(ctx context.Context, req *proto.UpdatePasswordParam, res *unified.Response) error {
	if schema.CheckIsRootUser(ctx, req.UserID) {
		return errors.New400Response("the root user is not allowed to change the password")
	}

	user, err := loginMgr.checkAndGetUser(ctx, req.UserID)
	if err != nil {
		return err
	} else if util.SHA1HashString(req.OldPassword) != user.Password {
		return errors.New400Response("the old password is incorrect")
	}

	req.NewPassword = util.SHA1HashString(req.NewPassword)
	err = loginMgr.UserModel.UpdatePassword(ctx, req.UserID, req.NewPassword)
	return err
}
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * It represents a version and allows getting access to its information parts. A Version can be created from the text
 * representation of a version or by providing the version fields directly.
 * The version format is: {prefix-}?{ddd.ddd.ddd...ddd}?{-postfix}?
 *
 * @since 1.0.0
 */
public final class Version implements Comparable<Version>, Serializable {

    private static final Pattern EXTRACTOR = Pattern.compile("^([^\\d.]+)-|\\.?([\\d]+)|-?(.*)$");
    private static final long serialVersionUID = -4409642391893263592L;

    private final long[] numericParts;
    private final String prefix;
    private final String postfix;
    private final int hash;

    /**
     * Create instance from a string.
     *
     * @param string text representation of version
     * @since 1.0.0
     */
    public Version(final String string) {
        if (string == null) {
            this.prefix = "";
            this.postfix = "";
            this.numericParts = new long[0];
        } else {
            final List<Long> detectedDigits = new ArrayList<>();
            final Matcher matcher = EXTRACTOR.matcher(string.trim());
            String tail = "";
            String start = "";
            while (matcher.find()) {
                final String grpStart = matcher.group(1);
                final String grpNum = matcher.group(2);
                final String grpTail = matcher.group(3);
                if (grpStart != null) {
                    start = grpStart;
                } else if (grpNum != null) {
                    detectedDigits.add(Long.parseLong(grpNum));
                } else if (grpTail != null) {
                    tail = grpTail;
                    break;
                }
            }
            this.numericParts = new long[detectedDigits.size()];
            for (int i = 0; i < detectedDigits.size(); i++) {
                this.numericParts[i] = detectedDigits.get(i);
            }
            this.postfix = tail;
            this.prefix = start;
        }
        this.hash = this.toString().hashCode();
    }

    /**
     * Create version based only on numeric parts. Both prefix and postfix are empty.
     *
     * @param parts numeric parts of the version, it can be null
     * @since 1.0.0
     */
    public Version(final long... parts) {
        this(null, parts, null);
    }

    /**
     * Create version from provided components.
     *
     * @param prefix       the prefix, it can be null
     * @param numericParts the numeric parts, it can be null
     * @param postfix      the postfix, it can be null
     * @since 1.0.0
     */
    public Version(final String prefix, final long[] numericParts, final String postfix) {
        this.prefix = prefix == null ? "" : prefix.trim();
        this.postfix = postfix == null ? "" : postfix.trim();
        this.numericParts = new long[numericParts == null ? 0 : numericParts.length];
        if (numericParts != null) {
            for (int i = 0; i < numericParts.length; i++) {
                this.numericParts[i] = Math.abs(numericParts[i]);
            }
        }
        this.hash = this.toString().hashCode();
    }

    /**
     * Create copy of version with changed prefix.
     *
     * @param prefix the new prefix, it can be null
     * @return copy of the version with replaced prefix
     * @since 1.0.0
     */
    public Version changePrefix(final String prefix) {
        return new Version(prefix, this.numericParts, this.postfix);
    }

    /**
     * Create copy of version with changed postfix.
     *
     * @param postfix the new postfix, it can be null
     * @return copy of the version with replaced postfix
     * @since 1.0.0
     */
    public Version changePostfix(final String postfix) {
        return new Version(this.prefix, this.numericParts, postfix);
    }

    /**
     * Create copy of version with changed numeric parts.
     *
     * @param numericParts the new numeric parts, it can be null
     * @return copy of the version with replaced numeric parts
     * @since 1.0.0
     */
    public Version changeNumeric(final long... numericParts) {
        return new Version(this.prefix, numericParts, this.postfix);
    }

    /**
     * Get numeric part of version at position.
     *
     * @param position the position of the needed numeric part
     * @return the numeric part; if it is not present then 0 will be returned
     * @since 1.0.0
     */
    public long getNumericPartAtPosition(final int position) {
        return position < 0 || position >= this.numericParts.length ? 0L : this.numericParts[position];
    }

    /**
     * Get the prefix part.
     *
     * @return the prefix, it is not null
     * @since 1.0.0
     */
    public String getPrefix() {
        return this.prefix;
    }

    /**
     * Get the postfix part.
     *
     * @return the postfix part, it is not null
     * @since 1.0.0
     */
    public String getPostfix() {
        return this.postfix;
    }

    /**
     * Get the major (first) numeric element of version.
     *
     * @return the first element of version
     * @since 1.0.0
     */
    public long getMajor() {
        return this.getNumericPartAtPosition(0);
    }

    /**
     * Get the minor (second) numeric element of version.
     *
     * @return the second element of version
     * @since 1.0.0
     */
    public long getMinor() {
        return this.getNumericPartAtPosition(1);
    }

    /**
     * Get the micro (third) numeric element of version.
     *
     * @return the third element of version
     * @since 1.0.0
     */
    public long getMicro() {
        return this.getNumericPartAtPosition(2);
    }

    @Override
    public int hashCode() {
        return this.hash;
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        boolean result = false;
        if (obj instanceof Version) {
            final Version that = (Version) obj;
            result = this.prefix.equals(that.prefix)
                    && Arrays.equals(this.numericParts, that.numericParts)
                    && this.postfix.equals(that.postfix);
        }
        return result;
    }

    @Override
    public String toString() {
        final StringBuilder result = new StringBuilder();
        if (!this.prefix.isEmpty()) {
            result.append(this.prefix);
        }
        if (this.numericParts.length > 0) {
            if (result.length() > 0) {
                result.append('-');
            }
            boolean nofirst = false;
            for (final long i : this.numericParts) {
                if (nofirst) {
                    result.append('.');
                }
                result.append(i);
                nofirst = true;
            }
        }
        if (!this.postfix.isEmpty()) {
            if (result.length() > 0 && result.charAt(result.length() - 1) != '-') {
                result.append('-');
            }
            result.append(this.postfix);
        }
        return result.toString();
    }

    @Override
    public int compareTo(final Version version) {
        final long[] thatNumbers = version.numericParts;
        final int comparePrefix = this.prefix.compareTo(version.prefix);
        if (comparePrefix != 0) {
            return comparePrefix;
        }
        final int maxnum = Math.max(this.numericParts.length, thatNumbers.length);
        for (int i = 0; i < maxnum; i++) {
            final long x = i < this.numericParts.length ? this.numericParts[i] : 0L;
            final long y = i < thatNumbers.length ? thatNumbers[i] : 0L;
            final int result = Long.compare(x, y);
            if (result != 0) {
                return result;
            }
        }
        return this.postfix.compareTo(version.postfix);
    }

    /**
     * Check whether there is any numeric part in the version.
     *
     * @return true if there is a numeric part, false otherwise
     * @since 1.0.0
     */
    public boolean isNumericPartPresented() {
        return this.numericParts.length > 0;
    }
}
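The comparison semantics above (prefix first, then numeric parts with missing positions treated as 0, then postfix) can be mirrored in a few lines of Python. This is a behavioral sketch, not a port of the Java regex, and it assumes the prefix is separated by a "-" as in the class's documented format:

# Hypothetical sketch mirroring Version's parse-and-compare behaviour.
import re

def parse(s):
    m = re.match(r"^(?:([^\d.]+)-)?([\d.]*?)(?:-(.*))?$", s.strip())
    prefix, nums, postfix = m.group(1) or "", m.group(2) or "", m.group(3) or ""
    parts = tuple(int(p) for p in nums.split(".") if p)
    return prefix, parts, postfix

def cmp_versions(a, b):
    pa, na, xa = parse(a)
    pb, nb, xb = parse(b)
    width = max(len(na), len(nb))
    na += (0,) * (width - len(na))   # missing numeric parts count as 0
    nb += (0,) * (width - len(nb))
    return ((pa, na, xa) > (pb, nb, xb)) - ((pa, na, xa) < (pb, nb, xb))

assert cmp_versions("1.2", "1.2.0") == 0
assert cmp_versions("1.10", "1.9") > 0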
/// Drops every node in the tree, essentially invalidating it.
pub fn destroy_tree(&mut self) {
    let root_ix = self.tree.root_ix();
    let mut nodes = self.tree.all_descendants_of(root_ix);
    // Sort indices in descending order so children are removed before parents.
    nodes.sort_by(|a, b| b.cmp(a));
    for node in nodes {
        self.tree.remove(node);
    }
    self.unset_active_container();
}
#
# Using the standard diet problem to demonstrate the ticdat principles of Tidy, Tested, Safe.
# Please refer to the following for an introduction.
# https://github.com/ticdat/ticdat/wiki/1-Beginner-ticdat-intro
#
# This file is training material meant to guide you towards a well organized GitHub repository.
# https://github.com/ticdat/tts_diet
#
# Implement core functionality needed to achieve modularity.
# 1. Define the input data schema
# 2. Define the output data schema
# 3. Create a solve function that accepts a data set consistent with the input
#    schema and (if possible) returns a data set consistent with the output schema.
#
# Provides command line interface via ticdat.standard_main
# For example, typing
#   python diet.py -i diet_sample_data -o diet_solution_data
# will read from a model stored in the directory diet_sample_data and write the solution
# to a directory called diet_solution_data. These data directories contain .csv files.

# this version of the file uses Gurobi
try:  # if you don't have gurobipy installed, the code will still load and then fail on solve
    import gurobipy as gp
except:
    gp = None
from ticdat import TicDatFactory, standard_main

# ------------------------ define the input schema --------------------------------
# There are three input tables, with 4 primary key fields and 4 data fields.
input_schema = TicDatFactory(
    categories=[["Name"], ["Min Nutrition", "Max Nutrition"]],
    foods=[["Name"], ["Cost"]],
    nutrition_quantities=[["Food", "Category"], ["Quantity"]])

# Define the foreign key relationships
input_schema.add_foreign_key("nutrition_quantities", "foods", ["Food", "Name"])
input_schema.add_foreign_key("nutrition_quantities", "categories", ["Category", "Name"])

# Define the data types
input_schema.set_data_type("categories", "Min Nutrition", min=0, max=float("inf"),
                           inclusive_min=True, inclusive_max=False)
input_schema.set_data_type("categories", "Max Nutrition", min=0, max=float("inf"),
                           inclusive_min=True, inclusive_max=True)
input_schema.set_data_type("foods", "Cost", min=0, max=float("inf"),
                           inclusive_min=True, inclusive_max=False)
input_schema.set_data_type("nutrition_quantities", "Quantity", min=0, max=float("inf"),
                           inclusive_min=True, inclusive_max=False)

# We also want to ensure that Max Nutrition doesn't fall below Min Nutrition
input_schema.add_data_row_predicate(
    "categories", predicate_name="Min Max Check",
    predicate=lambda row: row["Max Nutrition"] >= row["Min Nutrition"])

# The default-default of zero makes sense everywhere except for Max Nutrition
input_schema.set_default_value("categories", "Max Nutrition", float("inf"))
# ---------------------------------------------------------------------------------

# ------------------------ define the output schema -------------------------------
# There are three solution tables, with 3 primary key fields and 3 data fields.
solution_schema = TicDatFactory(
    parameters=[["Parameter"], ["Value"]],
    buy_food=[["Food"], ["Quantity"]],
    consume_nutrition=[["Category"], ["Quantity"]])
# ---------------------------------------------------------------------------------

# ------------------------ create a solve function --------------------------------
def solve(dat):
    """
    core solving routine
    :param dat: a good ticdat for the input_schema
    :return: a good ticdat for the solution_schema, or None
    """
    assert input_schema.good_tic_dat_object(dat)
    assert not input_schema.find_foreign_key_failures(dat)
    assert not input_schema.find_data_type_failures(dat)
    assert not input_schema.find_data_row_failures(dat)

    if gp is None:  # even if you don't have gurobipy installed, you can still import this file for other uses
        print("*****\ngurobipy needs to be installed for this example code to solve!\n*****\n")
        return None  # bail out early instead of failing on gp.Model below

    mdl = gp.Model("diet")

    nutrition = {c: mdl.addVar(lb=n["Min Nutrition"], ub=n["Max Nutrition"], name=c)
                 for c, n in dat.categories.items()}

    # Create decision variables for the foods to buy
    buy = {f: mdl.addVar(name=f) for f in dat.foods}

    # Nutrition constraints
    for c in dat.categories:
        mdl.addConstr(gp.quicksum(dat.nutrition_quantities[f, c]["Quantity"] * buy[f]
                                  for f in dat.foods) == nutrition[c],
                      name=c)

    mdl.setObjective(gp.quicksum(buy[f] * c["Cost"] for f, c in dat.foods.items()),
                     sense=gp.GRB.MINIMIZE)
    mdl.optimize()

    if mdl.status == gp.GRB.OPTIMAL:
        sln = solution_schema.TicDat()
        for f, x in buy.items():
            if x.x > 0:
                sln.buy_food[f] = x.x
        for c, x in nutrition.items():
            sln.consume_nutrition[c] = x.x
        sln.parameters['Total Cost'] = sum(dat.foods[f]["Cost"] * r["Quantity"]
                                           for f, r in sln.buy_food.items())
        return sln
# ---------------------------------------------------------------------------------

# ------------------------ provide stand-alone functionality ----------------------
# when run from the command line, will read/write json/xls/csv/db/sql/mdb files
if __name__ == "__main__":
    standard_main(input_schema, solution_schema, solve)
# ---------------------------------------------------------------------------------
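Beyond the command-line interface described in the header, the solve function can also be exercised programmatically. A minimal usage sketch with a tiny in-memory dataset (hypothetical food and category values; requires gurobipy):

# Hypothetical usage sketch: build a ticdat matching input_schema and solve it.
dat = input_schema.TicDat(
    categories={"calories": {"Min Nutrition": 1800, "Max Nutrition": 2200}},
    foods={"pizza": {"Cost": 2.5}},
    nutrition_quantities={("pizza", "calories"): {"Quantity": 700}},
)
sln = solve(dat)
if sln:
    print({f: r["Quantity"] for f, r in sln.buy_food.items()})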
#ifndef FWCore_Framework_PreallocationConfiguration_h
#define FWCore_Framework_PreallocationConfiguration_h
// -*- C++ -*-
//
// Package:     FWCore/Framework
// Class  :     PreallocationConfiguration
//
/**\class edm::PreallocationConfiguration PreallocationConfiguration.h "PreallocationConfiguration.h"

 Description: Holds number of simultaneous Streams, LuminosityBlocks and Runs the job will allow.

 Usage:
    <usage>

*/
//
// Original Author:  Chris Jones
//         Created:  Sun, 11 Aug 2013 19:27:57 GMT
//

// system include files

// user include files

// forward declarations

namespace edm {
  class PreallocationConfiguration {
  public:
    PreallocationConfiguration() : PreallocationConfiguration(1, 1, 1, 1) {}
    PreallocationConfiguration(unsigned int iNThreads,
                               unsigned int iNStreams,
                               unsigned int iNLumis,
                               unsigned int iNRuns)
        : m_nthreads(iNThreads), m_nStreams(iNStreams), m_nLumis(iNLumis), m_nRuns(iNRuns) {}

    // ---------- const member functions ---------------------
    unsigned int numberOfThreads() const { return m_nthreads; }
    unsigned int numberOfStreams() const { return m_nStreams; }
    unsigned int numberOfLuminosityBlocks() const { return m_nLumis; }
    unsigned int numberOfRuns() const { return m_nRuns; }

  private:
    //PreallocationConfiguration(const PreallocationConfiguration&) = delete; // stop default
    //const PreallocationConfiguration& operator=(const PreallocationConfiguration&) = delete; // stop default

    // ---------- member data --------------------------------
    unsigned int m_nthreads;
    unsigned int m_nStreams;
    unsigned int m_nLumis;
    unsigned int m_nRuns;
  };
}  // namespace edm

#endif
import * as React from "react";
import {appStateService, getStageById} from "../../state/AppStateService";
import {StageComponent} from "./StageComponent";
import {appStateStore} from "../../state/AppStateStore";
import {LoadingComponent} from "../common/LoadingComponent";
import {StageLocked} from "./StageLocked";
import {KillerStageComponent} from "./KillerStageComponent";

type StageStage = {
    stage: Stage,
    loading: boolean,
    available: boolean
};

export class StageContainerComponent extends React.Component<any, StageStage> {

    private _changeListener: (p: AppState) => void;

    constructor(props: any, context: any) {
        super(props, context);
        const state: AppState = appStateService.getAppState();
        this.state = this.getStageByAppState(state);
    }

    _onChange(appState: AppState) {
        this.setState(this.getStageByAppState(appState));
    }

    componentWillMount() {
        this._changeListener = this._onChange.bind(this);
        appStateStore.addChangeListener(this._changeListener);
        appStateService.updateState();
    }

    componentWillUnmount() {
        appStateStore.removeChangeListener(this._changeListener);
        this._changeListener = null;
    }

    private getStageByAppState(state: AppState): StageStage {
        if (!state) {
            return {
                loading: true,
                available: false,
                stage: null
            };
        } else {
            const id: string = this.getStageId();
            if (id) {
                const stage = getStageById(state, id);
                if (stage) {
                    return {
                        stage,
                        loading: false,
                        available: true
                    }
                }
            }
        }
        return {
            available: false,
            loading: false,
            stage: null
        }
    }

    getStageId() {
        return (this.props as any).params.id;
    }

    render() {
        const state = this.state;
        if (state) {
            if (false === state.available) {
                return <StageLocked />
            }
            const stage = state.stage;
            if (stage) {
                if (stage.status == StageStatus.LOCKED) {
                    return <StageLocked />
                }
                return this.createStageComponent(stage);
            }
        }
        return <LoadingComponent/>;
    }

    createStageComponent(stage: Stage) {
        if (stage.status == StageStatus.KILLER || stage.status == StageStatus.KILLER_COMPLETED) {
            return <KillerStageComponent stage={stage}/>;
        }
        return <StageComponent stage={stage}/>;
    }
}
import React, { useEffect } from "react"
import { iRefillFuelTypes, iSectionsProps } from "../../constants/interfaces"
import { CLASSES } from "../../css/classes"
import { refillFuelTypes, refillFuelTypesHuman } from "../../constants/constants"
import TOOLTIPS from "../../constants/tooltips"
import ReactTooltip from "react-tooltip"

export default function RefillSettings(props: iSectionsProps): JSX.Element {
    useEffect(() => {
        // Rebuild tooltips on dynamic changes
        ReactTooltip.rebuild()
    })

    const fuelTypesHtml = (
        <select
            className={CLASSES.selectElement}
            hidden={!props.userSettings.refillEnabled}
            value={props.userSettings.refillFuelType}
            onChange={(e) => {
                props.setUserSettings({
                    ...props.userSettings,
                    refillFuelType: e.target.value as iRefillFuelTypes,
                })
            }}
        >
            {refillFuelTypes.map((fuelType) => {
                return (
                    <option className={CLASSES.optionElement} key={fuelType} value={fuelType}>
                        {refillFuelTypesHuman[fuelType]}
                    </option>
                )
            })}
        </select>
    )

    return (
        <div className={CLASSES.section}>
            <div className={CLASSES.gridSection}>
                <input
                    className={CLASSES.checkboxElement}
                    type={"checkbox"}
                    id={"refillEnabled"}
                    checked={props.userSettings.refillEnabled}
                    data-tip={TOOLTIPS.refillEnabled}
                    onChange={(e) => {
                        props.setUserSettings({
                            ...props.userSettings,
                            refillEnabled: e.target.checked,
                        })
                    }}
                />
                <label className={CLASSES.labelElement} htmlFor={"refillEnabled"}>
                    Refill at this Station?
                </label>
                {fuelTypesHtml}
                <label
                    className={CLASSES.labelElement}
                    hidden={!props.userSettings.refillEnabled}
                    htmlFor={"fuelTypes"}
                >
                    Fuel Type
                </label>
                <input
                    type={"number"}
                    className={CLASSES.inputTextElement}
                    hidden={!props.userSettings.refillEnabled}
                    value={props.userSettings.refillFuelAmount}
                    min={"0"}
                    onChange={(e) => {
                        props.setUserSettings({
                            ...props.userSettings,
                            refillFuelAmount: e.target.value,
                        })
                    }}
                />
                <label
                    className={CLASSES.labelElement}
                    hidden={!props.userSettings.refillEnabled}
                    htmlFor={"fuelAmount"}
                >
                    Amount
                </label>
            </div>
        </div>
    )
}
link ../../../IQKeyboardManager/IQKeyBoardManager/IQToolbar/IQToolbar.h
#include <web/web.hpp>
#include <netdb.h>
#include <mbedtls/net_sockets.h>
#include <mbedtls/debug.h>
#include <mbedtls/ssl.h>
#include <mbedtls/entropy.h>
#include <mbedtls/ctr_drbg.h>
#include <mbedtls/error.h>

#if !defined(_WIN32)
#define closesocket close
typedef int SOCKET;
#endif

struct SocketInternal {
    bool                     secure;
    int64_t                  timeout_ms;
    SOCKET                   fd_insecure;
    mbedtls_x509_crt         ca_cert;
    mbedtls_net_context      ctx;
    mbedtls_entropy_context  entropy;
    mbedtls_ctr_drbg_context ctr_drbg;
    mbedtls_ssl_context      ssl;
    mbedtls_ssl_config       conf;
    mbedtls_pk_context       pkey; // for server only
};

void Socket::set_timeout(int64_t t) {
    intern->timeout_ms = t;
}

Socket::Socket(std::nullptr_t n) { }

Socket::Socket(Role role) {
    ///
    intern = std::shared_ptr<SocketInternal>(new SocketInternal);
    SocketInternal &i = *(intern.get());
    memset(&i, 0, sizeof(SocketInternal));
    intern->secure = !is_debug();
    cchar_t *pers = "ion:web";

    mbedtls_debug_set_threshold(3);
    mbedtls_net_init       (&i.ctx);
    mbedtls_ssl_init       (&i.ssl);
    mbedtls_ssl_config_init(&i.conf);
    mbedtls_x509_crt_init  (&i.ca_cert);
    mbedtls_pk_init        (&i.pkey);
    mbedtls_ctr_drbg_init  (&i.ctr_drbg);
    mbedtls_entropy_init   (&i.entropy);

    if (mbedtls_ctr_drbg_seed(&i.ctr_drbg, mbedtls_entropy_func, &i.entropy,
                              (const unsigned char *)pers, strlen(pers)) != 0) {
        std::cerr << " failed: mbedtls_ctr_drbg_seed\n";
        exit(1);
    }

    /// [generate certs in orbit]
    Path cert_path = (role == Server) ? "trust/server.cer" : "trust";
    auto certs     = cert_path.matching({".cer"});
    for (auto &p: certs) {
        std::string str = p;
        int r = mbedtls_x509_crt_parse_file(&i.ca_cert, str.c_str());
        if (r != 0) {
            std::cerr << "mbedtls_x509_crt_parse_file failure: " << str << std::endl;
            exit(1);
        }
    }

    if (role == Server) {
        Path key = "server.key";
        if (key.exists()) {
            console.assertion(key.exists(), "server key: {0} does not exist", { path_t(key) });
            ///
            std::string p = key;
            int r = mbedtls_pk_parse_keyfile(&i.pkey, p.c_str(), nullptr,
                                             mbedtls_ctr_drbg_random, &i.ctr_drbg);
            if (r != 0) {
                std::cerr << "mbedtls_pk_parse_keyfile failure: " << r << std::endl;
                exit(1);
            }
        }
    }

    /// debug-based param
#if defined(NDEBUG)
    mbedtls_ssl_conf_authmode(&i.conf, MBEDTLS_SSL_VERIFY_REQUIRED);
#else
    mbedtls_ssl_conf_authmode(&i.conf, MBEDTLS_SSL_VERIFY_OPTIONAL);
#endif
}

Socket &Socket::operator=(const Socket &ref) {
    if (this != &ref) {
        uri       = ref.uri;
        intern    = ref.intern;
        connected = ref.connected;
    }
    return *this;
}

Socket::Socket(const Socket &ref) : uri(ref.uri), intern(ref.intern), connected(ref.connected) { }

Socket::~Socket() { }

int Socket::read_raw(cchar_t *v, size_t sz) {
    SocketInternal &i = *intern;
    int r = i.secure ?
        mbedtls_ssl_read(&i.ssl, (unsigned char *)v, sz) :
        recv(i.fd_insecure, (void *)v, sz, 0);
    return r;
}

bool Socket::read(cchar_t *v, size_t sz) {
    SocketInternal &i = *intern;
    int st = 0;
    ///
    for (int len = sz; len > 0;) {
        int r = i.secure ?
            mbedtls_ssl_read(&i.ssl, (unsigned char *)&v[st], len) :
            recv(i.fd_insecure, (void *)&v[st], len, 0);
        if (r <= 0)
            return false;
        len -= r;
        st  += r;
    }
    return true;
}

array<char> Socket::read_until(str s, int max_len) {
    auto rbytes = array<char>(max_len);
    size_t slen = s.size();
    ///
    for (;;) {
        rbytes += '\0';
        size_t sz = rbytes.size();
        if (!read(&rbytes[sz - 1], 1))
            return array<char> { };
        if (sz >= slen and memcmp(&rbytes[sz - slen], s.cstr(), slen) == 0)
            break;
        if (sz == max_len)
            return array<char> { };
    }
    return rbytes;
}

bool Socket::write(cchar_t *v, size_t sz, int flags) {
    SocketInternal &i = *intern;
    size_t tlen = sz;
    size_t len  = tlen;
    size_t ibuf = 0;
    assert(strlen(v) == sz); // note: assumes NUL-terminated text, not arbitrary binary data
    /// write some bytes
    for (;;) {
        auto wlen = i.secure ?
            mbedtls_ssl_write(&i.ssl, (const unsigned char *)&v[ibuf], len) :
            send(i.fd_insecure, &v[ibuf], len, 0);
        if (wlen < 0)
            return false;
        ibuf += wlen;
        len  -= wlen;
        if (len <= 0)
            break;
        assert(ibuf <= tlen);
    }
    return true;
}

bool Socket::write(array<char> &v) {
    return write(v.data(), v.size(), 0);
}

bool Socket::write(str s, array<var> a) {
    str f = var::format(s, a);
    return write(f.cstr(), f.size(), 0);
}

bool Socket::write(var &v) {
    if (v == Type::Str)
        return write(str(v));
    assert(false); // todo
    return true;
}

void Socket::close() const {
    SocketInternal &i = *intern;
    if (i.secure) {
        mbedtls_net_close       (&i.ctx);
        mbedtls_net_free        (&i.ctx);
        mbedtls_ssl_free        (&i.ssl);
        mbedtls_ssl_config_free (&i.conf);
        mbedtls_ctr_drbg_free   (&i.ctr_drbg);
        mbedtls_entropy_free    (&i.entropy);
        mbedtls_pk_free         (&i.pkey);
    } else
        ::closesocket(i.fd_insecure);
}

void Socket::logging(void *ctx, int level, cchar_t *file, int line, cchar_t *str) {
    fprintf((FILE *)ctx, "%s:%04d: %s", file, line, str);
}

Socket Socket::connect(URI uri, Trust trust_level) {
    URI a    = uri.methodize(URI::Method::Get);
    str qy   = a.query;
    Socket s = null;
    str p    = a.proto;
    str h    = a.host;
    int port = a.port;
    ///
    if (p == "https") {
        s = Socket(Client);
        SocketInternal &i = *(s.intern);
        string s_port = string(port);

        if (mbedtls_net_connect(&i.ctx, h.cstr(), s_port.cstr(), MBEDTLS_NET_PROTO_TCP) != 0)
            return null;

        if (mbedtls_ssl_config_defaults(&i.conf,
                                        MBEDTLS_SSL_IS_CLIENT,
                                        MBEDTLS_SSL_TRANSPORT_STREAM,
                                        MBEDTLS_SSL_PRESET_DEFAULT) != 0)
            return null;

        /// set verification auth mode
        mbedtls_ssl_conf_ca_chain(&i.conf, &i.ca_cert, NULL);
        mbedtls_ssl_conf_rng(&i.conf, mbedtls_ctr_drbg_random, &i.ctr_drbg);
        mbedtls_ssl_conf_dbg(&i.conf, logging, stdout);

        /// bind who we are; this will be tied into keys associated at installation time of station
        /// simply to be made at that point, and instructions given for storage
        /// (outside of repo, or git-ignored)
        //if (mbedtls_ssl_conf_own_cert(&i.conf, &i.ca_cert, &i.pkey) != 0)
        //    return null;

        if (mbedtls_ssl_setup(&i.ssl, &i.conf) != 0)
            return null;

        if (mbedtls_ssl_set_hostname(&i.ssl, h.cstr()) != 0)
            return null;

        mbedtls_ssl_set_bio(&i.ssl, &i.ctx, mbedtls_net_send, mbedtls_net_recv, NULL);

        /// handshake
        for (;;) {
            int hs = mbedtls_ssl_handshake(&i.ssl);
            if (hs == 0)
                break;
            if (hs != MBEDTLS_ERR_SSL_WANT_READ && hs != MBEDTLS_ERR_SSL_WANT_WRITE) {
                console.log("connect: mbedtls_ssl_handshake: {0}", { hs });
                return null;
            }
        }

#if defined(NDEBUG)
        /// verify server ca x509; exit if flags != 0
        int flags = mbedtls_ssl_get_verify_result(&i.ssl);
        if (flags != 0) {
            console.log("connect: trust check failure, disconnecting");
            return null;
        } else
            console.log("connect: trusted, connected");
#else
        console.log("connect: trust check bypass (debug-build)");
#endif
    } else {
        std::cerr << "unsupported proto: " << p.cstr() << "\n";
        assert(false);
    }
    s.uri       = a;
    s.connected = true;
    return s;
}

/// listen on https using mbedtls
async Socket::listen(URI url, std::function<void(Socket)> fn) {
    return async(1, [&, url=url, fn=fn](auto process, int t_index) -> var {
        URI  uri       = url.methodize(URI::Method::Get);
        auto sc_listen = Socket(Server);
        /// remove the interfacing in this case
        auto &i = *(sc_listen.intern);

        /// state the truth
        assert(uri.proto() == "https");

        /// start listening on adapter address over port (443 = https default but it can be any)
        if (mbedtls_net_bind(&i.ctx, uri.host().cstr(),
                             std::to_string(uri.port()).c_str(), MBEDTLS_NET_PROTO_TCP) != 0)
            return false;

        /// boilerplate thing for initialization of a conf struct. i like its boilers.
        if (mbedtls_ssl_config_defaults(&i.conf,
                                        MBEDTLS_SSL_IS_SERVER,
                                        MBEDTLS_SSL_TRANSPORT_STREAM,
                                        MBEDTLS_SSL_PRESET_DEFAULT) != 0)
            return false;

        /// more initialization of the random number gen.
        mbedtls_ssl_conf_rng(&i.conf, mbedtls_ctr_drbg_random, &i.ctr_drbg);

        /// direct logging to standard out, worf. aye sir.
        mbedtls_ssl_conf_dbg(&i.conf, logging, stdout);

        /// bind certificate authority chain (i think)
        mbedtls_ssl_conf_ca_chain(&i.conf, &i.ca_cert, NULL);

        /// bind who we are.
        if (mbedtls_ssl_conf_own_cert(&i.conf, &i.ca_cert, &i.pkey) != 0)
            return false;

        for (;;) {
            char   cip[64] = { 0 };
            size_t cip_len = 0;

            /// create client socket first (not used to this!)
            auto  sc_client = Socket(Server);
            auto &ci        = *(sc_client.intern);

            /// setup ssl with the configuration of the accepting socket
            if (mbedtls_ssl_setup(&ci.ssl, &i.conf) != 0)
                return false;

            console.log("server: accepting new connection...\n");

            /// the following should never block the acceptance thread for any undesirable time
            /// accept connection
            if (mbedtls_net_accept(&i.ctx, &ci.ctx, cip, sizeof(cip), &cip_len) != 0)
                return false;

            /// tell it where the tubing is
            mbedtls_ssl_set_bio(&ci.ssl, &ci.ctx, mbedtls_net_send, mbedtls_net_recv, NULL);

            /// handshake through said tubes
            for (;;) {
                int hs = mbedtls_ssl_handshake(&ci.ssl);
                ///
                if (hs == 0)
                    break;
                ///
                if (hs != MBEDTLS_ERR_SSL_WANT_READ && hs != MBEDTLS_ERR_SSL_WANT_WRITE) {
                    console.log("server: abort during handshake");
                    return false;
                }
            }

            /// spawn thread for the given callback
            async(1, [&, sc_client=sc_client, uri=uri, fn=fn](auto process, int t_index) -> var {
                fn(sc_client);
                sc_client.close();
                return true;
            });
        }
        return true;
    });
}
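For contrast, the same client flow (TCP connect, TLS handshake with the hostname set for SNI and verification, then certificate validation before use) fits in a few lines of Python's standard library. A minimal sketch, not a port of the mbedTLS code above:

# Minimal TLS client sketch using Python's stdlib ssl module.
import socket, ssl

def tls_connect(host, port=443):
    ctx = ssl.create_default_context()                 # loads trusted CAs, verification on
    raw = socket.create_connection((host, port))       # TCP connect
    return ctx.wrap_socket(raw, server_hostname=host)  # handshake + cert/hostname check

conn = tls_connect("example.com")
conn.sendall(b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")
print(conn.recv(64))
conn.close()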
/**
 * Handler method for standard messages used by simplebot
 */
void SimpleMsgHandler::handleMessage(BasicNetworkMsg *msg) {
    if (node->isActive()) {
        if (msg->getType() == NL_REQ) {
            handleNLReq(msg);
        } else if (msg->getType() == NL_RESP) {
            handleNLResp(msg);
        } else if (msg->getType() == PING) {
            handlePingMsg(msg);
        } else if (msg->getType() == PONG) {
            handlePongMsg(msg);
        } else if (msg->getType() == CMD_REQ) {
            handleCmdReq(msg);
        } else if (msg->getType() == CMD_REP) {
            handleCmdRep(msg);
        }
    }
}
import numpy as np import torch from torch.distributions import OneHotCategorical from offpolicy.algorithms.r_maddpg.algorithm.r_actor_critic import R_MADDPG_Actor, R_MADDPG_Critic from offpolicy.algorithms.r_matd3.algorithm.r_actor_critic import R_MATD3_Actor, R_MATD3_Critic from offpolicy.utils.util import is_discrete, is_multidiscrete, get_dim_from_space, DecayThenFlatSchedule, soft_update, hard_update, \ gumbel_softmax, onehot_from_logits, gaussian_noise, avail_choose, to_numpy from offpolicy.algorithms.base.recurrent_policy import RecurrentPolicy class R_MADDPGPolicy(RecurrentPolicy): """ Recurrent MADDPG/MATD3 Policy Class to wrap actor/critic and compute actions. See parent class for details. :param config: (dict) contains information about hyperparameters and algorithm configuration :param policy_config: (dict) contains information specific to the policy (obs dim, act dim, etc) :param target_noise: (int) std of target smoothing noise to add for MATD3 (applies only for continuous actions) :param td3: (bool) whether to use MATD3 or MADDPG. :param train: (bool) whether the policy will be trained. """ def __init__(self, config, policy_config, target_noise=None, td3=False, train=True): self.config = config self.device = config['device'] self.args = self.config["args"] self.tau = self.args.tau self.lr = self.args.lr self.opti_eps = self.args.opti_eps self.weight_decay = self.args.weight_decay self.prev_act_inp = self.args.prev_act_inp self.central_obs_dim, self.central_act_dim = policy_config["cent_obs_dim"], policy_config["cent_act_dim"] self.obs_space = policy_config["obs_space"] self.obs_dim = get_dim_from_space(self.obs_space) self.act_space = policy_config["act_space"] self.act_dim = get_dim_from_space(self.act_space) self.output_dim = sum(self.act_dim) if isinstance(self.act_dim, np.ndarray) else self.act_dim self.hidden_size = self.args.hidden_size self.discrete = is_discrete(self.act_space) self.multidiscrete = is_multidiscrete(self.act_space) actor_class = R_MATD3_Actor if td3 else R_MADDPG_Actor critic_class = R_MATD3_Critic if td3 else R_MADDPG_Critic self.actor = actor_class(self.args, self.obs_dim, self.act_dim, self.device, take_prev_action=self.prev_act_inp) self.critic = critic_class(self.args, self.central_obs_dim, self.central_act_dim, self.device) self.target_actor = actor_class(self.args, self.obs_dim, self.act_dim, self.device, take_prev_action=self.prev_act_inp) self.target_critic = critic_class(self.args, self.central_obs_dim, self.central_act_dim, self.device) # sync the target weights self.target_actor.load_state_dict(self.actor.state_dict()) self.target_critic.load_state_dict(self.critic.state_dict()) if train: self.actor_optimizer = torch.optim.Adam(self.actor.parameters(), lr=self.lr, eps=self.opti_eps, weight_decay=self.weight_decay) self.critic_optimizer = torch.optim.Adam(self.critic.parameters(), lr=self.lr, eps=self.opti_eps, weight_decay=self.weight_decay) if self.discrete: # eps greedy exploration self.exploration = DecayThenFlatSchedule(self.args.epsilon_start, self.args.epsilon_finish, self.args.epsilon_anneal_time, decay="linear") self.target_noise = target_noise def get_actions(self, obs, prev_actions, rnn_states, available_actions=None, t_env=None, explore=False, use_target=False, use_gumbel=False): """ See parent class. :param use_target: (bool) whether to use the target actor or live actor. :param use_gumbel: (bool) whether to apply gumbel softmax on the actions. 
""" assert prev_actions is None or len(obs.shape) == len(prev_actions.shape) # obs is either an array of shape (batch_size, obs_dim) or (seq_len, batch_size, obs_dim) if len(obs.shape) == 2: batch_size = obs.shape[0] no_sequence = True else: batch_size = obs.shape[1] no_sequence = False eps = None if use_target: actor_out, new_rnn_states = self.target_actor(obs, prev_actions, rnn_states) else: actor_out, new_rnn_states = self.actor(obs, prev_actions, rnn_states) if self.discrete: if self.multidiscrete: if use_gumbel or (use_target and self.target_noise is not None): onehot_actions = list(map(lambda a: gumbel_softmax(a, hard=True, device=self.device), actor_out)) actions = torch.cat(onehot_actions, dim=-1) elif explore: onehot_actions = list(map(lambda a: gumbel_softmax(a, hard=True, device=self.device), actor_out)) onehot_actions = torch.cat(onehot_actions, dim=-1) assert no_sequence, "Doesn't make sense to do exploration on a sequence!" # eps greedy exploration eps = self.exploration.eval(t_env) rand_numbers = np.random.rand(batch_size, 1) take_random = (rand_numbers < eps).astype(int).reshape(-1, 1) # random actions sample uniformly from action space random_actions = [OneHotCategorical(logits=torch.ones(batch_size, self.act_dim[i])).sample() for i in range(len(self.act_dim))] random_actions = torch.cat(random_actions, dim=1) actions = (1 - take_random) * to_numpy(onehot_actions) + take_random * to_numpy(random_actions) else: onehot_actions = list(map(onehot_from_logits, actor_out)) actions = torch.cat(onehot_actions, dim=-1) else: if use_gumbel or (use_target and self.target_noise is not None): actions = gumbel_softmax(actor_out, available_actions, hard=True, device=self.device) # gumbel has a gradient elif explore: onehot_actions = gumbel_softmax(actor_out, available_actions, hard=True, device=self.device) # gumbel has a gradient assert no_sequence, "Cannot do exploration on a sequence!" # eps greedy exploration eps = self.exploration.eval(t_env) rand_numbers = np.random.rand(batch_size, 1) # random actions sample uniformly from action space logits = avail_choose(torch.ones(batch_size, self.act_dim), available_actions) random_actions = OneHotCategorical(logits=logits).sample().numpy() take_random = (rand_numbers < eps).astype(int) actions = (1 - take_random) * to_numpy(onehot_actions) + take_random * random_actions else: actions = onehot_from_logits(actor_out, available_actions) # no gradient else: if explore: assert no_sequence, "Cannot do exploration on a sequence!" 
actions = gaussian_noise(actor_out.shape, self.args.act_noise_std) + actor_out elif use_target and self.target_noise is not None: assert isinstance(self.target_noise, float) actions = gaussian_noise(actor_out.shape, self.target_noise) + actor_out else: actions = actor_out # # clip the actions at the bounds of the action space # actions = torch.max(torch.min(actions, torch.from_numpy(self.act_space.high)), torch.from_numpy(self.act_space.low)) return actions, new_rnn_states, eps def init_hidden(self, num_agents, batch_size): """See parent class.""" if num_agents == -1: return torch.zeros(batch_size, self.hidden_size) else: return torch.zeros(num_agents, batch_size, self.hidden_size) def get_random_actions(self, obs, available_actions=None): """See parent class.""" batch_size = obs.shape[0] if self.discrete: if self.multidiscrete: random_actions = [OneHotCategorical(logits=torch.ones(batch_size, self.act_dim[i])).sample().numpy() for i in range(len(self.act_dim))] random_actions = np.concatenate(random_actions, axis=-1) else: if available_actions is not None: logits = avail_choose(torch.ones(batch_size, self.act_dim), available_actions) random_actions = OneHotCategorical(logits=logits).sample().numpy() else: random_actions = OneHotCategorical(logits=torch.ones(batch_size, self.act_dim)).sample().numpy() else: random_actions = np.random.uniform(self.act_space.low, self.act_space.high, size=(batch_size, self.act_dim)) return random_actions def soft_target_updates(self): """Soft update the target networks through a Polyak averaging update.""" soft_update(self.target_critic, self.critic, self.tau) soft_update(self.target_actor, self.actor, self.tau) def hard_target_updates(self): """Hard update target networks by copying the weights of the live networks.""" hard_update(self.target_critic, self.critic) hard_update(self.target_actor, self.actor)
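For orientation, here is a minimal rollout-style sketch of driving the policy class above. Everything concrete in it is an assumption for illustration: the args namespace simply mirrors the attributes __init__ reads, and the spaces and dimensions are invented.

# Usage sketch (hypothetical values; `args` mirrors what __init__ reads).
from types import SimpleNamespace
import numpy as np
import torch
from gym import spaces

args = SimpleNamespace(tau=0.005, lr=1e-3, opti_eps=1e-5, weight_decay=0.0,
                       prev_act_inp=False, hidden_size=64,
                       epsilon_start=1.0, epsilon_finish=0.05,
                       epsilon_anneal_time=50000, act_noise_std=0.1)
config = {"device": torch.device("cpu"), "args": args}
policy_config = {"cent_obs_dim": 16, "cent_act_dim": 10,
                 "obs_space": spaces.Box(-1.0, 1.0, shape=(8,)),
                 "act_space": spaces.Discrete(5)}

policy = R_MADDPGPolicy(config, policy_config)

batch = 4
obs = np.zeros((batch, 8), dtype=np.float32)   # (batch, obs_dim): the no_sequence path
rnn_states = policy.init_hidden(-1, batch)     # zeros of shape (batch, hidden_size)
# the actor is assumed to handle numpy inputs, as elsewhere in the repo
actions, rnn_states, eps = policy.get_actions(obs, None, rnn_states,
                                              t_env=0, explore=True)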
Alaska Gov. Bill Walker and Lt. Gov. Byron Mallott will return bearded sealskin vests they were given by an organization affiliated with the North Slope Borough following the disclosure that the borough bought them from a daughter of the mayor. The office of Mayor Charlotte Brower in February paid $7,000 to Mary Jo Olemaun, one of Brower's daughters, for the pair of vests. The payments were detailed in a response to an Alaska Dispatch News records request, which followed a vote by the borough Assembly this month to appoint a law firm to investigate borough purchasing policies and potential violations of its ethics code, at Brower's request. Brower said in a memorandum to the Assembly dated July 7 that her staff members had authorized purchases of goods and services from members of her family without her approval or knowledge. But other documents showed a handwritten "approved" notation, with Brower's name, on a request ultimately granted by the borough for $7,405 to send three of her grandchildren to basketball camp in California. The documents also showed that the North Slope Borough spent thousands of dollars to buy gifts from Olemaun for high-profile officials, including Walker, Mallott, U.S. Interior Secretary Sally Jewell and U.S. Sen. Lisa Murkowski. Brower has said her office authorized payments to her family members under a borough policy that allowed no-bid purchases of goods or services for less than $10,000. Brower has since changed the policy. Records show Brower's office paid Olemaun $3,500 apiece on Feb. 18 for two sealskin vests for Walker and Mallott as well as $500 for an atiqluk, or kuspuk, for Jewell -- plus $100 for delivery. Walker and Mallott each received bearded seal vests on Feb. 16 at an Alaska Federation of Natives conference in the Northwest Alaska community of Kotzebue, Walker's spokeswoman, Katie Marquette, said in an email Friday. They were valued at $1,750 each and given on behalf of the whaling captains association from the 4,400-person city of Barrow, Marquette said. That organization shares a mailing address with the North Slope Borough. "In light of the new information released by the North Slope Borough, these gifts are being returned to ensure that the public's trust is being upheld," Marquette said. Marquette said the vests received by Walker and Mallott matched a photograph included in the borough records. A spokeswoman for Brower, Kristine Hilderbrand, couldn't immediately say how the vests were transferred from the borough to the association, though she emailed a prepared statement. "Sealskin vests are traditional gifts of honor for distinguished elders," the statement said. "The purchase of the vests presented to the governor and lieutenant governor did not violate the borough's procurement policy." There's no cap on the value of gifts that can be received by public officials in Alaska, said Paul Dauphinais, the executive director of the Alaska Public Offices Commission, though gifts valued at $250 or greater must be recorded on an annual disclosure. Marquette, in her email, provided two disclosure forms -- one for Walker and one for Mallott -- that recorded the vests earlier this year. A spokeswoman for Jewell, Emily Beyer, said in an email late Friday that all gifts received by the secretary are processed and logged. But she added that she could not confirm until early next week whether Jewell had received the kuspuk purchased by the borough. 
A spokesman for Murkowski, Matthew Felling, confirmed that the senator's office received a purse and hat from the North Slope Borough, which were purchased from Olemaun for $1,900, according to borough records.
package CronScheduler import ( . "gopkg.in/check.v1" "log" "testing" "time" ) const ( layout string = "2006-01-02 15:04:05" ) func TestCronSchedulerScript(t *testing.T) { TestingT(t) } type CronSchedulerScriptTestsSuite struct{} var _ = Suite(&CronSchedulerScriptTestsSuite{}) func (s *CronSchedulerScriptTestsSuite) TestCronSchedulerScriptInit(c *C) { sched := New() c.Assert(sched, NotNil) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeDigitalBad(c *C) { ////c.Skip("NO TEST NOW") sched := New() _, isGood := sched.prepareTimeDigital("", 1, 12) // WRONG 1 c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeDigital("1001", 0, 59) // WRONG 2 c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeDigital("0", 1, 12) // WRONG 3 c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeDigital("-1", 1, 12) // WRONG 4 c.Assert(isGood, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeHyphen(c *C) { ////c.Skip("NO TEST NOW") sched := New() t, isGood := sched.prepareTimeHyphen("1-12", 1, 59) c.Assert(isGood, Equals, true) c.Assert(len(t), Equals, 12) c.Assert(t[0], Equals, 1) c.Assert(t[10], Equals, 11) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeHyphenBad(c *C) { ////c.Skip("NO TEST NOW") sched := New() _, isGood := sched.prepareTimeHyphen("-12", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeHyphen("1-101", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeHyphen("0-13", 1, 12) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeHyphen("0-1", 1, 12) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeHyphen("1-", 0, 59) c.Assert(isGood, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeStar(c *C) { ////c.Skip("NO TEST NOW") sched := New() t, isGood := sched.prepareTimeStar("*", 0, 59) c.Assert(isGood, Equals, true) // Each minute c.Assert(len(t), Equals, 60) c.Assert(t[0], Equals, 0) c.Assert(t[56], Equals, 56) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeStarBad(c *C) { ////c.Skip("NO TEST NOW") sched := New() _, isGood := sched.prepareTimeStar("**", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeStar("", 0, 59) c.Assert(isGood, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeSlash(c *C) { ////c.Skip("NO TEST NOW") sched := New() t, isGood := sched.prepareTimeSlash("*/3", 0, 59) c.Assert(isGood, Equals, true) c.Assert(len(t), Equals, 20) c.Assert(t[0], Equals, 0) c.Assert(t[19], Equals, 57) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeSlashBad(c *C) { ////c.Skip("NO TEST NOW") sched := New() _, isGood := sched.prepareTimeSlash("**/3", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeSlash("*/", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeSlash("*/-1", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeSlash("*/0", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTimeSlash("/", 0, 59) c.Assert(isGood, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeComplex(c *C) { ////c.Skip("NO TEST NOW") sched := New() t, isGood := sched.prepareTime(`*/3,11,44,5-10`, 0, 59) c.Assert(isGood, Equals, true) c.Assert(len(t), Equals, 26) c.Assert(t[0], Equals, 0) c.Assert(t[19], Equals, 42) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptCheckPrepareTimeComplexBad(c *C) 
{ sched := New() _, isGood := sched.prepareTime("*/3,11,44,5-101", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTime("*/3,11,1000,5-10", 0, 59) c.Assert(isGood, Equals, false) _, isGood = sched.prepareTime("*,/3,11,1000,5-10", 0, 59) c.Assert(isGood, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTimeBad(c *C) { sched := New() err := sched.SetTime("s", "*", "*", "*", "*") // Check Bad Min c.Assert(err, NotNil) err = sched.SetTime("*", "s", "*", "*", "*") // Check Bad Hour c.Assert(err, NotNil) err = sched.SetTime("*", "*", "s", "*", "*") // Check Bad DayM c.Assert(err, NotNil) err = sched.SetTime("*", "*", "*", "s", "*") // Check Bad Month c.Assert(err, NotNil) err = sched.SetTime("*", "*", "*", "*", "s") // Check Bad DayW c.Assert(err, NotNil) err = sched.SetTime("") // Check empty c.Assert(err, NotNil) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTimeEmpty(c *C) { sched := New() err := sched.SetTime() c.Assert(err, NotNil) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime(c *C) { sched := New() err := sched.SetTime("*", "*", "*", "*", "*") // Check EMPTY TIME c.Assert(err, IsNil) c.Assert(len(sched.DayMInt), Equals, 31) c.Assert(len(sched.DayWInt), Equals, 7) c.Assert(len(sched.MinInt), Equals, 60) c.Assert(len(sched.MonthInt), Equals, 12) c.Assert(len(sched.HourInt), Equals, 24) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTimeShot(c *C) { sched := New() err := sched.SetTime("*") // Check EMPTY TIME c.Assert(err, IsNil) c.Assert(len(sched.DayMInt), Equals, 31) c.Assert(len(sched.DayWInt), Equals, 7) c.Assert(len(sched.MinInt), Equals, 60) c.Assert(len(sched.MonthInt), Equals, 12) c.Assert(len(sched.HourInt), Equals, 24) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptgetNextDay_29_Feb_2024(c *C) { sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("33", "12", "29", "2", "4") t := time.Date(2024, time.Month(1), 29, 23, 59, 1, 1, time.Now().Location()) day, month, year := sched.getNextDay(t, 0) log.Printf("day: %d, month: %d, year: %d\n", day, month, year) // Check length array c.Assert(day, Equals, 29) c.Assert(month, Equals, 2) c.Assert(year, Equals, 2024) t = time.Date(2020, time.Month(1), 29, 23, 59, 1, 1, time.Now().Location()) day, month, year = sched.getNextDay(t, 0) log.Printf("day: %d, month: %d, year: %d\n", day, month, year) // Check length array c.Assert(day, Equals, 29) c.Assert(month, Equals, 2) c.Assert(year, Equals, 2024) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptgetNextDayThisDay(c *C) { sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("*", "*", "*", "*", "*") t := time.Date(2024, time.Month(2), 29, 23, 59, 1, 1, time.Now().Location()) day, month, year := sched.getNextDay(t, 0) log.Printf("day: %d, month: %d, year: %d\n", day, month, year) // Check length array c.Assert(day, Equals, 29) c.Assert(month, Equals, 2) c.Assert(year, Equals, 2024) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptgetNextHourMinNow(c *C) { //c.Skip("NO TEST NOW") sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("*", "*", "*", "*", "*") t := time.Date(2024, time.Month(2), 29, 2, 5, 1, 1, time.Now().Location()) add, hour, min := sched.getNextHourMin(t, 0) c.Assert(add, Equals, false) c.Assert(hour, Equals, 2) c.Assert(min, Equals, 5) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptgetNextHourMinNextDate(c *C) { ////c.Skip("NO TEST NOW") sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("3,45", "*", "*", "*", "*") // 
Check next hour t, _ := time.Parse(layout, "2024-02-29 2:59:01") add, hour, min := sched.getNextHourMin(t, 0) c.Assert(add, Equals, false) c.Assert(hour, Equals, 3) c.Assert(min, Equals, 3) // Check next hour t, _ = time.Parse(layout, "2024-02-29 5:59:55") add, hour, min = sched.getNextHourMin(t, 0) c.Assert(add, Equals, false) c.Assert(hour, Equals, 6) c.Assert(min, Equals, 3) // Check next day t, _ = time.Parse(layout, "2024-02-29 23:59:55") add, hour, min = sched.getNextHourMin(t, 0) c.Assert(add, Equals, true) c.Assert(hour, Equals, 0) c.Assert(min, Equals, 0) // Check next year t, _ = time.Parse(layout, "2024-12-31 23:59:55") add, hour, min = sched.getNextHourMin(t, 0) c.Assert(add, Equals, true) c.Assert(hour, Equals, 0) c.Assert(min, Equals, 0) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptGetNextTime(c *C) { //c.Skip("NO TEST NOW") if time.Now().Minute() == 59 { c.Skip("We don't test inconsistent situations now.") } sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("*", "*", "*", "*", "*") nt := sched.GetNextTime(time.Now()) c.Assert(int(nt.Day()), Equals, int(time.Now().Day())) c.Assert(int(nt.Month()), Equals, int(time.Now().Month())) c.Assert(int(nt.Year()), Equals, int(time.Now().Year())) c.Assert(int(nt.Hour()), Equals, int(time.Now().Hour())) c.Assert(int(nt.Minute()), Equals, 1+int(time.Now().Minute())) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptGetNextTime_29_Feb_2024(c *C) { //c.Skip("NO TEST NOW") sched := New() //Min, Hour, DayM, Month, DayW // 29 Feb 2020 sched.SetTime("33", "12", "29", "2", "4") nt := sched.GetNextTime(time.Now()) // Check 29 Feb 2020 c.Assert(int(nt.Minute()), Equals, 33) c.Assert(int(nt.Hour()), Equals, 12) c.Assert(int(nt.Day()), Equals, 29) c.Assert(int(nt.Month()), Equals, 2) c.Assert(int(nt.Year()), Equals, 2024) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime2(c *C) { c.Skip("OLD") sched := New() //Min, Hour, DayM, Month, DayW err := sched.SetTime("*", "*", "*", "*", "6") c.Assert(err, IsNil) start, _ := time.Parse(layout, "2015-01-25 15:04:05") finish, _ := time.Parse(layout, "2015-01-25 15:04:05") isStart := sched.StartTime(start, finish) c.Assert(isStart, Equals, true) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime3(c *C) { c.Skip("OLD") sched := New() //Min, Hour, DayM, Month, DayW err := sched.SetTime("*/5", "*", "*", "*", "*") // Check returning values c.Assert(err, IsNil) // Check length array //c.Assert(len(sched.Time), Equals, 1) start, _ := time.Parse(layout, "2015-01-25 15:04:00") finish, _ := time.Parse(layout, "2015-01-25 15:05:01") isStart := sched.StartTime(start, finish) //c.Assert(isStart, Equals, false) c.Assert(isStart, Equals, true) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime4(c *C) { c.Skip("OLD") sched := New() //Min, Hour, DayM, Month, DayW err := sched.SetTime("*/5", "*/4", "*/3", "*/3", "*") // Check returning values c.Assert(err, IsNil) // Check length array //c.Assert(len(sched.Time), Equals, 1) start, _ := time.Parse(layout, "2014-01-21 15:05:00") finish, _ := time.Parse(layout, "2015-01-25 15:05:00") isStart := sched.StartTime(start, finish) //c.Assert(isStart, Equals, false) c.Assert(isStart, Equals, true) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime5(c *C) { c.Skip("OLD") sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("*", "*", "*", "*/5", "*/5") sched.SetTime("*", "*", "*/5", "*", "*/5") sched.SetTime("*", "*/5", "*", "*", "*/5") sched.SetTime("*/5", "*", "*", "*", "*/5") // Check 
length array //c.Assert(len(sched.Time), Equals, 4) start, _ := time.Parse(layout, "2014-01-21 16:16:00") finish, _ := time.Parse(layout, "2014-01-21 16:16:00") isStart := sched.StartTime(start, finish) c.Assert(isStart, Equals, false) } func (s *CronSchedulerScriptTestsSuite) TestConfigScriptAddTime6(c *C) { c.Skip("OLD") sched := New() //Min, Hour, DayM, Month, DayW sched.SetTime("*", "*/7", "*", "*", "*") sched.SetTime("*/7", "*", "*", "*", "*") start, _ := time.Parse(layout, "2014-01-21 17:17:00") finish, _ := time.Parse(layout, "2014-01-21 17:17:00") isStart := sched.StartTime(start, finish) c.Assert(isStart, Equals, false) }
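Outside the suite, the public surface exercised by these tests (New, SetTime, GetNextTime) composes as in the sketch below. This is illustrative only; the import path is a placeholder, not the package's real module path.

package main

import (
	"fmt"
	"log"
	"time"

	cron "example.com/CronScheduler" // placeholder import path
)

func main() {
	sched := cron.New()
	// Field order, as in the tests: Min, Hour, DayM, Month, DayW.
	// "*/15" in the minute field fires every 15 minutes.
	if err := sched.SetTime("*/15", "*", "*", "*", "*"); err != nil {
		log.Fatal(err)
	}
	next := sched.GetNextTime(time.Now())
	fmt.Println("next run at:", next.Format("2006-01-02 15:04:05"))
}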
/**
 * Navigates to the next or previous window.
 *
 * <p>Called when the user performs window navigation with keyboard shortcuts.
 *
 * <p><strong>Note:</strong> The caller is responsible for recycling the pivot.
 *
 * @return {@code true} if any accessibility action is successfully performed.
 */
private boolean navigateToWindowTarget(
    AccessibilityNodeInfoCompat pivot, NavigationAction navigationAction, EventId eventId) {

  AccessibilityWindowInfo currentWindow = AccessibilityNodeInfoUtils.getWindow(pivot.unwrap());
  if (!FILTER_WINDOW_FOR_WINDOW_NAVIGATION.accept(currentWindow)) {
    return false;
  }

  AccessibilityNodeInfoCompat target = null;
  Map<AccessibilityNodeInfoCompat, Boolean> speakingNodeCache = new HashMap<>();
  try {
    WindowTraversal windowTraversal = new WindowTraversal(service);
    boolean isScreenRtl = WindowUtils.isScreenLayoutRTL(service);
    target =
        searchTargetInNextOrPreviousWindow(
            screenStateMonitor.getCurrentScreenState(),
            windowTraversal,
            isScreenRtl,
            currentWindow,
            navigationAction.searchDirection,
            focusFinder,
            /* shouldRestoreLastFocus= */ true,
            actorState.getFocusHistory(),
            FILTER_WINDOW_FOR_WINDOW_NAVIGATION,
            NavigationTarget.createNodeFilter(
                NavigationTarget.TARGET_DEFAULT, speakingNodeCache));
    return (target != null) && setAccessibilityFocusInternal(target, navigationAction, eventId);
  } finally {
    AccessibilityNodeInfoUtils.recycleNodes(target);
  }
}
from statistics import mode

def most_common(lst):
    # statistics.mode returns the most frequent element
    # (on Python 3.8+, the first mode encountered when there are ties)
    return mode(lst)

def countX(lst, x):
    # helper: occurrences of x in lst (not used below)
    return lst.count(x)

def ans(lst, count):
    # count is the number of occurrences of the most frequent element;
    # the answer is how many elements remain after repeatedly removing
    # pairs of distinct elements.
    if count <= len(lst) / 2:
        # no element dominates: everything pairs off except possibly one
        return len(lst) % 2
    # the surplus of the dominant element cannot be paired away
    return len(lst) - 2 * (len(lst) - count)

T = int(input())
while T > 0:
    n = int(input())
    lst = list(map(int, input().split()))
    print(ans(lst, lst.count(most_common(lst))))
    T -= 1
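A quick worked check of the pairing argument above, on hypothetical inputs (run separately from the stdin loop):

# If the mode occurs count times out of n: when count <= n/2 every
# occurrence pairs with a different element, leaving n % 2 items;
# otherwise count - (n - count) = n - 2*(n - count) majority items remain.
assert ans([1, 1, 1, 2, 3], 3) == 1   # 5 - 2*(5-3) = 1 unpaired '1'
assert ans([1, 1, 2, 2], 2) == 0      # even length, no dominant element
assert ans([1, 2, 3], 1) == 1         # odd length, no dominant element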
import { RoutableFunction } from "../interface/common-interfaces"; import { ControllerInterface } from '../interface/controller-interface'; import { InjectorInterface } from '../interface/injector-interface'; import { deepCopy, prevVer } from '../util'; import { RouteOptions } from "../decorator/route"; interface RouteMetadataInterface { version: string, path: string, ctor: any, handlers: ControllerInterface, options: RouteMetadataOptionsInterface } export interface RouteMetadataOptionsInterface { /** * an authenticator passed through UserAuth decorator. */ userAuthInjector?: InjectorInterface, /** * options passed through Route decorator as a parameter. */ routeParamOptions?: RouteOptions } export interface RouteInfo { method: "get"|"post"|"put"|"delete", path: string, ctor: any, handler: RoutableFunction, routeOptions: RouteMetadataOptionsInterface } export class MetadataStorage { private routes: RouteMetadataInterface[] = []; public registerRoute(path: string, version: string, ctor: any, options: RouteMetadataOptionsInterface) { this.routes.push({ version: version, path: path, ctor: ctor, handlers: { index: ctor.prototype?.index, show: ctor.prototype?.show, post: ctor.prototype?.post, put: ctor.prototype?.put, delete: ctor.prototype?.delete }, options: options }); } public buildRoutes(prefix?: string): RouteInfo[] { let sortedRoutes = this.routes .sort((a, b) => { if (a.version > b.version) { return 1; } else if (a.version < b.version) { return -1; } else { return a.path < b.path ? 1 : -1; } }); let versionMap: { [version: string]: RouteMetadataInterface[] } = {}; sortedRoutes.forEach((route) => { let targetPaths = versionMap[route.version]; if (targetPaths == null) { let prev = prevVer(route.version); if (prev) { let prevPaths = versionMap[prev]; if (prevPaths) { targetPaths = prevPaths.map((r) => deepCopy(r)); targetPaths.forEach((r) => { r.version = route.version; }); } else { targetPaths = []; } } else { targetPaths = []; } } const foundIndex = targetPaths.findIndex((r) => r.path === route.path); if (foundIndex !== -1) { targetPaths[foundIndex] = route; } else { targetPaths.push(route); } versionMap[route.version] = targetPaths; }); // versionMap["v1"].forEach((r) => console.log(`v1 [${r.path}] index handler = ${r.handlers.index}`)); // versionMap["v2"].forEach((r) => console.log(`v2 [${r.path}] index handler = ${r.handlers.index}`)); let routeInfos: RouteInfo[] = []; for(let key in versionMap) { let pathPrefix = "/"; if (prefix) { pathPrefix += prefix; } versionMap[key].forEach((route) => { const treatAsAction: boolean = route.options.routeParamOptions?.treatAsAction || false; if (route.handlers.index) { routeInfos.push({ method: "get", path: `${pathPrefix}/${route.version}/${route.path}`, ctor: route.ctor, handler: route.handlers.index, routeOptions: route.options }) } if (route.handlers.show) { if (treatAsAction === false) { routeInfos.push({ method: "get", path: `${pathPrefix}/${route.version}/${route.path}/:id`, ctor: route.ctor, handler: route.handlers.show, routeOptions: route.options }) } } if (route.handlers.post) { routeInfos.push({ method: "post", path: `${pathPrefix}/${route.version}/${route.path}`, ctor: route.ctor, handler: route.handlers.post, routeOptions: route.options }) } if (route.handlers.put) { routeInfos.push({ method: "put", path: `${pathPrefix}/${route.version}/${route.path}${treatAsAction ? 
"" : "/:id"}`, ctor: route.ctor, handler: route.handlers.put, routeOptions: route.options }) } if (route.handlers.delete) { routeInfos.push({ method: "delete", path: `${pathPrefix}/${route.version}/${route.path}${treatAsAction ? "" : "/:id"}`, ctor: route.ctor, handler: route.handlers.delete, routeOptions: route.options }) } }) } return routeInfos; } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.formrecognizer.implementation.util;

import com.azure.ai.formrecognizer.models.DocumentFieldType;
import com.azure.ai.formrecognizer.administration.models.DocumentFieldSchema;

import java.util.Map;

/**
 * The helper class to set the non-public properties of a {@link DocumentFieldSchema} instance.
 */
public final class DocumentFieldSchemaHelper {
    private static DocumentFieldSchemaAccessor accessor;

    private DocumentFieldSchemaHelper() {
    }

    /**
     * Type defining the methods to set the non-public properties of a {@link DocumentFieldSchema} instance.
     */
    public interface DocumentFieldSchemaAccessor {
        void setType(DocumentFieldSchema documentFieldSchema, DocumentFieldType type);
        void setDescription(DocumentFieldSchema documentFieldSchema, String description);
        void setExample(DocumentFieldSchema documentFieldSchema, String example);
        void setItems(DocumentFieldSchema documentFieldSchema, DocumentFieldSchema items);
        void setProperties(DocumentFieldSchema documentFieldSchema, Map<String, DocumentFieldSchema> properties);
    }

    /**
     * The method called from {@link DocumentFieldSchema} to set its accessor.
     *
     * @param documentFieldSchemaAccessor The accessor.
     */
    public static void setAccessor(final DocumentFieldSchemaHelper.DocumentFieldSchemaAccessor documentFieldSchemaAccessor) {
        accessor = documentFieldSchemaAccessor;
    }

    static void setType(DocumentFieldSchema documentFieldSchema, DocumentFieldType type) {
        accessor.setType(documentFieldSchema, type);
    }

    static void setDescription(DocumentFieldSchema documentFieldSchema, String description) {
        accessor.setDescription(documentFieldSchema, description);
    }

    static void setExample(DocumentFieldSchema documentFieldSchema, String example) {
        accessor.setExample(documentFieldSchema, example);
    }

    static void setItems(DocumentFieldSchema documentFieldSchema, DocumentFieldSchema items) {
        accessor.setItems(documentFieldSchema, items);
    }

    static void setProperties(DocumentFieldSchema documentFieldSchema, Map<String, DocumentFieldSchema> properties) {
        accessor.setProperties(documentFieldSchema, properties);
    }
}
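The accessor is expected to be installed from a static initializer inside DocumentFieldSchema itself, which is the usual shape of this pattern in the Azure SDK for Java. A minimal sketch of that handshake follows; the private field names are assumptions:

// Sketch of the wiring inside DocumentFieldSchema (field names assumed).
static {
    DocumentFieldSchemaHelper.setAccessor(new DocumentFieldSchemaHelper.DocumentFieldSchemaAccessor() {
        @Override
        public void setType(DocumentFieldSchema schema, DocumentFieldType type) {
            schema.type = type;
        }

        @Override
        public void setDescription(DocumentFieldSchema schema, String description) {
            schema.description = description;
        }

        @Override
        public void setExample(DocumentFieldSchema schema, String example) {
            schema.example = example;
        }

        @Override
        public void setItems(DocumentFieldSchema schema, DocumentFieldSchema items) {
            schema.items = items;
        }

        @Override
        public void setProperties(DocumentFieldSchema schema, Map<String, DocumentFieldSchema> properties) {
            schema.properties = properties;
        }
    });
}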
/* Loads a LevelMap stored as JSON from the given file.
 * --- JSON is a plain-text format for structured data
 * (objects, arrays, strings, numbers and booleans). */
private JSONObject loadToJson(String filename) {
    StringBuilder json = new StringBuilder();
    try (BufferedReader filein = new BufferedReader(new FileReader(filename))) {
        String linein;
        while ((linein = filein.readLine()) != null) {
            json.append(linein);
        }
    } catch (IOException e) {
        // FileNotFoundException is a subclass of IOException
        e.printStackTrace();
    }
    return (JSONObject) JSONValue.parse(json.toString());
}
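A hypothetical call site; note that json-simple's JSONValue.parse returns null for malformed input, so the result should be null-checked (the path and key names below are invented):

JSONObject level = loadToJson("maps/level1.json");   // hypothetical path
if (level != null) {
    Long width = (Long) level.get("width");          // json-simple parses integers as Long
    JSONArray tiles = (JSONArray) level.get("tiles");
}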
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.lang3.Validate;

/**
 * Simple FilterInputStream that can replace occurrences of bytes with something else.
 */
public class ReplacingInputStream extends FilterInputStream {

    // while matching, this is where the bytes go.
    int[] buf = null;
    int matchedIndex = 0;
    int unbufferIndex = 0;
    int replacedIndex = 0;

    private final byte[] pattern;
    private final byte[] replacement;
    private State state = State.NOT_MATCHED;

    // simple state machine for keeping track of what we are doing
    private enum State {
        NOT_MATCHED,
        MATCHING,
        REPLACING,
        UNBUFFER
    }

    /**
     * @param is input
     * @return nested replacing stream that replaces \n\r (DOS) and \r (MAC) line endings with UNIX ones "\n".
     */
    public static InputStream newLineNormalizingInputStream(InputStream is) {
        return new ReplacingInputStream(new ReplacingInputStream(is, "\n\r", "\n"), "\r", "\n");
    }

    /**
     * Replace occurrences of pattern in the input. Note: input is assumed to be UTF-8 encoded. If not the case use byte[] based pattern and replacement.
     * @param in input
     * @param pattern pattern to replace.
     * @param replacement the replacement or null
     */
    public ReplacingInputStream(InputStream in, String pattern, String replacement) {
        this(in, pattern.getBytes(StandardCharsets.UTF_8), replacement == null ? null : replacement.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Replace occurrences of pattern in the input.
     * @param in input
     * @param pattern pattern to replace
     * @param replacement the replacement or null
     */
    public ReplacingInputStream(InputStream in, byte[] pattern, byte[] replacement) {
        super(in);
        Validate.notNull(pattern);
        Validate.isTrue(pattern.length > 0, "pattern length should be > 0", pattern.length);
        this.pattern = pattern;
        this.replacement = replacement;
        // we will never match more than the pattern length
        buf = new int[pattern.length];
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        // copy of parent logic; we need to call our own read() instead of super.read(), which delegates instead of calling our read
        if (b == null) {
            throw new NullPointerException();
        } else if (off < 0 || len < 0 || len > b.length - off) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return 0;
        }

        int c = read();
        if (c == -1) {
            return -1;
        }
        b[off] = (byte) c;

        int i = 1;
        try {
            for (; i < len; i++) {
                c = read();
                if (c == -1) {
                    break;
                }
                b[off + i] = (byte) c;
            }
        } catch (IOException ee) {
        }
        return i;
    }

    @Override
    public int read(byte[] b) throws IOException {
        // call our own read
        return read(b, 0, b.length);
    }

    @Override
    public int read() throws IOException {
        // use a simple state machine to figure out what we are doing
        int next;
        switch (state) {
        case NOT_MATCHED:
            // we are not currently matching, replacing, or unbuffering
            next = super.read();
            if (pattern[0] == next) {
                // clear whatever was there and start matching at 0
                buf = new int[pattern.length];
                matchedIndex = 0;
                buf[matchedIndex++] = next;
                if (pattern.length == 1) {
                    // edge case: when the pattern length is 1 we go straight to replacing
                    state = State.REPLACING;
                    // reset replace counter
                    replacedIndex = 0;
                } else {
                    // pattern is longer than one byte, keep matching
                    state = State.MATCHING;
                }
                // recurse to continue matching
                return read();
            } else {
                return next;
            }
        case MATCHING:
            // the previous bytes matched part of the pattern
            next = super.read();
            if (pattern[matchedIndex] == next) {
                buf[matchedIndex++] = next;
                if (matchedIndex == pattern.length) {
                    // we've found a full match!
if(replacement==null || replacement.length==0) { // the replacement is empty, go straight to NOT_MATCHED state=State.NOT_MATCHED; matchedIndex=0; } else { // start replacing state=State.REPLACING; replacedIndex=0; } } } else { // mismatch -> unbuffer buf[matchedIndex++]=next; state=State.UNBUFFER; unbufferIndex=0; } return read(); case REPLACING: // we've fully matched the pattern and are returning bytes from the replacement next=replacement[replacedIndex++]; if(replacedIndex==replacement.length) { state=State.NOT_MATCHED; replacedIndex=0; } return next; case UNBUFFER: // we partially matched the pattern before encountering a non matching byte // we need to serve up the buffered bytes before we go back to NOT_MATCHED next=buf[unbufferIndex++]; if(unbufferIndex==matchedIndex) { state=State.NOT_MATCHED; matchedIndex=0; } return next; default: throw new IllegalStateException("no such state " + state); } } @Override public String toString() { return state.name() + " " + matchedIndex + " " + replacedIndex + " " + unbufferIndex; } }
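A usage sketch of the stream above: every '-' in the source bytes comes out as '+'. The data is invented; only the public constructor and read loop are used (needs java.io.* and java.nio.charset.StandardCharsets):

byte[] src = "a-b-c".getBytes(StandardCharsets.UTF_8);
try (InputStream in = new ReplacingInputStream(
        new ByteArrayInputStream(src), "-", "+")) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    int c;
    while ((c = in.read()) != -1) {
        out.write(c);
    }
    System.out.println(new String(out.toByteArray(), StandardCharsets.UTF_8)); // prints a+b+c
}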
package v8

import (
	"github.com/stretchr/testify/assert"
	"github.com/v8platform/designer"
	"github.com/v8platform/runner"
	"reflect"
	"testing"
)

func TestCreateInfobase(t *testing.T) {
	if testing.Short() {
		t.Skip("skipped for integrated tests")
	}

	type args struct {
		create runner.Command
		opts   []interface{}
	}

	temp := t.TempDir()

	tests := []struct {
		name    string
		args    args
		want    *Infobase
		wantErr bool
	}{
		{
			"simple",
			args{
				create: designer.CreateFileInfoBaseOptions{
					File: temp,
				},
			},
			&Infobase{
				Connect: FilePath{
					File: temp,
				},
			},
			false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := CreateInfobase(tt.args.create, tt.args.opts...)
			if (err != nil) != tt.wantErr {
				t.Errorf("CreateInfobase() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("CreateInfobase() got = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestApiConfig(t *testing.T) {
	ib := NewFileInfobase("./test_ib")

	tests := []struct {
		name     string
		where    ConnectionString
		want     Command
		opts     []interface{}
		wantArgs []string
	}{
		{
			"LoadCfg",
			ib,
			LoadCfg("./1cv8.cf"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadCfg ./1cv8.cf",
			},
		},
		{
			"LoadCfg_UpdateDBCfg",
			ib,
			LoadCfg("./1cv8.cf", UpdateDBCfg(true, true)),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadCfg ./1cv8.cf",
				"/UpdateDBCfg",
				//"-Dynamic+", FIX: restore once https://github.com/v8platform/marshaler/issues/1 is fixed
				"-Server",
			},
		},
		{
			"LoadConfigFromFiles",
			ib,
			LoadConfigFromFiles("./src"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadConfigFromFiles ./src",
			},
		},
		{
			"LoadConfigFromFiles_WithFiles",
			ib,
			LoadConfigFromFiles("./src").WithFiles("./src/file1.xml", "./src/file2.xml"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadConfigFromFiles ./src",
				"-files ./src/file1.xml,./src/file2.xml",
			},
		},
		{
			"LoadConfigFromFiles_WithListFile",
			ib,
			LoadConfigFromFiles("./src").WithListFile("./file_list.xml"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadConfigFromFiles ./src",
				"-listFile ./file_list.xml",
			},
		},
		{
			"LoadConfigFromFiles_WithUpdateDumpInfo",
			ib,
			LoadConfigFromFiles("./src").WithUpdateDumpInfo(),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadConfigFromFiles ./src",
				"-updateConfigDumpInfo",
			},
		},
		{
			"LoadConfigFromFiles_UpdateDBCfg",
			ib,
			LoadConfigFromFiles("./src", UpdateDBCfg(true, true)),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/LoadConfigFromFiles ./src",
				"/UpdateDBCfg",
				//"-Dynamic+", FIX: restore once https://github.com/v8platform/marshaler/issues/1 is fixed
				"-Server",
			},
		},
		{
			"UpdateCfg",
			ib,
			UpdateCfg("./1cv8.cf", false),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/UpdateCfg ./1cv8.cf",
			},
		},
		{
			"UpdateCfg_force",
			ib,
			UpdateCfg("./1cv8.cf", true),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/UpdateCfg ./1cv8.cf",
				"-Force",
			},
		},
		{
			"UpdateCfg_UpdateDBCfg",
			ib,
			UpdateCfg("./1cv8.cf", true, UpdateDBCfg(true, true)),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/UpdateCfg ./1cv8.cf",
				"-Force",
				"/UpdateDBCfg",
				//"-Dynamic+", FIX: restore once https://github.com/v8platform/marshaler/issues/1 is fixed
				"-Server",
			},
		},
		{
			"DumpCfg",
			ib,
			DumpCfg("./1cv8.cf"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpCfg ./1cv8.cf",
			},
		},
		{
			"DumpConfigToFiles",
			ib,
			DumpConfigToFiles("./src", false),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
			},
		},
		{
			"DumpConfigToFiles_force",
			ib,
			DumpConfigToFiles("./src", true),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-force",
			},
		},
		{
			"DumpConfigToFiles_update",
			ib,
			DumpConfigToFiles("./src", false).WithUpdate(""),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-update",
			},
		},
		{
			"DumpConfigToFiles_configDumpInfo",
			ib,
			DumpConfigToFiles("./src", false).WithUpdate("./dumpInfo.xml"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-update",
				"-configDumpInfoForChanges ./dumpInfo.xml",
			},
		},
		{
			"GetChangesForConfigDump",
			ib,
			GetChangesForConfigDump("./src", "./dumpInfo.xml", false),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-getChanges ./dumpInfo.xml",
			},
		},
		{
			"GetChangesForConfigDump_force",
			ib,
			GetChangesForConfigDump("./src", "./dumpInfo.xml", true),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-getChanges ./dumpInfo.xml",
				"-force",
			},
		},
		{
			"GetChangesForConfigDump_WithConfigDumpInfo",
			ib,
			GetChangesForConfigDump("./src", "./dumpInfo.xml", false).
				WithConfigDumpInfo("./old_dumpInfo.xml"),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/DumpConfigToFiles ./src",
				"-getChanges ./dumpInfo.xml",
				"-configDumpInfoForChanges ./old_dumpInfo.xml",
			},
		},
		{
			"DisableCfgSupport",
			ib,
			DisableCfgSupport(),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/ManageCfgSupport",
				"-disableSupport",
			},
		},
		{
			"DisableCfgSupport_force",
			ib,
			DisableCfgSupport(true),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/ManageCfgSupport",
				"-disableSupport",
				"-force",
			},
		},
		{
			"RollbackCfg",
			ib,
			RollbackCfg(),
			nil,
			[]string{
				"DESIGNER",
				"/IBConnectionString File='./test_ib';",
				"/DisableStartupDialogs",
				"/RollbackCfg",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			runTestCase(t, tt)
		})
	}
}

func TestApiOptions(t *testing.T) {
	//ib := NewFileInfobase("./test_ib")

	tests := []struct {
		name     string
		where    ConnectionString
		want     Command
		opts     []interface{}
		wantArgs []string
	}{
		{
			"options",
			Infobase{
				Connect: ServerPath{
					Server: "app-server",
					Ref:    "ib_name",
				},
				User:                "administrator",
				Password:            "password",
				AllowServerLicenses: true,
				SeparatorList: DatabaseSeparatorList{DatabaseSeparator{
					Use:   true,
					Value: "sep1",
				}},
				UsePrivilegedMode: true,
				Locale:            "ru_RU",
			},
			designer.LoadCfgOptions{
				File:     "./1cv8.cf",
				Designer: designer.NewDesigner(),
			},
			[]interface{}{
				WithCredentials("admin", "<PASSWORD>"),
				WithOut("./out_file", true),
				WithUC("UnlockCode"),
			},
			[]string{
				"DESIGNER",
				"/IBConnectionString Srvr=app-server;Ref='ib_name';Usr=administrator;Pwd=password;LicDstr=Y;ZN=+sep1;Prmod=1;Locale=ru_RU",
				"/DisableStartupDialogs",
				"/LoadCfg ./1cv8.cf",
				"/Out ./out_file -NoTruncate",
				"/UC UnlockCode",
			},
		},
} for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { runTestCase(t, tt) }) } } func runTestCase(t *testing.T, tt struct { name string where ConnectionString want Command opts []interface{} wantArgs []string }) { v8run := runner.NewPlatformRunner(tt.where, tt.want, tt.opts...) got := v8run.Args() for _, arg := range tt.wantArgs { assert.Contains(t, got, arg) } }
// Unmarshal deserializes the configuration into the supplied object
func (configMgr *ConfigurationManager) Unmarshal(obj interface{}) error {
	rv := reflect.ValueOf(obj)
	if rv.Kind() != reflect.Ptr || rv.IsNil() {
		err := errors.New("invalid object supplied")
		lager.Logger.Error("invalid object supplied ", err)
		return err
	}
	return configMgr.unmarshal(rv, doNotConsiderTag)
}
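A call-site sketch: the struct below is hypothetical, but per the guard above the one hard requirement is a non-nil pointer.

type AppConfig struct {
	Endpoint string
	Timeout  int
}

var cfg AppConfig
if err := configMgr.Unmarshal(&cfg); err != nil {
	// passing cfg by value, or a nil *AppConfig, fails the
	// Kind()/IsNil() guard and returns "invalid object supplied"
	log.Fatal(err)
}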
/** * Copyright (C) 2010-2016 eBusiness Information, Excilys Group * Copyright (C) 2016-2019 the AndroidAnnotations project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed To in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.androidannotations.test; import static org.fest.assertions.api.Assertions.assertThat; import static org.robolectric.Robolectric.setupActivity; import static org.robolectric.shadow.api.Shadow.extract; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.shadows.ShadowTextView; import android.text.Editable; import android.text.SpannableStringBuilder; import android.text.TextWatcher; import android.widget.TextView; @RunWith(RobolectricTestRunner.class) public class TextWatchedActivityTest { private TextWatchedActivity_ activity; @Before public void setUp() { activity = setupActivity(TextWatchedActivity_.class); } @Test public void testAfterTextChangeHandled() { assertThat(activity.afterTextChangeHandled).isFalse(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); afterTextChanged(textView, null); assertThat(activity.afterTextChangeHandled).isTrue(); } @Test public void testBeforeTextChangeHandled() { assertThat(activity.beforeTextChangeHandled).isFalse(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); beforeTextChanged(textView, null, 0, 0, 0); assertThat(activity.beforeTextChangeHandled).isTrue(); } @Test public void testOnTextChangeHandled() { assertThat(activity.onTextChangeHandled).isFalse(); TextView textView = (TextView) activity.findViewById(R.id.watchedEditText); onTextChanged(textView, null, 0, 0, 0); assertThat(activity.onTextChangeHandled).isTrue(); } @Test public void testAfterTextChangeTextViewPassed() { assertThat(activity.afterTextView).isNull(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); afterTextChanged(textView, null); assertThat(activity.afterTextView).isEqualTo(textView); } @Test public void testAfterTextChangeEditablePassed() { assertThat(activity.afterEditable).isNull(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); Editable s = new SpannableStringBuilder("hello"); afterTextChanged(textView, s); assertThat(activity.afterEditable).isEqualTo(s); } @Test public void testOnTextChangeTextViewPassed() { assertThat(activity.onTextView).isNull(); TextView textView = (TextView) activity.findViewById(R.id.watchedEditText); onTextChanged(textView, null, 0, 0, 0); assertThat(activity.onTextView).isEqualTo(textView); } @Test public void testOnTextChangeParametersPassed() { assertThat(activity.onSequence).isNull(); assertThat(activity.onStart).isZero(); assertThat(activity.onBefore).isZero(); assertThat(activity.onCount).isZero(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); CharSequence s = "helloOnTextChange"; int start = 1; int before = 2; int count = 3; onTextChanged(textView, s, start, before, count); assertThat(activity.onSequence).isEqualTo(s); 
assertThat(activity.onStart).isEqualTo(start); assertThat(activity.onBefore).isEqualTo(before); assertThat(activity.onCount).isEqualTo(count); } @Test public void testBeforeTextChangeTextViewPassed() { assertThat(activity.beforeTextView).isNull(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); beforeTextChanged(textView, null, 0, 0, 0); assertThat(activity.beforeTextView).isEqualTo(textView); } @Test public void testBeforeTextChangeParametersPassedPassed() { assertThat(activity.beforeTextView).isNull(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); beforeTextChanged(textView, null, 0, 0, 0); assertThat(activity.beforeTextView).isEqualTo(textView); } @Test public void testBeforeTextChangeParametersPassed() { assertThat(activity.beforeSequence).isNull(); assertThat(activity.beforeStart).isZero(); assertThat(activity.beforeAfter).isZero(); assertThat(activity.beforeCount).isZero(); TextView textView = (TextView) activity.findViewById(R.id.helloTextView); CharSequence s = "helloBeforeTextChange"; int start = 1; int after = 2; int count = 3; beforeTextChanged(textView, s, start, count, after); assertThat(activity.beforeSequence).isEqualTo(s); assertThat(activity.beforeStart).isEqualTo(start); assertThat(activity.beforeAfter).isEqualTo(after); assertThat(activity.beforeCount).isEqualTo(count); } private static void afterTextChanged(TextView textView, Editable s) { ShadowTextView shadowTextView = extract(textView); for (TextWatcher textWatcher : shadowTextView.getWatchers()) { textWatcher.afterTextChanged(s); } } private static void beforeTextChanged(TextView textView, CharSequence s, int start, int count, int after) { ShadowTextView shadowTextView = extract(textView); for (TextWatcher textWatcher : shadowTextView.getWatchers()) { textWatcher.beforeTextChanged(s, start, count, after); } } private static void onTextChanged(TextView textView, CharSequence s, int start, int before, int count) { ShadowTextView shadowTextView = extract(textView); for (TextWatcher textWatcher : shadowTextView.getWatchers()) { textWatcher.onTextChanged(s, start, before, count); } } }
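For context, the activity under test is presumably generated from an AndroidAnnotations class wired with the text-change annotations this suite exercises. A minimal sketch, with the layout, ids and field names assumed from the tests:

@EActivity(R.layout.text_watched)
public class TextWatchedActivity extends Activity {

    boolean afterTextChangeHandled;

    @AfterTextChange(R.id.helloTextView)
    void afterHelloChanged(TextView tv, Editable text) {
        afterTextChangeHandled = true;   // mirrors the flag asserted above
    }

    @TextChange(R.id.watchedEditText)
    void onWatchedChanged(CharSequence text, TextView tv, int before, int start, int count) {
        // AndroidAnnotations injects these parameters by type/position
    }
}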
/* information about observing circumstances including time and location from GPS or manually entered. * use Serial1 hardware RX1/TX1 for Adafruit GPS shield on Mega. */ #include "Circum.h" // serial configuration for GPS #define GPS_TX_PIN 13 // not needed #define GPS_RX_PIN 12 #define GPS_INVERT false #define GPS_BUFSIZE 512 #define GPS_BAUD 9600 /* constructor */ Circum::Circum () { // create serial connection to GPS board resetWatchdog(); //ss = new SoftwareSerial (GPS_RX_PIN, GPS_TX_PIN, GPS_INVERT, GPS_BUFSIZE); //ss->begin(GPS_BAUD); ss = new SoftwareSerial (); ss->begin(GPS_BAUD, GPS_RX_PIN, GPS_TX_PIN, SWSERIAL_8N1, false, 512); // create GPS parser resetWatchdog(); GPS = new TinyGPS(); // init values resetWatchdog(); latitude = 30.0; longitude = -110.0; altitude = 700.0; obs = NULL; newObserver (latitude, longitude, altitude); hdop = 99.0; nsats = 0; setnow (2018, 1, 1, 0, 0, 0); magdecl (latitude, longitude, altitude, decimalYear(), &magdeclination); // init flags gps_lock = false; gps_ok = false; time_overridden = false; loc_overridden = false; } /* return current time as a decimal year */ float Circum::decimalYear() { // get time now, sets dt_now.DN and dt_now.TN int year; uint8_t month, day, h, m, s; getnow (year, month, day, h, m, s); // get time on jan 1 this year DateTime y0 (year, 1, 1, 0, 0, 0); // n days this year int nd = (year%4) ? 365 : 366; // return year and fraction return (year + ((dt_now.DN + dt_now.TN) - (y0.DN + y0.TN))/nd); } /* send latest values to web page. * N.B. names must match ids in web page */ void Circum::sendNewValues (WiFiClient client) { int year; uint8_t month, day, h, m, s; getnow (year, month, day, h, m, s); client.print (F("GPS_Date=")); printDate (client, year, month, day); printPL (client, time_overridden ? BADNEWS : NORMAL); client.print (F("GPS_UTC=")); printHMS (client, h, m, s); printPL (client, time_overridden ? BADNEWS : NORMAL); client.print (F("GPS_Status=")); if (gps_lock) { if (time_overridden || loc_overridden) client.println (F("Overridden!")); else client.println (F("Locked+")); } else if (gps_ok) client.println (F("No lock!")); else client.println (F("Not found!")); client.print (F("GPS_Enable=")); if (gps_lock && (time_overridden || loc_overridden)) client.println (F("true")); else client.println (F("false")); client.print (F("GPS_Lat=")); client.print (latitude, 3); printPL (client, loc_overridden ? BADNEWS : NORMAL); client.print (F("GPS_Long=")); client.print (longitude, 3); printPL (client, loc_overridden ? BADNEWS : NORMAL); client.print (F("GPS_Alt=")); client.print (altitude); printPL (client, loc_overridden ? 
BADNEWS : NORMAL); client.print (F("GPS_MagDecl=")); client.println (magdeclination); client.print (F("GPS_HDOP=")); client.println (hdop); client.print (F("GPS_NSat=")); client.println (nsats); } /* print value v in sexagesimal format, can be negative */ void Circum::printSexa (WiFiClient client, float v) { bool isneg = v < 0; if (isneg) { v = -v; client.print (F("-")); } uint8_t h = (uint8_t) v; v = (v - h)*60; uint8_t m = (uint8_t) v; v = (v - m)*60; uint8_t s = (uint8_t) v; printHMS (client, h, m, s); } /* print time */ void Circum::printHMS (WiFiClient client, uint8_t h, uint8_t m, uint8_t s) { client.print (h); if (m < 10) client.print (F(":0")); else client.print (F(":")); client.print (m); if (s < 10) client.print (F(":0")); else client.print (F(":")); client.print (s); } /* print the given date */ void Circum::printDate (WiFiClient client, int y, uint8_t m, uint8_t d) { client.print(y); client.print(F(" ")); client.print(m); client.print(F(" ")); client.print(d); } /* end a value with the given print level code */ void Circum::printPL (WiFiClient client, PrintLevel pl) { switch (pl) { case BADNEWS: client.println (F("!")); break; case GOODNEWS: client.println (F("+")); break; default: client.println (F("")); break; } } /* process name = value. * return whether we recognize it */ bool Circum::overrideValue (char *name, char *value) { if (!strcmp (name, "GPS_UTC")) { // op is setting new UTC int year; uint8_t month, day, h, m, s; getnow (year, month, day, h, m, s); // start with now char *mat, *sat; // start of min and sec, if any h = strtol (value, &mat, 10); // override hour if (mat != value) { // if new minute ... m = strtol (mat, &sat, 10); // override minutes if (sat != mat) // if new seconds ... s = strtol (sat, NULL, 10); // override seconds } setnow (year, month, day, h, m, s); // set system time to new value time_overridden = true; // set flag that op has overridden GPS time target->updateTopo(); // update target to new time target->findNextPass(); // update pass from now target->computeSkyPath(); // and show return (true); } if (!strcmp (name, "GPS_Date")) { // op is setting new date int year; uint8_t month, day, h, m, s; getnow (year, month, day, h, m, s); // start with now char *mat, *dat; // start of month and day, if any year = strtol (value, &mat, 10); // override year if (mat != value) { // if new month ... month = strtol (mat, &dat, 10); // override month if (dat != mat) // if new day ... 
day = strtol (dat, NULL, 10); // override day } setnow (year, month, day, h, m, s); // set system time to new value time_overridden = true; // set flag that op has overridden GPS time target->updateTopo(); // update target to new time target->findNextPass(); // update pass from now target->computeSkyPath(); // and show return (true); } if (!strcmp (name, "GPS_Lat")) { latitude = atof (value); newObserver (latitude, longitude, altitude); loc_overridden = true; // set flag that op has overridden GPS loc target->updateTopo(); // update target to new time target->findNextPass(); // update pass from here target->computeSkyPath(); // and show magdecl (latitude, longitude, altitude, decimalYear(), &magdeclination); return (true); } if (!strcmp (name, "GPS_Long")) { longitude = atof (value); newObserver (latitude, longitude, altitude); loc_overridden = true; // set flag that op has overridden GPS loc target->updateTopo(); // update target to new time target->findNextPass(); // update pass from here target->computeSkyPath(); // and show magdecl (latitude, longitude, altitude, decimalYear(), &magdeclination); return (true); } if (!strcmp (name, "GPS_Alt")) { altitude = atof (value); newObserver (latitude, longitude, altitude); loc_overridden = true; // set flag that op has overridden GPS loc target->updateTopo(); // update target to new time target->findNextPass(); // update pass from here target->computeSkyPath(); // and show magdecl (latitude, longitude, altitude, decimalYear(), &magdeclination); return (true); } if (!strcmp (name, "GPS_Enable")) { time_overridden = false; // resume GPS values loc_overridden = false; // resume GPS values } return (false); // not one of ours } /* call occasionally to sync our system time from GPS, if it is running ok. */ void Circum::checkGPS() { resetWatchdog(); // read more from GPS, process when new message complete while (ss->available()) { // Serial.print((char)ss->peek()); if (GPS->encode(ss->read())) { // note we receiving GPS sentences ok gps_ok = true; // get location and age float new_lat, new_lng, new_alt; unsigned long loc_fix_age; GPS->f_get_position(&new_lat, &new_lng, &loc_fix_age); new_alt = GPS->f_altitude(); // get time and age unsigned long time_fix_age; int new_year; byte new_mon, new_day, new_hr, new_min, new_sec, new_hund; GPS->crack_datetime (&new_year, &new_mon, &new_day, &new_hr, &new_min, &new_sec, &new_hund, &time_fix_age); // determine whether data are up to date if (loc_fix_age == TinyGPS::GPS_INVALID_AGE || time_fix_age == TinyGPS::GPS_INVALID_AGE) { gps_lock = false; Serial.println("No fix detected"); } else { gps_lock = true; if (loc_fix_age > 5000 || time_fix_age > 5000) Serial.println("Warning: possible stale data!"); } if (gps_lock) { // update system time from GPS unless op has overridden if (!time_overridden) setnow (new_year, new_mon, new_day, new_hr, new_min, new_sec); // update location from GPS, unless op has overridden or within allowed jitter if (!loc_overridden && (fabs(latitude - new_lat) > 0.01 || fabs(longitude - new_lng) > 0.01 || fabs (altitude - new_alt) > 100) ) { latitude = new_lat; longitude = new_lng; altitude = new_alt; newObserver (latitude, longitude, altitude); magdecl (latitude, longitude, altitude, decimalYear(), &magdeclination); } // get fix quality info hdop = 0.01*GPS->hdop(); nsats = (int)GPS->satellites(); } } } } /* get time from dt_now advanced to current millis() */ void Circum::getnow(int &year, uint8_t &month, uint8_t &day, uint8_t &h, uint8_t &m, uint8_t &s) { dt_now.TN = dt_TN0; dt_now.DN = 
dt_DN0; dt_now.add ((long)((millis() - dt_m0)/1000)); dt_now.gettime(year, month, day, h, m, s); } /* init dt_now based on current millis() */ void Circum::setnow(int year, uint8_t month, uint8_t day, uint8_t h, uint8_t m, uint8_t s) { dt_m0 = millis(); dt_now.settime(year, month, day, h, m, s); dt_TN0 = dt_now.TN; dt_DN0 = dt_now.DN; } /* return age of satellite elements in days */ float Circum::age (Satellite *sat) { return ((dt_now.DN + dt_now.TN) - (sat->DE + sat->TE)); } /* install a new Observer */ void Circum::newObserver (float lat, float lng, float hgt) { if (obs) delete (obs); obs = new Observer (lat, lng, hgt); } /* return the current Observer */ Observer *Circum::observer() { return (obs); }
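For reference, a sketch of how the web-side override path above gets exercised. The instance name and literals are invented, and mutable char buffers are used because overrideValue takes char*:

// Sketch: force the observer to a new latitude, as the web UI would.
char name[] = "GPS_Lat";
char value[] = "47.606";
if (circum.overrideValue(name, value)) {
    // loc_overridden is now set; GPS fixes stop updating the location
    // until the op re-enables GPS via "GPS_Enable"
}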
// onViewStateRestored isn't called for dialogs when onCreateView doesn't return a view
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    mDelegate.onViewStateRestored(savedInstanceState);
}
Nutritional status of the elderly. II. Anthropometry, dietary and biochemical data of old pensioners in Perugia at the fifth-year follow-up.
A total of 206 aged pensioners of the city of Perugia were examined in a longitudinal study, with emphasis on body composition, diet, life habits and clinical-biochemical data. The anthropometric data of the individuals examined in 1981 show that obesity is common in both sexes, especially among the women. The opposite is true for the arm muscle area. In the men examined in both 1976 and 1981, body fatness decreased over the five-year interval. Food and nutrient intakes were rather similar in the 1976 and 1981 surveys. Consumption of milk, eggs, fish and legumes was not high. The men, but not the women, tended toward a high consumption of meat and alcoholic beverages. The 1981 intakes of thiamin, riboflavin and vitamin A appear, in general, to be inadequate according to our recommended values. The biochemical data show practically no difference between the two surveys. Serum cholesterol continued to be rather high for an Italian population.
/* returns this as a string representation */
string BlockHeader::to_s() {

  BlockHeader *header = clone();
  string str;

  mpz_t mpz_hash;
  mpz_init(mpz_hash);
  get_hash(mpz_hash);

  stringstream ss;
  ss << "BlockHeader: " << mpz_to_hex(mpz_hash) << "\n";
  ss << "  version: " << version << "\n";
  ss << "  hash prev: ";

  if (have_little_endian())
    for (uint32_t i = SHA256_DIGEST_LENGTH; i > 0; i--)
      ary_push_hex(str, header->hash_prev_block, i - 1);

  ss << str << "\n";
  ss << "  hash merkle: ";
  str.clear();

  if (have_little_endian())
    for (uint32_t i = SHA256_DIGEST_LENGTH; i > 0; i--)
      ary_push_hex(str, header->hash_merkle_root, i - 1);

  ss << str << "\n";
  ss << "  time: " << time << "\n";
  ss << "  difficulty: " << difficulty << "\n";
  ss << "  nonce: " << nonce << "\n";
  ss << "  shift: " << shift << "\n";
  ss << "  target: " << target << "\n";
  ss << "  adder: ";
  str.clear();

  if (have_little_endian())
    for (uint32_t i = header->adder.size(); i > 0; i--)
      ary_push_hex(str, header->adder, i - 1);

  ss << str << "\n";

  // release the clone and the temporary hash to avoid leaking
  delete header;
  mpz_clear(mpz_hash);

  return ss.str();
}
/*
 * oxCore is available under the MIT License (2014). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */
package org.gluu.persist.cloud.spanner.operation.impl;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

import org.gluu.persist.cloud.spanner.model.ResultCode;
import org.gluu.persist.cloud.spanner.model.TableMapping;
import org.gluu.persist.exception.KeyConversionException;
import org.gluu.persist.exception.MappingException;
import org.gluu.persist.exception.operation.ConfigurationException;
import org.gluu.persist.exception.operation.ConnectionException;
import org.gluu.persist.exception.operation.PersistenceException;
import org.gluu.persist.operation.auth.PasswordEncryptionMethod;
import org.gluu.orm.util.ArrayHelper;
import org.gluu.orm.util.PropertiesHelper;
import org.gluu.orm.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.NoCredentials;
import com.google.cloud.spanner.DatabaseClient;
import com.google.cloud.spanner.DatabaseId;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.Spanner;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.SpannerOptions;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.Type;
import com.google.cloud.spanner.Type.Code;
import com.google.cloud.spanner.Type.StructField;

/**
 * Perform connection pool initialization
 *
 * @author <NAME> Date: 04/14/2021
 */
public class SpannerConnectionProvider {

    private static final Logger LOG = LoggerFactory.getLogger(SpannerConnectionProvider.class);

    private static final String QUERY_HEALTH_CHECK = "SELECT 1";
    private static final String QUERY_PARENT_TABLE = "SELECT TABLE_NAME, PARENT_TABLE_NAME FROM information_schema.tables WHERE table_catalog = '' and table_schema = '' and parent_table_name is NOT NULL";
    private static final String QUERY_TABLE_SCHEMA = "SELECT TABLE_NAME, COLUMN_NAME, SPANNER_TYPE, IS_NULLABLE FROM information_schema.columns WHERE table_catalog = '' and table_schema = ''";

    private static final String CLIENT_PROPERTIES_PREFIX = "connection.client-property";

    private Properties props;
    private Properties clientConnectionProperties;

    private int creationResultCode;

    private ArrayList<String> binaryAttributes, certificateAttributes;

    private PasswordEncryptionMethod passwordEncryptionMethod;

    private String connectionProject;
    private String connectionInstance;
    private String connectionDatabase;

    private String connectionEmulatorHost;
    private String connectionCredentialsFile;

    private long defaultMaximumResultSize;
    private long maximumResultDeleteSize;

    private Map<String, Map<String, StructField>> tableColumnsMap;
    private Map<String, Set<String>> tableNullableColumnsSet;
    private Map<String, Set<String>> tableChildAttributesMap;

    private DatabaseClient dbClient;
    private Spanner spanner;

    protected SpannerConnectionProvider() {
    }

    public SpannerConnectionProvider(Properties props) {
        this.props = props;
        this.tableColumnsMap = new HashMap<>();
        this.tableNullableColumnsSet = new HashMap<>();
        this.tableChildAttributesMap = new HashMap<>();
    }

    public void create() {
        try {
            init();
        } catch (Exception ex) {
            this.creationResultCode = ResultCode.OPERATIONS_ERROR_INT_VALUE;

            Properties clonedProperties = (Properties) props.clone();
            LOG.error("Failed to create connection with properties: '{}'. Exception: {}", clonedProperties, ex);
        }
    }

    protected void init() throws Exception {
        if (!props.containsKey("connection.project")) {
            throw new ConfigurationException("Property 'connection.project' is mandatory!");
        }
        this.connectionProject = props.getProperty("connection.project");

        if (!props.containsKey("connection.instance")) {
            throw new ConfigurationException("Property 'connection.instance' is mandatory!");
        }
        this.connectionInstance = props.getProperty("connection.instance");

        if (!props.containsKey("connection.database")) {
            throw new ConfigurationException("Property 'connection.database' is mandatory!");
        }
        this.connectionDatabase = props.getProperty("connection.database");

        if (props.containsKey("connection.emulator-host")) {
            this.connectionEmulatorHost = props.getProperty("connection.emulator-host");
        }

        Properties filteredDriverProperties = PropertiesHelper.findProperties(props, CLIENT_PROPERTIES_PREFIX, ".");
        this.clientConnectionProperties = new Properties();
        for (Entry<Object, Object> driverPropertyEntry : filteredDriverProperties.entrySet()) {
            String key = StringHelper.toString(driverPropertyEntry.getKey()).substring(CLIENT_PROPERTIES_PREFIX.length() + 1);
            String value = StringHelper.toString(driverPropertyEntry.getValue());

            clientConnectionProperties.put(key, value);
        }

        if (props.containsKey("statement.limit.default-maximum-result-size")) {
            this.defaultMaximumResultSize = StringHelper.toLong(props.getProperty("statement.limit.default-maximum-result-size"), 1000);
        }

        if (props.containsKey("statement.limit.maximum-result-delete-size")) {
            this.maximumResultDeleteSize = StringHelper.toLong(props.getProperty("statement.limit.maximum-result-delete-size"), 10000);
        }

        this.connectionCredentialsFile = null;
        if (props.containsKey("connection.credentials-file")) {
            this.connectionCredentialsFile = props.getProperty("connection.credentials-file");
        }

        openWithWaitImpl();
        LOG.info("Created connection pool");

        if (props.containsKey("password.encryption.method")) {
            this.passwordEncryptionMethod = PasswordEncryptionMethod.getMethod(props.getProperty("password.encryption.method"));
        } else {
            this.passwordEncryptionMethod = PasswordEncryptionMethod.HASH_METHOD_SHA256;
        }

        this.binaryAttributes = new ArrayList<String>();
        if (props.containsKey("binaryAttributes")) {
            String[] binaryAttrs = StringHelper.split(props.get("binaryAttributes").toString().toLowerCase(), ",");
            this.binaryAttributes.addAll(Arrays.asList(binaryAttrs));
        }
        LOG.debug("Using binary attributes: '{}'", binaryAttributes);

        this.certificateAttributes = new ArrayList<String>();
        if (props.containsKey("certificateAttributes")) {
            String[] binaryAttrs = StringHelper.split(props.get("certificateAttributes").toString().toLowerCase(), ",");
            this.certificateAttributes.addAll(Arrays.asList(binaryAttrs));
        }
        LOG.debug("Using certificate attributes: '{}'", certificateAttributes);

        loadTableMetaData();

        this.creationResultCode = ResultCode.SUCCESS_INT_VALUE;
    }

    private void loadTableMetaData() {
        LOG.info("Scanning DB metadata...");

        long takes = System.currentTimeMillis();
        try (ResultSet resultSet = executeQuery(QUERY_PARENT_TABLE)) {
            if (resultSet.next()) {
                int tableNameIdx = resultSet.getColumnIndex("TABLE_NAME");
                int parentTableNameIdx = resultSet.getColumnIndex("PARENT_TABLE_NAME");
                do {
                    String parentTableName = resultSet.getString(parentTableNameIdx);
                    String tableName = resultSet.getString(tableNameIdx);

                    Set<String> childAttributes;
                    if (tableChildAttributesMap.containsKey(parentTableName)) {
                        childAttributes = tableChildAttributesMap.get(parentTableName);
                    } else {
                        childAttributes = new HashSet<>();
                        tableChildAttributesMap.put(parentTableName, childAttributes);
                    }

                    if (tableName.startsWith(parentTableName + "_")) {
                        tableName = tableName.substring(parentTableName.length() + 1);
                    }
                    childAttributes.add(tableName);
                } while (resultSet.next());
            }
        } catch (SpannerException ex) {
            throw new ConnectionException("Failed to get database metadata", ex);
        }
        LOG.debug("Build child attributes map: '{}'.", tableChildAttributesMap);

        HashMap<String, Type> typeMap = buildSpannerTypesMap();

        try (ResultSet resultSet = executeQuery(QUERY_TABLE_SCHEMA)) {
            if (resultSet.next()) {
                int tableNameIdx = resultSet.getColumnIndex("TABLE_NAME");
                int columnNameIdx = resultSet.getColumnIndex("COLUMN_NAME");
                int spannerTypeIdx = resultSet.getColumnIndex("SPANNER_TYPE");
                int isNullableIdx = resultSet.getColumnIndex("IS_NULLABLE");
                do {
                    String tableName = resultSet.getString(tableNameIdx);
                    String columnName = resultSet.getString(columnNameIdx);
                    String spannerType = resultSet.getString(spannerTypeIdx);
                    String isNullable = resultSet.getString(isNullableIdx);

                    // Load table schema
                    Map<String, StructField> tableColumns;
                    if (tableColumnsMap.containsKey(tableName)) {
                        tableColumns = tableColumnsMap.get(tableName);
                    } else {
                        tableColumns = new HashMap<>();
                        tableColumnsMap.put(tableName, tableColumns);
                    }

                    String comparableType = toComparableType(spannerType);
                    Type type = typeMap.get(comparableType);
                    if (type == null) {
                        throw new ConnectionException(String.format("Failed to parse SPANNER_TYPE: '%s'", spannerType));
                    }
                    tableColumns.put(columnName.toLowerCase(), StructField.of(columnName, type));

                    // Check if column nullable
                    Set<String> nullableColumns;
                    if (tableNullableColumnsSet.containsKey(tableName)) {
                        nullableColumns = tableNullableColumnsSet.get(tableName);
                    } else {
                        nullableColumns = new HashSet<>();
                        tableNullableColumnsSet.put(tableName, nullableColumns);
                    }

                    boolean nullable = "yes".equalsIgnoreCase(isNullable);
                    if (nullable) {
                        nullableColumns.add(columnName.toLowerCase());
                    }
                } while (resultSet.next());
            }
        } catch (SpannerException ex) {
            throw new ConnectionException("Failed to get database metadata", ex);
        }
        LOG.debug("Build table columns map: '{}'.", tableColumnsMap);

        takes = System.currentTimeMillis() - takes;
        LOG.info("Metadata scan finished in {} milliseconds", takes);
    }

    private HashMap<String, Type> buildSpannerTypesMap() {
        HashMap<String, Type> typeMap = new HashMap<>();

        // We have to add all types manually because Type is not an enum and there is no method to get them all
        addSpannerType(typeMap, Type.bool());
        addSpannerType(typeMap, Type.int64());
        addSpannerType(typeMap, Type.numeric());
        addSpannerType(typeMap, Type.float64());
        addSpannerType(typeMap, Type.string());
        addSpannerType(typeMap, Type.bytes());
        addSpannerType(typeMap, Type.timestamp());
        addSpannerType(typeMap, Type.date());

        return typeMap;
    }

    private static String toComparableType(String spannerType) {
        int idx = spannerType.indexOf("(");
        if (idx != -1) {
            spannerType = spannerType.substring(0, idx);
        }

        idx = spannerType.indexOf(">");
        if (idx == -1) {
            return spannerType.toLowerCase();
        }

        return spannerType.substring(0, idx).toLowerCase();
    }

    private void addSpannerType(HashMap<String, Type> typeMap, Type type) {
        typeMap.put(type.toString().toLowerCase(), type);
        typeMap.put(Code.ARRAY.name().toLowerCase() + "<" + type.toString().toLowerCase(), Type.array(type));
    }

    private void openWithWaitImpl() throws Exception {
        long connectionMaxWaitTimeMillis = StringHelper.toLong(props.getProperty("connection.client.create-max-wait-time-millis"), 30 * 1000L);
        LOG.debug("Using connection timeout: '{}'", connectionMaxWaitTimeMillis);

        Exception lastException = null;

        int attempt = 0;
        long currentTime = System.currentTimeMillis();
        long maxWaitTime = currentTime + connectionMaxWaitTimeMillis;
        do {
            attempt++;
            if (attempt > 0) {
                LOG.info("Attempting to create client connection: '{}'", attempt);
            }

            try {
                open();
                if (isConnected()) {
                    break;
                } else {
                    LOG.info("Failed to connect to Spanner");
                    destroy();
                    throw new ConnectionException("Failed to create client connection");
                }
            } catch (Exception ex) {
                lastException = ex;
            }

            try {
                Thread.sleep(5000);
            } catch (InterruptedException ex) {
                LOG.error("Exception happened in sleep", ex);
                return;
            }
            currentTime = System.currentTimeMillis();
        } while (maxWaitTime > currentTime);

        if (lastException != null) {
            throw lastException;
        }
    }

    private void open() throws FileNotFoundException, IOException {
        SpannerOptions.Builder optionsBuilder = SpannerOptions.newBuilder();
        if (StringHelper.isNotEmpty(connectionEmulatorHost)) {
            optionsBuilder.setEmulatorHost(connectionEmulatorHost);
        }

        if (StringHelper.isNotEmpty(connectionCredentialsFile)) {
            optionsBuilder.setCredentials(GoogleCredentials.fromStream(new FileInputStream(connectionCredentialsFile)));
        } else {
            optionsBuilder.setCredentials(NoCredentials.getInstance());
        }

        optionsBuilder.setProjectId(connectionProject);

        DatabaseId databaseId = DatabaseId.of(connectionProject, connectionInstance, connectionDatabase);

        this.spanner = optionsBuilder.build().getService();
        this.dbClient = spanner.getDatabaseClient(databaseId);
    }

    public boolean destroy() {
        boolean result = true;
        if (this.spanner != null) {
            try {
                this.spanner.close();
            } catch (RuntimeException ex) {
                LOG.error("Failed to close spanner instance", ex);
                result = false;
            }
        }

        return result;
    }

    public boolean isConnected() {
        if (this.dbClient == null) {
            return false;
        }

        boolean isConnected = true;
        try (ResultSet resultSet = executeQuery(QUERY_HEALTH_CHECK)) {
            return resultSet.next();
        } catch (Exception ex) {
            LOG.error("Failed to check connection", ex);
            isConnected = false;
        }

        return isConnected;
    }

    public int getCreationResultCode() {
        return creationResultCode;
    }

    public boolean isCreated() {
        return ResultCode.SUCCESS_INT_VALUE == creationResultCode;
    }

    public ArrayList<String> getBinaryAttributes() {
        return binaryAttributes;
    }

    public ArrayList<String> getCertificateAttributes() {
        return certificateAttributes;
    }

    public boolean isBinaryAttribute(String attributeName) {
        if (StringHelper.isEmpty(attributeName)) {
            return false;
        }

        return binaryAttributes.contains(attributeName.toLowerCase());
    }

    public boolean isCertificateAttribute(String attributeName) {
        if (StringHelper.isEmpty(attributeName)) {
            return false;
        }

        return certificateAttributes.contains(attributeName.toLowerCase());
    }

    public PasswordEncryptionMethod getPasswordEncryptionMethod() {
        return passwordEncryptionMethod;
    }

    public TableMapping getTableMappingByKey(String key, String objectClass, String tableName) {
        if (!tableColumnsMap.containsKey(tableName)) {
            throw new MappingException(String.format("Table '%s' does not exist in metadata", tableName));
        }

        Map<String, StructField> columTypes = tableColumnsMap.get(tableName);
        if ("_".equals(key)) {
            return new TableMapping("", tableName, objectClass, columTypes);
        }

        String[] baseNameParts = key.split("_");
        if (ArrayHelper.isEmpty(baseNameParts)) {
            throw new KeyConversionException("Failed to determine base key part!");
        }

        TableMapping tableMapping = new TableMapping(baseNameParts[0], tableName, objectClass, columTypes);

        return tableMapping;
    }

    public TableMapping getTableMappingByKey(String key, String objectClass) {
        return getTableMappingByKey(key, objectClass, objectClass);
    }

    public TableMapping getChildTableMappingByKey(String key, TableMapping tableMapping, String columnName) {
        String childTableName = tableMapping.getTableName() + "_" + columnName;

        if (!tableColumnsMap.containsKey(childTableName)) {
            return null;
        }

        TableMapping childTableMapping = getTableMappingByKey(key, tableMapping.getObjectClass(), childTableName);

        return childTableMapping;
    }

    public Set<String> getTableChildAttributes(String objectClass) {
        return tableChildAttributesMap.get(objectClass);
    }

    public Map<String, TableMapping> getChildTablesMapping(String key, TableMapping tableMapping) {
        Set<String> childAttributes = tableChildAttributesMap.get(tableMapping.getObjectClass());
        if (childAttributes == null) {
            return null;
        }

        Map<String, TableMapping> childTableMapping = new HashMap<>();
        for (String childAttribute : childAttributes) {
            TableMapping childColumTypes = getChildTableMappingByKey(key, tableMapping, childAttribute);
            if (childColumTypes == null) {
                String childTableName = tableMapping.getTableName() + "_" + childAttribute;
                throw new MappingException(String.format("Table '%s' does not exist in metadata", childTableName));
            }
            childTableMapping.put(childAttribute.toLowerCase(), childColumTypes);
        }

        return childTableMapping;
    }

    public Set<String> getTableNullableColumns(String objectClass) {
        return tableNullableColumnsSet.get(objectClass);
    }

    public DatabaseClient getClient() {
        return dbClient;
    }

    private ResultSet executeQuery(String sql) {
        return this.dbClient.singleUse().executeQuery(Statement.of(sql));
    }

    public Map<String, Map<String, StructField>> getDatabaseMetaData() {
        return tableColumnsMap;
    }

    public long getDefaultMaximumResultSize() {
        return defaultMaximumResultSize;
    }

    public long getMaximumResultDeleteSize() {
        return maximumResultDeleteSize;
    }
}
import fs from 'fs';
import hogan from 'hogan.js';

export function renderVerifyIdentityItemPage(vars: {
    account: object;
    chainName: string;
    confirmationAddress: string;
}) {
    const template = fs.readFileSync(`${__dirname}/templates/verifyIdentityItemTemplate.mustache`, 'utf8');
    const compiled = hogan.compile(template);
    return compiled.render(vars);
}

export function renderVerificationResultPage(vars: {
    identityItem: string;
    account: object;
    chainName: string;
    content: string;
}) {
    const template = fs.readFileSync(`${__dirname}/templates/verificationResultTemplate.mustache`, 'utf8');
    const compiled = hogan.compile(template);
    return compiled.render(vars);
}
// cmd/add-task/main.go
package main

import (
    "context"
    "encoding/json"

    "github.com/aws/aws-lambda-go/lambda"
    "github.com/smalleats/serverless-todo-example/errors"
    "github.com/smalleats/serverless-todo-example/todo"
)

type request struct {
    Note string `json:"note"`
}

type adder interface {
    Add(string) (todo.Task, error)
}

type handler func(request) (todo.Task, error)

func getHandler(store adder) handler {
    return func(r request) (todo.Task, error) {
        t, err := store.Add(r.Note)
        switch err.(type) {
        case nil:
            return t, nil
        case todo.ValidationError:
            return t, errors.WithCode(err, errors.CodeInvalidInput)
        default:
            return t, errors.WithCode(err, errors.CodeApplicationError)
        }
    }
}

func fromRaw(h handler) func(context.Context, json.RawMessage) (todo.Task, error) {
    return func(ctx context.Context, event json.RawMessage) (todo.Task, error) {
        var r request
        if err := json.Unmarshal(event, &r); err != nil {
            return todo.Task{}, errors.WithCode(err, errors.CodeBadInput)
        }
        return h(r)
    }
}

func main() {
    store := todo.MockStore{}
    lambda.Start(fromRaw(getHandler(store)))
}
const { registerSuite } = intern.getInterface('object');
const { assert } = intern.getPlugin('chai');

import * as timing from '../../../src/async/timing';
import { throwImmediatly, isEventuallyRejected } from '../../support/util';
import Promise from '@dojo/shim/Promise';

registerSuite('async/timing', {
    'delay()': {
        'delay returning a value after the given timeout': function () {
            return timing.delay<number>(251)(Date.now()).then(function (start: number) {
                const diff: number = Date.now() - start;
                assert.approximately(diff, 251, 100);
            });
        },

        'delay executing a function that returns a value after the given timeout': function () {
            const now = Date.now();
            const getNow = function () {
                return Date.now();
            };

            return timing.delay<number>(251)(getNow).then(function (finish: number) {
                const diff: number = finish - now;
                assert.approximately(diff, 251, 100);
            });
        },

        'delay executing a function that returns another promise after the given timeout': function () {
            const now = Date.now();
            const getNow = function () {
                return Promise.resolve(Date.now());
            };

            return timing.delay<number>(251)(getNow).then(function (finish: number) {
                const diff: number = finish - now;
                assert.approximately(diff, 251, 150);
            });
        },

        'delay should return undefined when the value is not passed in': function () {
            return timing.delay(251)().then(function (value) {
                assert.isUndefined(value);
            });
        },

        'delay can be reusable': function () {
            const start = Date.now();
            const delay = timing.delay(251);

            const p1 = delay().then(function () {
                assert.approximately(Date.now() - start, 251, 150);
            });

            const p2 = delay('foo').then(function (value) {
                assert.strictEqual(value, 'foo');
                assert.approximately(Date.now() - start, 251, 150);
            });

            const p3 = delay(() => Promise.resolve('bar')).then(function (value) {
                assert.strictEqual(value, 'bar');
                assert.approximately(Date.now() - start, 251, 150);
            });

            return Promise.all([p1, p2, p3]);
        }
    },

    'timeout()': {
        'called before the timeout; resolves the promise': function () {
            return Promise.resolve('unused').then(timing.timeout(100, new Error('Error')));
        },

        'called before the timeout; passes function; resolves the promise': function () {
            return Promise.resolve((): string => 'unused').then(timing.timeout(100, new Error('Error')));
        },

        'called after the timeout; rejects the promise': function () {
            return isEventuallyRejected(
                timing.delay(100)('unused')
                    .then(timing.timeout(1, new Error('expected')))
            );
        }
    },

    'DelayedRejection': {
        'is eventually rejected': function () {
            const start = Date.now();
            return new timing.DelayedRejection(101).then(throwImmediatly, function (reason) {
                assert.isUndefined(reason);
                assert.isAbove(Date.now(), start + 99);
                return true;
            });
        },

        'is eventually rejected with error': function () {
            const start = Date.now();
            const expectedError = new Error('boom!');
            return new timing.DelayedRejection(101, expectedError).then(throwImmediatly, function (reason) {
                assert.strictEqual(reason, expectedError);
                assert.isAbove(Date.now(), start + 99);
                return true;
            });
        },

        'works with race': function () {
            return Promise.race([timing.delay(1)('success!'), new timing.DelayedRejection(100)]);
        }
    }
});
Concurrent ATM connection setup reducing need for VP provisioning

A common approach to decreasing end-to-end connection setup delay in ATM networks is to provision partial segments a priori using virtual path connections (VPCs). In this paper, we present an analysis of the effect of such provisioning. The analysis demonstrates that significant savings in bandwidth, and hence network costs, are achieved if the number of provisioned VPCs is minimized, especially if there is uncertainty in the characterization of the traffic pattern. For a given example, we show that to achieve a 0.1% blocking probability requirement, if link capacities are shared (i.e., not partitioned a priori among provisioned VPCs), resource savings of 14.4%, 47.9% or 80% are possible under exact, approximate and poor traffic characterizations, respectively. If the above bandwidth savings are to be achieved in networks that use traditional node-by-node connection setup approaches, end-to-end connection setup delay will increase. In this paper, we propose a new connection setup algorithm that allows the savings in bandwidth obtained through sharing while at the same time controlling the connection setup delay.
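Blocking probabilities of the kind quoted above are conventionally computed with loss-system models such as Erlang-B. As an illustration only (the abstract does not specify the paper's traffic model, and the offered load below is a made-up value), the following Go sketch uses the standard Erlang-B recurrence B(0) = 1, B(n) = a·B(n−1)/(n + a·B(n−1)) to show how capacity trades against a 0.1% blocking target:

package main

import "fmt"

// erlangB computes the Erlang-B blocking probability for an offered load of
// "erlangs" on "servers" circuits, via the numerically stable recurrence.
func erlangB(erlangs float64, servers int) float64 {
    b := 1.0
    for n := 1; n <= servers; n++ {
        b = erlangs * b / (float64(n) + erlangs*b)
    }
    return b
}

func main() {
    load := 100.0 // assumed offered traffic, in erlangs (illustrative only)
    for capacity := 100; capacity <= 140; capacity += 10 {
        fmt.Printf("capacity=%3d  blocking=%.4f%%\n", capacity, 100*erlangB(load, capacity))
    }
}

Running the loop shows blocking falling steeply with added circuits, which is why partitioning capacity a priori among many VPCs (each seeing only a slice of the load) wastes bandwidth relative to sharing one large pool.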
#ifndef __CONFIG_H__
#define __CONFIG_H__

#define WIFI_SSID ""
#define WIFI_PASS ""

// PIN
#define SD_MISO 2
#define SD_MOSI 15
#define SD_SCLK 14
#define SD_CS   13

/*
 * ETH_CLOCK_GPIO0_IN   - default: external clock from crystal oscillator
 * ETH_CLOCK_GPIO0_OUT  - 50MHz clock from internal APLL output on GPIO0 - possibly an inverter is needed for LAN8720
 * ETH_CLOCK_GPIO16_OUT - 50MHz clock from internal APLL output on GPIO16 - possibly an inverter is needed for LAN8720
 * ETH_CLOCK_GPIO17_OUT - 50MHz clock from internal APLL inverted output on GPIO17 - tested with LAN8720
 */
#define ETH_CLK_MODE ETH_CLOCK_GPIO17_OUT

// Pin# of the enable signal for the external crystal oscillator (-1 to disable for internal APLL source)
#define ETH_POWER_PIN -1

// Type of the Ethernet PHY (LAN8720 or TLK110)
#define ETH_TYPE ETH_PHY_LAN8720

// I²C-address of Ethernet PHY (0 or 1 for LAN8720, 31 for TLK110)
#define ETH_ADDR 1

// Pin# of the I²C clock signal for the Ethernet PHY
#define ETH_MDC_PIN 23

// Pin# of the I²C IO signal for the Ethernet PHY
#define ETH_MDIO_PIN 18

#define NRST 5

#define PCIE_PWR 34
#define PCIE_TX  33
#define PCIE_RX  35
#define PCIE_ADC 36
#define PCIE_LED 37
#define PCIE_RST 32

#define LEDS_PIN 12

#endif
// Run starts the primary application. It handles starting background services,
// populating package globals & structures, and clean up tasks.
func Run(c config.Config) error {
    var err error

    cfg = c

    log = logrus.New()
    if cfg.Debug {
        log.Level = logrus.DebugLevel
        log.Debug("Enabling Debug Logging")
    }
    if cfg.DisableLogging {
        log.Level = logrus.FatalLevel
    }

    srv = &server{
        httpRouter: httprouter.New(),
    }
    srv.httpServer = &http.Server{
        Addr:    cfg.ListenAddr,
        Handler: srv.httpRouter,
    }
    srv.httpRouter.SaveMatchedRoutePath = true

    if cfg.EnableTLS {
        if cfg.GenCerts {
            cfg.CertFile = "/tmp/cert"
            cfg.KeyFile = "/tmp/key"
            log.Infof("Certificate Generation was enabled, creating new test certs at %s and %s", cfg.CertFile, cfg.KeyFile)
            err := testcerts.GenerateCertsToFile(cfg.CertFile, cfg.KeyFile)
            if err != nil {
                return fmt.Errorf("Could not generate test certificates - %s", err)
            }
            defer os.Remove(cfg.CertFile)
            defer os.Remove(cfg.KeyFile)
        }

        srv.httpServer.TLSConfig = &tls.Config{
            MinVersion: tls.VersionTLS12,
            CipherSuites: []uint16{
                tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
                tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
            },
        }
    }

    srv.httpRouter.GET("/health", srv.middleware(srv.Health))

    mocked, err = mocks.FromFile(cfg.MocksFile)
    if err != nil {
        return err
    }

    for m, r := range mocked.Routes {
        log.Infof("Registering mock %s with path %s", m, r.Path)
        srv.httpRouter.GET(r.Path, srv.middleware(srv.MockHandler))
        srv.httpRouter.POST(r.Path, srv.middleware(srv.MockHandler))
        srv.httpRouter.PUT(r.Path, srv.middleware(srv.MockHandler))
        srv.httpRouter.DELETE(r.Path, srv.middleware(srv.MockHandler))
    }

    log.Infof("Starting Listener on %s", cfg.ListenAddr)
    if cfg.EnableTLS {
        err := srv.httpServer.ListenAndServeTLS(cfg.CertFile, cfg.KeyFile)
        if err != nil {
            if err == http.ErrServerClosed {
                return ErrShutdown
            }
            return err
        }
    }

    err = srv.httpServer.ListenAndServe()
    if err != nil {
        if err == http.ErrServerClosed {
            return ErrShutdown
        }
        return err
    }

    return nil
}
Dragon (#1, last edited 2014-06-29 06:52:47):

Hi again everyone! I am happy to announce Dragon Invitational Tournament #3! With help from my sponsor http://mechanische-tastatur.de/ I have made another StarCraft II tournament featuring some of the top StarCraft II players.

Players:
Terran (T): Taeja, Marineking, MMA, Ryung, Keen
Zerg (Z): Hyun, Sleep, DRG, Leenock, Ragnarok, Life, Impact
Protoss (P): First, Seed, Ruin, Daisy

Dates: July 8, 13, 14
Round of 16 (Bo3): Tuesday, Jul 08 12:00pm GMT (GMT+00:00)
Round of 8 (Bo3): Sunday, Jul 13 12:00pm GMT (GMT+00:00)
Semi-finals (Bo3), 3rd/4th place (Bo5) and finals (Bo5): Monday, Jul 14 12:00pm GMT (GMT+00:00)

Prize pool distribution:
1st place $1200
2nd place $500
3rd place $300

Bracket: http://binarybeast.com/xSC214062910#
Stream link: http://www.twitch.tv/dragon

I will cast the entire tournament in English. Please come and watch. Again, thank you to my sponsor http://mechanische-tastatur.de/ for helping make Dragon Invitational Tournament #3 happen.

Dragon (#2), quoting the original June 23 2014 announcement, in which the dates were still undecided: "Hi again everyone! I am happy to announce Dragon Invitational Tournament #3! I have decided to put $2000 of my own money into a StarCraft II tournament featuring some of the top StarCraft II players. Dates: TBD. Prize pool distribution: 1st place $1200, 2nd place $500, 3rd place $300. Stream link: http://www.twitch.tv/dragon. I will cast the entire tournament in English. Please come and watch. Sponsor: mechanische-tastatur.de"

UlTiMa00 (#3): invite maru pleaseeeee :D

Killmouse (#4): Dragon rich guy yo

qcHanHan (#5): awesoooooooooooooooooooooome! :DD

Edpayasugo (#6): Awesome!

eurTsItniH (#7): You are awesome Dragon!

Twine (#8): If you invite maru, innovation, bbyong, zest, rain, Stats, soO, Soulkey & Ragnarok I will send you 3 kebabs to korea.
killercrock (#9): plz Maru, Flash, TY - make it happen ^_^

ETisME (#10): Dragon. I like you very very VERY much. I guess you earned a lot of 69-cent donations hahaha. I am gonna donate some time when I get my PayPal account fixed

Xinzoe (#11): !!!! dragon <3. don't invite innovation tho, i want to see a new winner hehe

covetousrat (#12): MARU MARU MARU!!

DreamR (#13): for the love of god, INCLUDE maru and mkp. or even scarlett and polt. their bo? has been on point recently with RBBG's MLG that just went by.

shabby (#14): Maru, Scarlett, JD, San, MC, Taeja, ssO, Soulkey, Innovation and Zest!

Faust852 (#15), quoting Twine's kebab offer in #8: French national currency!

Blargh (#16): More Innovation please! And if you want to be super awesome, make the finals Bo7!!

shid0x (#17): Pretty cool

Extenz (#18): dragon so rich

ParthB (#19): Dragon ♥ I love these tournaments

imrusty269 (#20): Dragon is Innovation's new sponsor?
// Bytes is deprecated. Use Find instead.
func (b *Box) Bytes(name string) []byte {
    bb, _ := b.Find(name)
    oncer.Deprecate(0, "github.com/gobuffalo/packr/v2#Box.Bytes", "Use github.com/gobuffalo/packr/v2#Box.Find instead.")
    return bb
}
@classmethod  # assumed: the receiver is `cls`, so this is presumably declared as a classmethod
def add_compatible_plugins(cls, cluster):
    for plugin in cls.get_compatible_plugins(cluster):
        plugin_attributes = dict(plugin.attributes_metadata)
        plugin_attributes.pop('metadata', None)
        cls.create({
            'cluster_id': cluster.id,
            'plugin_id': plugin.id,
            'enabled': False,
            'attributes': plugin_attributes
        })
/*
 * Copyright (c) 2015 DataTorrent, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datatorrent.lib.appdata.schemas;

import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.commons.io.IOUtils;

/**
 * This class holds utility methods for processing JSON.
 * @since 3.0.0
 */
public class SchemaUtils
{
  /**
   * This constructor should not be used.
   */
  private SchemaUtils()
  {
    //Do nothing
  }

  /**
   * This is a utility method which loads the contents of a resource file into a string.
   * @param resource The resource file whose contents need to be loaded.
   * @return The contents of the specified resource file.
   */
  public static String jarResourceFileToString(String resource)
  {
    StringWriter stringWriter = new StringWriter();
    try {
      InputStream is = SchemaUtils.class.getClassLoader().getResourceAsStream(resource);
      Preconditions.checkArgument(is != null, resource + " could not be found in the resources.");
      IOUtils.copy(is, stringWriter);
    } catch(IOException ex) {
      throw new RuntimeException(ex);
    }
    return stringWriter.toString();
  }

  /**
   * This is a utility method that loads the contents of the given input stream into a string.
   * @param inputStream The input stream to read from.
   * @return The contents of the given input stream as a String.
   */
  public static String inputStreamToString(InputStream inputStream)
  {
    StringWriter stringWriter = new StringWriter();
    try {
      IOUtils.copy(inputStream, stringWriter);
    } catch(IOException ex) {
      throw new RuntimeException(ex);
    }
    return stringWriter.toString();
  }

  /**
   * This is a utility method to check that the given JSONObject has the given keys.
   * @param jo The {@link JSONObject} to check.
   * @param fields The keys in the {@link JSONObject} to check.
   * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
   */
  public static boolean checkValidKeys(JSONObject jo, Fields fields)
  {
    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();
    Set<String> fieldSet = fields.getFields();

    while(keyIterator.hasNext()) {
      String key = keyIterator.next();
      if(!fieldSet.contains(key)) {
        return false;
      }
    }

    return true;
  }

  /**
   * This is a utility method to check that the given JSONObject has the given keys.
   * It throws an {@link IllegalArgumentException} if it doesn't contain all the given keys.
   * @param jo The {@link JSONObject} to check.
   * @param fields The keys in the {@link JSONObject} to check.
   */
  public static void checkValidKeysEx(JSONObject jo, Fields fields)
  {
    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();
    Set<String> fieldSet = fields.getFields();

    while(keyIterator.hasNext()) {
      String key = keyIterator.next();
      if(!fieldSet.contains(key)) {
        throw new IllegalArgumentException("The key " + key + " is not valid.");
      }
    }
  }

  /**
   * This is a utility method to check that the given JSONObject has the given keys.
   * @param jo The {@link JSONObject} to check.
   * @param fieldsList The keys in the {@link JSONObject} to check.
   * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
   */
  public static boolean checkValidKeys(JSONObject jo, List<Fields> fieldsList)
  {
    for(Fields fields: fieldsList) {
      if(checkValidKeys(jo, fields)) {
        return true;
      }
    }

    return false;
  }

  /**
   * This is a utility method to check that the given JSONObject has the given keys.
   * It throws an {@link IllegalArgumentException} if it doesn't contain all the given keys.
   * @param jo The {@link JSONObject} to check.
   * @param fieldsList The keys in the {@link JSONObject} to check.
   * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
   */
  public static boolean checkValidKeysEx(JSONObject jo, List<Fields> fieldsList)
  {
    for(Fields fields: fieldsList) {
      if(checkValidKeys(jo, fields)) {
        return true;
      }
    }

    Set<String> keys = Sets.newHashSet();
    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();

    while(keyIterator.hasNext()) {
      String key = keyIterator.next();
      keys.add(key);
    }

    throw new IllegalArgumentException("The given json object has an invalid set of keys: " + keys
        + "\nOne of the following key combinations was expected:\n" + fieldsList);
  }

  public static Map<String, String> convertFieldToType(Map<String, Type> fieldToType)
  {
    Map<String, String> fieldToTypeString = Maps.newHashMap();

    for(Map.Entry<String, Type> entry: fieldToType.entrySet()) {
      String field = entry.getKey();
      String typeString = entry.getValue().name();

      fieldToTypeString.put(field, typeString);
    }

    return fieldToTypeString;
  }

  public static JSONArray findFirstKeyJSONArray(JSONObject jo, String key)
  {
    if(jo.has(key)) {
      try {
        JSONArray jsonArray = jo.getJSONArray(key);
        return jsonArray;
      } catch(JSONException ex) {
        throw new RuntimeException(ex);
      }
    }

    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();

    while(keyIterator.hasNext()) {
      String childKey = keyIterator.next();

      JSONArray childJa = null;
      try {
        childJa = jo.getJSONArray(childKey);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJa != null) {
        JSONArray result = findFirstKeyJSONArray(childJa, key);
        if(result != null) {
          return result;
        }
        continue;
      }

      JSONObject childJo = null;
      try {
        childJo = jo.getJSONObject(childKey);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJo != null) {
        JSONArray result = findFirstKeyJSONArray(childJo, key);
        if(result != null) {
          return result;
        }
      }
    }

    return null;
  }

  public static JSONArray findFirstKeyJSONArray(JSONArray ja, String key)
  {
    for(int index = 0; index < ja.length(); index++) {
      JSONArray childJa = null;
      try {
        childJa = ja.getJSONArray(index);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJa != null) {
        JSONArray result = findFirstKeyJSONArray(childJa, key);
        if(result != null) {
          return result;
        }
        continue;
      }

      JSONObject childJo = null;
      try {
        childJo = ja.getJSONObject(index);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJo != null) {
        JSONArray result = findFirstKeyJSONArray(childJo, key);
        if(result != null) {
          return result;
        }
      }
    }

    return null;
  }

  public static JSONObject findFirstKeyJSONObject(JSONObject jo, String key)
  {
    if(jo.has(key)) {
      try {
        JSONObject jsonObject = jo.getJSONObject(key);
        return jsonObject;
      } catch(JSONException ex) {
        throw new RuntimeException(ex);
      }
    }

    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();

    while(keyIterator.hasNext()) {
      String childKey = keyIterator.next();

      JSONArray childJa = null;
      try {
        childJa = jo.getJSONArray(childKey);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJa != null) {
        JSONObject result = findFirstKeyJSONObject(childJa, key);
        if(result != null) {
          return result;
        }
        continue;
      }

      JSONObject childJo = null;
      try {
        childJo = jo.getJSONObject(childKey);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJo != null) {
        JSONObject result = findFirstKeyJSONObject(childJo, key);
        if(result != null) {
          return result;
        }
      }
    }

    return null;
  }

  public static JSONObject findFirstKeyJSONObject(JSONArray ja, String key)
  {
    for(int index = 0; index < ja.length(); index++) {
      JSONArray childJa = null;
      try {
        childJa = ja.getJSONArray(index);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJa != null) {
        JSONObject result = findFirstKeyJSONObject(childJa, key);
        if(result != null) {
          return result;
        }
        continue;
      }

      JSONObject childJo = null;
      try {
        childJo = ja.getJSONObject(index);
      } catch(JSONException ex) {
        //Do nothing
      }

      if(childJo != null) {
        JSONObject result = findFirstKeyJSONObject(childJo, key);
        if(result != null) {
          return result;
        }
      }
    }

    return null;
  }

  /**
   * Converts the given JSONObject into a {@link Map}.
   * @param jo The {@link JSONObject} to convert.
   * @return The converted {@link JSONObject}.
   */
  public static Map<String, String> extractMap(JSONObject jo)
  {
    Map<String, String> resultMap = Maps.newHashMap();
    @SuppressWarnings("unchecked")
    Iterator<String> keyIterator = jo.keys();

    while(keyIterator.hasNext()) {
      String key = keyIterator.next();
      String value;

      try {
        value = jo.getString(key);
      } catch(JSONException ex) {
        throw new RuntimeException(ex);
      }

      resultMap.put(key, value);
    }

    return resultMap;
  }

  /**
   * This is a utility method which creates a {@link JSONObject} out of the given map.
   * @param map The map to convert into a {@link JSONObject}.
   * @return The converted map.
   */
  public static JSONObject createJSONObject(Map<String, String> map)
  {
    JSONObject jo = new JSONObject();

    for(Map.Entry<String, String> entry: map.entrySet()) {
      String key = entry.getKey();
      String value = entry.getValue();

      try {
        jo.put(key, value);
      } catch(JSONException ex) {
        throw new RuntimeException(ex);
      }
    }

    return jo;
  }

  private static final Logger LOG = LoggerFactory.getLogger(SchemaUtils.class);
}
// Format formats a string of text to a specified width.
// Note: it indexes bytes, so multi-byte UTF-8 sequences can be split
// mid-character; callers with non-ASCII text should convert to runes first.
func Format(text string, width int) (result string) {
    if len(text) < width {
        return text
    }

    index := width
    previousIndex := 0
    for index < len(text) {
        // Scan backwards up to one full width looking for a space to break on.
        spaceExist := false
        for i := index; i > index-width; i-- {
            if unicode.IsSpace(rune(text[i])) {
                index = i
                spaceExist = true
                break
            }
        }

        result += text[previousIndex:index] + "\n"
        if spaceExist {
            previousIndex = index + 1 // skip the space itself
        } else {
            previousIndex = index // no space found: hard break mid-word
        }
        index += width
    }
    result += text[previousIndex:]

    return result
}
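A quick usage sketch for the wrapper above, assuming Format lives in the same package as this main function (it is not redefined here), and sticking to ASCII because of the byte-indexing caveat noted in the comments:

package main

import "fmt"

func main() {
    text := "The quick brown fox jumps over the lazy dog"
    // With width 20 the function breaks at the last space before each
    // 20-byte boundary, producing:
    //   The quick brown fox
    //   jumps over the lazy
    //   dog
    fmt.Println(Format(text, 20))
}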
package org.bian.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.bian.dto.CRConsumerLoanFulfillmentArrangementInitiateInputModelConsumerLoanFulfillmentArrangementInstanceRecordDateType;

import javax.validation.Valid;

/**
 * BQRepaymentUpdateInputModelConsumerLoanFulfillmentArrangementInstanceRecord
 */
public class BQRepaymentUpdateInputModelConsumerLoanFulfillmentArrangementInstanceRecord {

    private String loanOutstandingBalance = null;

    private CRConsumerLoanFulfillmentArrangementInitiateInputModelConsumerLoanFulfillmentArrangementInstanceRecordDateType dateType = null;

    /**
     * `status: Registered`
     * iso-link: https://www.iso20022.org/standardsrepository/public/wqt/Description/mx/dico/bc/_E1rHgsTGEeChad0JzLk7QA_-1068889728/elements/_E1rHhMTGEeChad0JzLk7QA_-552935123
     * bian-reference: LoanAccount (as Account).AccountBalance
     * general-info: The outstanding balance
     * @return loanOutstandingBalance
     **/
    public String getLoanOutstandingBalance() {
        return loanOutstandingBalance;
    }

    public void setLoanOutstandingBalance(String loanOutstandingBalance) {
        this.loanOutstandingBalance = loanOutstandingBalance;
    }

    /**
     * Get dateType
     * @return dateType
     **/
    public CRConsumerLoanFulfillmentArrangementInitiateInputModelConsumerLoanFulfillmentArrangementInstanceRecordDateType getDateType() {
        return dateType;
    }

    public void setDateType(CRConsumerLoanFulfillmentArrangementInitiateInputModelConsumerLoanFulfillmentArrangementInstanceRecordDateType dateType) {
        this.dateType = dateType;
    }
}
// eventuate-tram-sagas-spring-reactive-common/src/test/java/io/eventuate/tram/sagas/spring/reactive/common/ReactiveSagaLockManagerIntegrationTestConfiguration.java
package io.eventuate.tram.sagas.spring.reactive.common;

import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

@Configuration
@EnableAutoConfiguration
@Import(EventuateReactiveTramSagaCommonConfiguration.class)
public class ReactiveSagaLockManagerIntegrationTestConfiguration {
}
// fetchQueryExecutionsInternal fetches query executions and sends them to resultCh.
func (a *Athenai) fetchQueryExecutionsInternal(ctx context.Context, maxPages float64, resultCh chan *Either, wg *sync.WaitGroup) error {
    pageNum := 1.0
    callback := func(page *athena.ListQueryExecutionsOutput, lastPage bool) bool {
        // Resolve each page's execution IDs concurrently; results and
        // errors are multiplexed onto resultCh as Either values.
        wg.Add(1)
        go func() {
            defer wg.Done()
            bgqx, err := a.client.BatchGetQueryExecutionWithContext(ctx, &athena.BatchGetQueryExecutionInput{
                QueryExecutionIds: page.QueryExecutionIds,
            })
            if err != nil {
                resultCh <- &Either{Right: errors.Wrap(err, "BatchGetQueryExecution API error")}
            } else {
                resultCh <- &Either{Left: bgqx.QueryExecutions}
            }
        }()

        defer func() { pageNum++ }()
        log.Printf("# of pages: current = %.0f, max = %.0f\n", pageNum, maxPages)
        // Keep paging until the last page or the page budget is exhausted.
        return !lastPage && pageNum < maxPages
    }

    err := a.client.ListQueryExecutionsPagesWithContext(ctx, &athena.ListQueryExecutionsInput{}, callback)
    if err != nil {
        return errors.Wrap(err, "ListQueryExecutions API error")
    }
    return nil
}
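The function above fans results out to a channel of Either values. A self-contained sketch of the same producer/consumer pattern, with plain strings standing in for the Athena page results (no AWS types involved, and this Either is a local stand-in, not the one from the package above), may make the wiring easier to see:

package main

import (
    "fmt"
    "sync"
)

// Either carries either a success value (Left) or an error (Right)
// over a single channel.
type Either struct {
    Left  interface{}
    Right error
}

func main() {
    resultCh := make(chan *Either)
    var wg sync.WaitGroup

    // Producers: one goroutine per "page", as in the paging callback above.
    for page := 1; page <= 3; page++ {
        wg.Add(1)
        go func(p int) {
            defer wg.Done()
            resultCh <- &Either{Left: fmt.Sprintf("page %d results", p)}
        }(page)
    }

    // Close the channel once all producers have finished.
    go func() {
        wg.Wait()
        close(resultCh)
    }()

    // Consumer: drain until closed, splitting successes from errors.
    for r := range resultCh {
        if r.Right != nil {
            fmt.Println("error:", r.Right)
            continue
        }
        fmt.Println("ok:", r.Left)
    }
}

The design point is that a single channel carries both outcomes, so the consumer never has to select over separate value and error channels.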
// chapter_designPatterns/src/main/java/ru/job4j/basepatterns/structural/decorator/SeniorJavaDeveloper.java
package ru.job4j.basepatterns.structural.decorator;

public class SeniorJavaDeveloper extends DeveloperDecorator {

    public SeniorJavaDeveloper(Developer developer) {
        super(developer);
    }

    public String makeCodeReview() {
        return " Make code review.";
    }

    @Override
    public String makeJob() {
        return super.makeJob() + makeCodeReview();
    }
}
Taming Tris(bipyridine)ruthenium(II) and Its Reactions in Water by Capture/Release with Shape-Switchable Symmetry-Matched Cyclophanes

Electron/proton transfers in water proceeding from ground/excited states are the elementary reactions of chemistry. These reactions of an iconic class of molecules, polypyridineRu(II), are now controlled by capturing or releasing three of them with hosts that are shape-switchable. Reversible erection or collapse of the host walls allows such switchability. Some reaction rates are suppressed by factors of up to 120 by inclusive binding of the metal complexes. This puts nanometric coordination chemistry in a box that can be opened or shut as necessary. Such second-sphere complexation can allow considerable control to be exerted over photocatalysis, electrocatalysis, and luminescent sensing involving polypyridineRu(II) compounds. The capturing states of hosts are symmetry-matched to guests for selective binding and display submicromolar affinities. A perching complex, which is an intermediate state between capturing and releasing states, is also demonstrated.

Materials and Methods

S1. Synthesis schemes, preparative procedures and characterization data of all compounds used.

Scheme S1. Synthesis steps and subsequent measurements on these compounds conducted by C.Y.Y. No stereochemistry is intended in the molecular structures.

Scheme S2. Synthesis steps and subsequent measurements on these compounds conducted by H.Y.L., except where noted otherwise in section S8. No stereochemistry is intended in the molecular structures.

S1.1. ¹H and ¹³C NMR spectra for 2-5 and 10. These are given in Figs. S1-S5 respectively.

S1.2. Synthesis of bis(4-((5-bromopentyl)oxy)phenyl)methanone (13). 4,4'-Dihydroxybenzophenone (2.0 g, 9.3 mmol) was dissolved in acetone (300 mL, HPLC grade) and added dropwise with the aid of a dropping funnel over 3 hours to a mixture of acetone (100 mL, HPLC grade), 1,5-dibromopentane (3.18 mL, 1.688 g/cm³, 23.3 mmol) and potassium carbonate (6.45 g, 46.7 mmol) being refluxed in a three-neck flask by heating in an oil bath. The dropping funnel and reflux condenser were connected to drying tubes. The mixture was then refluxed for 12 hours. The hot reaction mixture was then filtered under gravity. TLC on silica of the filtrate showed two spots, one of which corresponded to 4,4'-dihydroxybenzophenone. The solution was then evaporated to give a white solid. The solid was purified using flash silica chromatography, eluting with 70% petroleum ether and 30% ethyl acetate, yielding the desired white plate-like solid (2.635 g, 55%).

The residual solid was then treated with chloroform (150 mL). The mixture was filtered, and the filtrate was collected. The solvent was evaporated. The residual solid was dissolved in chloroform (20 mL) and heated until boiling. After cooling, methanol (200 mL) was added, which turned the transparent solution cloudy. The white solidified oil was then collected by filtration under suction (0.40 g, 0.47 mmol, 32.9%).

S1.8. Synthesis of 2,12,22-trihydroxy-4,10,14,20,24,30-hexaoxa-1,3,11,13,21,23(1,4)-hexabenzenacyclotriacontaphane-1³,3³,11²,13³,21²,23³-hexacarboxylic acid (3). 2 (0.170 g, 0.153 mmol) was dissolved in water (8.3 mL) with the aid of a few drops of dilute sodium hydroxide; sodium borohydride (0.174 g, 4.59 mmol) was then added and the mixture stirred overnight at room temperature. A few drops of acetic acid were then added to destroy any unreacted sodium borohydride.
The mixture was then acidified to approximately pH 1 with 4 M hydrochloric acid. The precipitated product was then filtered, washed with water and dried (0.150 g, 87.8%).

S1.8a. Optimized NaBH₄ reduction procedure for conversion of 2 to 3. 2 (56 mg, 0.05 mmol) was dissolved in 0.4 M NaOH (5.0 mL); sodium borohydride (10 equiv.) was then added and the mixture kept at 60 °C for 5 min with vigorous stirring. The mixture was then cautiously neutralized with dilute HCl. This was centrifuged, the supernatant was decanted, more distilled water was added and the process repeated. The pellet was then flushed from the centrifuge vial with distilled water and dried under reduced pressure at 60 °C to give a white solid (52 mg, 93%). ¹H-NMR analysis in D₂O-NaOD confirmed that 3 is the sole component.

S1.9. KMnO₄ oxidation procedure for conversion of 3 back to 2. 3 (10 mg, 0.009 mmol) was dissolved in distilled water (13.4 mL) with NaOH (96.7 mg, 2.4 mmol). KMnO₄ (4.2 mg, 0.028 mmol) was then added and the mixture was kept at 40 °C for 24 hours with vigorous stirring. After this time, methanol (0.67 mL) was added and the mixture kept for 1 hour, by which point the purple color had faded. The solution was filtered and acidified with dilute HCl. This was centrifuged, the supernatant was decanted, more distilled water was added and the process repeated. The pellet was then flushed from the centrifuge vial with distilled water and dried under reduced pressure at 60 °C to give a white solid (8 mg, 80%). ¹H-NMR analysis in D₂O-NaOD confirmed that 2 is the sole component.

S1.9a. Optimized KMnO₄ oxidation procedure for conversion of 3 back to 2. 3 (56 mg, 0.05 mmol) was dissolved in 0.4 M NaOH (5.0 mL). KMnO₄ (3 equiv.) was then added and the solution was kept at 60 °C for 5 min with vigorous stirring. For work-up, methanol (0.4 mL) was added and the mixture kept at 60 °C for 5 min with vigorous stirring. The dark mixture was centrifuged and the supernatant was acidified with dilute HCl. This was centrifuged again, the supernatant was decanted, more distilled water was added and the process repeated. The pellet was then flushed from the centrifuge vial with distilled water and dried under reduced pressure at 60 °C to give a white solid (54 mg, 96%). ¹H-NMR analysis in D₂O-NaOD confirmed that 2 is the sole component.

S1.10. Stability test of 1 under the oxidation conditions which were applied to 3 with KMnO₄ in water. Judging by its smaller-sized relative [12], trialcohol 3 is less oxidizable in one-electron processes than 1 (E_ox +1.7 cf. +1.3 V [29] vs SCE, respectively). However, our oxidation condition of KMnO₄ with methanol work-up leaves 1 with no net change. 1 (dichloride salt hexahydrate, 5 mg, 0.0067 mmol) was dissolved in distilled water (1.5 mL) with NaOH (72.1 mg, 1.8 mmol). KMnO₄ (1.1 mg, 0.007 mmol) was then added and the mixture was kept at 40 °C for 24 hours with vigorous stirring. After this time, methanol (0.075 mL) was added and the mixture kept until the green colour changed to red. The solution was filtered through cotton, hydrochloric acid was added, and the mixture was evaporated to dryness under reduced pressure. ¹H-NMR analysis confirmed that 1 was the sole component.

S1.10a. In situ KMnO₄ oxidation procedure for conversion of 3 to 2 in the presence of 1. 3 (100 mg, 0.089 mmol) was dissolved in distilled water (5 mL) with NaOH (0.2 g) and 1 (dichloride salt hexahydrate, 67 mg, 0.089 mmol). KMnO₄ (0.0424 g, 0.27 mmol) was then added and the mixture was kept at 60 °C for 5 minutes with vigorous stirring.
After this time, methanol (0.67 mL) was added and the mixture kept until the purple color faded. The solution was filtered to remove MnO₂, and dilute HCl and NaPF₆ were added. This was centrifuged, the supernatant was decanted, more distilled water was added and the process repeated. The pellet was then flushed from the centrifuge vial with distilled water and dried under reduced pressure at 60 °C to give a red solid (167 mg, 95%). ¹H-NMR analysis in dmso-d₆ confirmed that 2 and 1 are the sole components.

S1.11. Stability test of 1 under the reduction conditions which were applied to 2 with NaBH₄ in water. Judging by its smaller-sized relative [12], triketone 2 is less reducible than 1 (E_red −2.3 cf. −1.3 V [29], respectively). However, our reduction condition of NaBH₄ with aqueous work-up leaves 1 with no net change. 1 (dichloride salt hexahydrate, 5 mg, 0.0067 mmol) was dissolved in water (1.5 mL), sodium borohydride (7.6 mg, 0.2 mmol) was added, and the mixture stirred for 24 hours at room temperature (ca. 20 °C). After that, the reaction mixture was still red in colour. Hydrochloric acid was then added to destroy unreacted sodium borohydride, and the mixture was evaporated to dryness under reduced pressure. ¹H-NMR analysis confirmed that 1 was the sole component. Thus, under our chemical redox conditions, 2 and 3 can be interconverted without affecting 1.

S1.11a. In situ NaBH₄ reduction procedure for conversion of 2 to 3 in the presence of 1. 2 (50 mg, 0.045 mmol) and 1 (dichloride salt hexahydrate, 33.7 mg, 0.045 mmol) were dissolved in water (2.5 mL) with the aid of a few drops of dilute NaOH; NaBH₄ (17 mg, 0.45 mmol) was then added and the mixture stirred for 5 minutes at 60 °C. A few drops of acetic acid were then added to destroy any unreacted NaBH₄. Dilute HCl and NaPF₆ were then added. The red precipitate was then filtered, washed with water and dried (85 mg, 96%). ¹H-NMR analysis in dmso-d₆ confirmed that 3 and 1 are the sole components.

S1.12. Synthesis of tetraethyl 5,5'-methylenebis(2-((5-bromopentyl)oxy)isophthalate) (17). 1,5-Dibromopentane (16.73 mL, 1.688 g/mL, 0.123 mol) and potassium carbonate (16.97 g, 0.123 mol) were added to a round-bottom flask with dmf (100 mL, HPLC grade) and heated to 70 °C. 16 [65] (6 g, 6.94 mmol) was added to a dropping funnel with dmf (300 mL, HPLC grade), then added dropwise over 3 hours, and the mixture was heated for a further hour. The dropping funnel and reflux condenser were connected to drying tubes. The solution was then filtered and saturated brine (800 mL) was added. Diethyl ether (325 mL, HPLC grade) was added and the solution was extracted, followed by another 325 mL and then 175 mL. The ethereal extracts were combined, dried with sodium sulfate and then filtered. The solvent was reduced to a small volume on a rotary evaporator. The product was purified using flash silica chromatography, eluting with ethyl acetate:hexane (1:5 v/v), yielding the desired oily product 17 (59.8%).

S2. X-ray crystallography of 8. Fig. S7(ii) (packing diagram in plan view) gives a ball-and-stick representation, with calculated hydrogen positions included. Carbon atoms are shown in grey, oxygen atoms in red and hydrogen atoms in white. The structure was solved using Olex2 [66], with the ShelXS [67] structure solution programme using direct methods, and refined with the ShelXL [67] refinement package using least-squares minimization. All non-hydrogen atoms in Fig. 1B are close to the mean macrocycle plane. The mean macrocycle plane is the mean plane passing through the atoms noted in red circles in the structure below.
The crystallographic information file (cif file) for 8 is given in Fig. S6.

S3. NMR Δδ maps and 2-D ROESY spectra. Δδ maps are reported in Fig. S8 alongside the corresponding sets of ¹H NMR spectra in D₂O under various conditions. In contrast, negligible Δδ values are seen in d-dmso solution, which shows lack of binding and confirms the importance of hydrophobicity for binding in water. 2-D ROESY spectra are reported in Fig. S8a.

MD simulations. A barostat was used to maintain the pressure at 1 bar and the SHAKE algorithm was used to constrain bonds involving hydrogen atoms. A time step of 2 fs was used for all MD simulations. For each host-guest complex, a 500-ns production MD simulation was performed in an NPT ensemble with a target pressure of 1 bar and a pressure coupling constant of 2 ps. Two replica runs were conducted for each complex. Clustering analysis was conducted for the equilibrated MD trajectories of the various host-guest complexes, and the top three most populated snapshots were selected for the subsequent QM/MM calculations.

QM/MM calculations. The representative snapshots obtained from the MD simulations were first subjected to 1,000 steps of steepest-descent energy minimisation, followed by 1,000 steps of conjugate-gradient minimisation, using Amber18 [78]. The QM/MM geometry optimisations were performed using Chemshell 3.7 [81] for all the host-guest complex systems. The QM region consisted of a total of 184-196 atoms, depending on the host molecule, and was treated with the DFT UB3LYP functional [69,70] with D3 dispersion correction and BJ damping [82,83]. The singlet state of 1 was calculated using the def2-TZVP basis set, and the rest of the atoms in the complex were calculated using the def2-SVP basis set. The QM calculations were performed using ORCA 4.2.0 [84,85] and the MM region was defined using DL_POLY. The RIJCOSX approximation and TightSCF criteria were used in the QM calculations. The effect of the solvent environment on the polarization of the QM wavefunction was considered using the electronic embedding scheme. The QM/MM optimised structures were then used for subsequent structural analysis. Although not studied experimentally due to their aqueous insolubility, 8 and its trialcohol counterpart (11) were also examined for their interaction with 1 via MD simulation. The results are broadly similar to those for 2·1 and 3·1, although the binding interactions are weakened in the absence of carboxylate moieties.

S5. Host-dependent luminescence spectroscopy. Conditions for these experiments are reported in the caption to Fig. S16.

S6. In situ switching of host system 2/3 in the presence of 1 by redox cycling and observation of the luminescence signal. 3 (56 mg, 0.05 mmol) was dissolved in 0.4 M NaOH (5.0 mL) which was 10⁻⁴ M in 1. KMnO₄ (3 equiv.) was then added and the solution was kept at 60 °C for 5 min with vigorous stirring. This is the oxidation step of the first redox cycle. For work-up, methanol (0.4 mL) was added and the mixture kept at 60 °C for 5 min with vigorous stirring. After centrifugation, the pale orange supernatant was decanted. An aliquot (0.1 mL) was diluted 10-fold and analyzed in microcuvets by uv-vis absorption spectroscopy and luminescence spectroscopy. The relative luminescence quantum yield was obtained by excitation at 453 nm, using 1 in water as the reference at the same absorbance. NaBH₄ (10 equiv.) was then added and the pale orange solution was kept at 60 °C for 5 min with stirring. This is the reduction step of the first redox cycle.
The solution was cautiously neutralized with 10 M H₂SO₄ and then made alkaline (NaOH). An aliquot (0.1 mL) was diluted 10-fold and the relative luminescence quantum yield was obtained as before. The oxidation step was then repeated to launch the second redox cycle. Work-up and spectroscopic analysis were carried out as before. The reduction step was then conducted to complete the second redox cycle. Work-up and spectroscopic analysis were carried out as before. The third redox cycle was achieved similarly. Salt accumulation, usually found in such experiments involving chemical switching [16], does not interfere significantly with the spectroscopic measurements leading to Figure 5A. Clear 'high-low-high-low-high-low' switching of the relative luminescence quantum yield is found. The luminescence enhancement (LE) factors seen here are slightly smaller than those reported in Table 1 because the concentration conditions are different in the two situations. For control purposes, the same series of redox cycles was carried out on another sample of 1 with the host omitted. These results are also shown in Figure 5A, and their relative luminescence quantum yields remain essentially constant. For reference, the absolute luminescence quantum yield of 1 in water is 0.042 [29].

S7. In situ switching of host system 4/5 in the presence of 1 by redox cycling and observation of the luminescence signal. 5 (68.5 mg, 0.05 mmol) was put through the sequence in Section S6 and the results are shown in Figure 5B. Strong 'high-low-high-low-high-low' switching of the relative luminescence quantum yield is found. The luminescence enhancement (LE) factors seen here are slightly smaller than those reported in Table 1 because the concentration conditions are different in the two situations. For control purposes, the same series of redox cycles was carried out on another sample of 1 with the host omitted. These results are also shown in Figure 5B, and their relative luminescence quantum yields remain essentially constant. Host concentration-dependent luminescence intensities of guests like 1 (I_L) also yield binding constants (β) for host-guest pairs (Table 1, Fig. S16).

S8. Electrochemistry. Analysis of the scan-rate dependence of current in the cyclic voltammograms (Fig. S17) gives the diffusion coefficients of 1, 2·1 and 3·1 as 1.8×10⁻⁶, 2.1×10⁻⁶ and 1.9×10⁻⁸ cm² s⁻¹ respectively, when 1 is in the Ru(II) form. In terms of this parameter, the protection factors offered by hosts 2 and 3 towards electron transfer from 1 to the electrode are 0.9 and 93, respectively. The host protection factor of 93 arises from the ratio of diffusion coefficients, 1.8×10⁻⁶/1.9×10⁻⁸. When 1 is in the Ru(III) form, the diffusion coefficients of 1, 2·1 and 3·1 are 7.5×10⁻⁷, 7.7×10⁻⁷ and 8.7×10⁻⁸ cm² s⁻¹ respectively. This corresponds to host protection factors of 1 (which means no protection) and 9 for 2 and 3, respectively. Conditions for differential pulse voltammetry are given in Fig. S18.

S9. Phenolate quenching of polypyridineRu(II) luminescence. When a host binds with polypyridineRu(II) complex 1 or 6, the latter is protected to some extent from colliding with phenolates, and the degree of luminescence quenching becomes smaller. In order to extract the host protection factors (HPF) caused by complexation, the Stern-Volmer equation [31] is used to obtain quenching rates in 0.1 M NaOH in water. Without host or phenolate being present, the only quencher of the luminescence is O₂ in air.
Equation (S2) is our starting point. When phenolate is added, we get equation (S3). Combining equations (S2) and (S3), we get equation (S4):

$$k'_q = \left(\frac{I_{\text{no phenolate}}}{I_{\text{phenolate}}} - 1\right)\left(\frac{I_0}{I_{\text{no phenolate}}}\right)\frac{1}{\tau_0[\text{phenolate}]} \qquad (S4)$$

where k′q = phenolate quenching rate constant involving 1 or 6 without host, kq = O₂ quenching rate constant involving 1 or 6 without host, I_no phenolate = luminescence intensity of 1 or 6 in water, I_phenolate = luminescence intensity of 1 or 6 and various phenolates in water, I₀ = luminescence intensity of 1 or 6 in argon-bubbled water, and τ₀ = luminescence lifetime: τ₀ = 560 ns for 1 (in water)²⁹ and τ₀ = 960 ns for 6 (in water).²⁹

When a host is added, a new set of equations (S5)-(S7) arises in a similar way, but with a new lifetime τ_host. Combining equations (S5) and (S6), we get equation (S7):

$$k'''_q = \left(\frac{I_{\text{no phenolate with host}}}{I_{\text{phenolate with host}}} - 1\right)\left(\frac{I_{0\text{ with host}}}{I_{\text{no phenolate with host}}}\right)\frac{1}{\tau_{\text{host}}[\text{phenolate}]} \qquad (S7)$$

where k‴q = phenolate quenching rate constant for 1 or 6 with host, k″q = O₂ quenching rate constant for 1 or 6 with host, I_no phenolate with host = luminescence intensity of 1 or 6 in water with host, I_phenolate with host = luminescence intensity of 1 or 6 and phenolate in water with host, and I₀ with host = luminescence intensity of 1 or 6 in argon-bubbled water with host. We note that τ_host/τ₀ = LE, the host-induced luminescence enhancement factor; these values are given in Table 1.

Equations (S4) and (S7) allow us to calculate the phenolate quenching rate constants k′q (for 1 or 6 without host) and k‴q (for 1 or 6 with host). The right-hand sides of equations (S4) and (S7) can each be divided into two factors, which are obtained from the analysis of Fig. S19A, for example, because the phenolates are good quenchers of the luminescence of 1. Sample calculations are shown in Fig. S20 in terms of the processed graphs obtained from the data in Fig. S19A. The graphs in each panel's main set are the phenolate-dependent luminescence changes of 1, 2·1 and 3·1, in which the ordinate is the total luminescence intensity. The inset graphs in each panel have (I_no phenolate/I_phenolate − 1) or (I_no phenolate with host/I_phenolate with host − 1) as the ordinate. In these graphs, only the points whose phenolate concentrations are lower than 2×10⁻³ M are analysed, in order to obtain the initial slopes of the Stern-Volmer plots (a short numerical sketch of this extraction is given after the figure captions below). The quenching rate constants obtained from Fig. S20 are given in Table S1. These allow us to calculate the factors of protection offered by host 2 or 3 against quenching by phenolates. The largest factor seen is 120, for the case of 5·6 with 2,6-dimethylphenolate. It shows the potential of controlling the catalytic processes of coordination complexes by binding them inclusively. The perching complex 2·1 shows a relatively smaller protection factor of 9.9 with 2,6-dimethylphenolate. This allows a degree of tunable control over the properties of polypyridine Ru(II) complexes.

S10. pH-dependent luminescence of 7⁵⁷,⁹⁰ without/with various hosts. This study was conducted by C.Y.Y. Conditions for Fig. 8: pH-dependent luminescence quantum yields of 10⁻⁵ M 7 without/with various hosts (10⁻³ M each) in aerated water with 0.1 M phosphate buffers, excited at isosbestic points. 1 in water is used as the luminescence quantum yield standard.²⁹ 2 and 3 begin to precipitate at pH < 6.3, whereas 4 and 5 begin to precipitate at pH < 4.0.
The green line marks pH = 7, which roughly demarcates the lower-pH region of excited-state deprotonation from the higher-pH region of ground-state deprotonation. The green line also connects with the photograph. Conditions for photograph: luminescence emission of 10⁻⁴ M 7 alone and in the presence of 2, 3, 4 and 5 (10⁻³ M each) in aerated water with 0.1 M phosphate buffer at pH 7.0. Luminescence excited from above at 366 nm. Ground-state pKa values of 7 are also ordered according to the perturbation of its deprotonation equilibrium by the local environment caused by hosts. These values are 10.4 (in the presence of 5), 9.6 (3), 9.1 (2), 9.0 (4) and 8.8 (free).

Signs of Δδ values, whether negative or positive, are symbolized by green or red circles respectively. B. As in A, but guest=1, host=3. C. As in A, but in 0.1 M NaOD/D₂O instead of pD 7.0 buffer, and where guest=6, host=2. D. As in C, but guest=6, host=3. E. As in A, but guest=1, host=4. F. As in A, but guest=1, host=5. G. As in A, but guest=1, host=10. H. As in C, but guest=6, host=4. I. As in C, but guest=6, host=5. J. As in C, but guest=6, host=10. K. As in C, but guest=6, host=5 and at 60 °C rather than the usual 27 °C. L. As in K, but guest=6, host=10. The last two cases were conducted because some signals which were broad at 27 °C were sharpened under these conditions.

Fig. S8a. A. 2-D ROESY spectrum of a mixture of guest 1 and host 2. Conditions are as given in the caption to Figure 2. Peak assignments are also to be found in Figure 2. B. As in A, but for host 3 instead of 2. C. As in A, but for host 4 instead of 2. Peak assignments are to be found in Figure 6. D. As in C, but for host 5 instead of 4. E. As in C, but for host 10 instead of 4.

1.6×10⁻⁶, 2.5×10⁻⁶, 3.16×10⁻⁶, 4×10⁻⁶, 5×10⁻⁶, 6.3×10⁻⁶, 8×10⁻⁶, 1×10⁻⁵, 1.26×10⁻⁵, 1.6×10⁻⁵, 2×10⁻⁵, 2.5×10⁻⁵, 5×10⁻⁵, 1×10⁻⁴, 2×10⁻⁴ and 1×10⁻³ M). The corresponding spectra of the hypothetical mixtures are also shown, where the individual spectra of 6 and the host 5 are summed. The small but significant differences in absorbance at 262 nm between the actual mixture and the arithmetic sum of the host's and guest's absorbances can be analysed to yield log β = 5.0. D. As in C, but with 10 instead of 5, yielding log β = 5.3.

Fig. 2A. B. As in A, but with 5×10⁻³ M 1 + 5×10⁻³ M 2, so that the D value of 1 in the presence of 2 is produced. C. As in A, but with 5×10⁻³ M 1 + 5×10⁻³ M 3, so that the D value of 1 in the presence of 3 is produced. This value is markedly smaller than that found in A. Red data points in the inset allow calculation of D values in the Ru(III) form for each case.
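Returning to Section S9: the extraction of the phenolate quenching rate constant via equation (S4) is simple enough to sketch numerically. The following is an illustration only; the lifetime is the value quoted for 1 above, but the intensities and phenolate concentrations are invented stand-ins, not data from this work.

import numpy as np

# Invented demonstration values (not data from this work).
tau0 = 560e-9                                             # s, lifetime of 1 in water
phenolate = np.array([0.5e-3, 1.0e-3, 1.5e-3, 2.0e-3])    # M, kept below 2e-3 M
I0 = 1.00                                                 # argon-bubbled, no phenolate
I_no_phen = 0.60                                          # aerated, no phenolate
I_phen = np.array([0.52, 0.46, 0.41, 0.37])               # aerated, with phenolate

# Equation (S4): k'_q is the initial slope of (I_no_phen/I_phen - 1) versus
# [phenolate], multiplied by (I0/I_no_phen)/tau0.
y = I_no_phen / I_phen - 1.0
slope = np.polyfit(phenolate, y, 1)[0]                    # M^-1
k_q_prime = slope * (I0 / I_no_phen) / tau0               # M^-1 s^-1
print(f"k'_q ~ {k_q_prime:.2e} M^-1 s^-1")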
The evolution of an area centralis and visual streak in the marsupial Setonix brachyurus. The distribution, morphology, size, and number of cells in the retinal ganglion layer of the marsupial Setonix brachyurus ("quokka") were studied from 25 days postnatal to adulthood using Nissl-stained wholemounts. The total cell population was evenly distributed up to 50 days, but by 75 days the highest densities were generally observed in a broad band extending across the nasotemporal axis. At 87 days, a temporally situated area centralis was seen for the first time. This was embedded in a horizontally aligned visual streak, the nasal arm of which contained areas of high density. By 106 days, densities in the area centralis had stabilized, while peripheral values were higher than adult levels even at 180 days. In the adult, the area centralis was surrounded by a weak visual streak. Retinal area increased steadily during development to reach 168 mm² at 180 days, the adult range being 225-250 mm². All cells in the ganglion layer appeared undifferentiated and rounded at 33 days, with soma diameters of 3-6 micrometers; by 70 days diameters had increased to 4-12 micrometers and some cells had axon hillocks containing Nissl substance. From 87 days we distinguished ganglion cells, which constituted 54-63% of the total. These were identified by deeply stained Nissl substance and had diameters of 7-18 micrometers, compared to 7-23 micrometers at 143 days and 7-24 micrometers in the adult; the remaining cells, termed glia/interneurons, were 5-8 micrometers throughout. Only ganglion cells were organized into an area centralis and visual streak. Glia/interneurons were evenly distributed except at the extreme periphery, where their density increased. In sectioned material, the ganglion layer was distinct from 25 days, while the neuroblastic layer separated only between 48 and 85 days. From 25 to 250 days the total number of cells in the ganglion layer remained similar to the adult range of 336,000-393,000. At both 87 days and in adults, optic axon counts fell between 180,000 and 224,000, close to ganglion cell estimates. At 25 and 34 days, respectively, optic axon numbers were 75,000 and 172,000. Myelination was absent at 25 and 34 days, 3% at 87 days, and almost 100% in adults. Mechanisms are discussed whereby the area centralis and visual streak may evolve from an even distribution of cells while their number remains constant; migration is considered likely to be important.
/**
 *  Sapi.cpp
 *
 *  This file holds the implementation for the Php::sapi_name() function
 *
 *  @author <NAME> <<EMAIL>>
 */

/**
 *  Dependencies
 */
#include "includes.h"

/**
 *  Open PHP namespace
 */
namespace Php {

/**
 *  Retrieve the sapi name we're running on
 *  @return const char*
 */
const char *sapi_name()
{
    return sapi_module.name;
}

/**
 *  End of namespace
 */
}
package api

import (
	"bufio"
	"fmt"
	"github.com/apache/thrift/lib/go/thrift"
	"github.com/go-chassis/huawei-apm/pkg/fifo"
	"github.com/go-chassis/huawei-apm/thrift/gen-go/apm"
	"github.com/go-mesh/openlogging"
	"github.com/openzipkin-contrib/zipkin-go-opentracing/thrift/gen-go/zipkincore"
	"sync"
)

var once sync.Once
var defaultService APM

// every API should be called after a certain interval
type APM interface {
	// ReportDiscoveryInfo send info to APM
	ReportDiscoveryInfo(info *apm.TDiscoveryInfo) error // one time per 5 min
	ReportKPI(message []*apm.TKpiMessage) error         // one time per 1 min, source service can be several, so it is a slice
	ReportTracing(span []*zipkincore.Span) error        // one time per 1 min
}

type DefaultAPM struct {
	writer *bufio.Writer
}

func GetAPMClient(app, serviceName string) (APM, error) {
	var err error
	once.Do(func() {
		defaultAPM := &DefaultAPM{}
		defaultAPM.writer, err = fifo.NewWriter(app, serviceName)
		if err != nil {
			openlogging.Error("can not create writer:" + err.Error())
		}
		defaultService = defaultAPM
	})
	return defaultService, err
}

func (da *DefaultAPM) ReportDiscoveryInfo(info *apm.TDiscoveryInfo) error {
	t := thrift.NewTMemoryBuffer()
	p := thrift.NewTBinaryProtocolTransport(t)
	if err := info.Write(p); err != nil {
		openlogging.Error("can not serialize discovery: " + err.Error())
		return err
	}
	n, err := da.writer.Write(t.Buffer.Bytes())
	if err != nil {
		openlogging.Error("can not report discovery: " + err.Error())
		return err
	}
	err = da.writer.Flush()
	if err != nil {
		openlogging.Error("can not flush discovery: " + err.Error())
		return err
	}
	openlogging.Debug(fmt.Sprintf("write inventory size %d to fifo", n))
	return nil
}

func (da *DefaultAPM) ReportKPI(messages []*apm.TKpiMessage) error {
	for _, m := range messages {
		t := thrift.NewTMemoryBuffer()
		p := thrift.NewTBinaryProtocolTransport(t)
		if err := m.Write(p); err != nil {
			openlogging.Error("can not serialize kpi: " + err.Error())
			return err
		}
		openlogging.Debug(t.String())
		_, err := da.writer.Write(t.Buffer.Bytes())
		if err != nil {
			openlogging.Error("can not report kpi: " + err.Error())
			return err
		}
		err = da.writer.Flush()
		if err != nil {
			openlogging.Error("can not flush kpi: " + err.Error())
			return err
		}
	}
	return nil
}

func (da *DefaultAPM) ReportTracing(spans []*zipkincore.Span) error {
	t := thrift.NewTMemoryBuffer()
	p := thrift.NewTBinaryProtocolTransport(t)
	if err := p.WriteListBegin(thrift.STRUCT, len(spans)); err != nil {
		openlogging.Error("can not serialize spans: " + err.Error())
		return err
	}
	for _, s := range spans {
		if err := s.Write(p); err != nil {
			openlogging.Error("can not serialize spans: " + err.Error())
			return err
		}
	}
	if err := p.WriteListEnd(); err != nil {
		openlogging.Error("can not serialize spans: " + err.Error())
		return err
	}
	openlogging.Debug(t.String())
	_, err := da.writer.Write(t.Buffer.Bytes())
	if err != nil {
		openlogging.Error("can not report spans: " + err.Error())
		return err
	}
	err = da.writer.Flush()
	if err != nil {
		openlogging.Error("can not flush spans: " + err.Error())
		return err
	}
	return nil
}
import { html, css } from "lit";
import { customElement, state } from "lit/decorators.js";

import "@vaadin/button";
import { utility } from "@vaadin/vaadin-lumo-styles/utility";
import { badge } from "@vaadin/vaadin-lumo-styles/badge.js";
import { spacing } from "@vaadin/vaadin-lumo-styles/utilities/spacing.js";

import { PageElement } from "../../helpers/page-element.js";
import { themeStyles } from "../../themes/yld0-theme/styles.js";
import { verifyUser } from "../../auth/auth.js";
import "../../components/molecules/yld0-simple-message-box/yld0-simple-message-box";

@customElement("page-verify-user")
export class PageVerifyUser extends PageElement {
    @state()
    private loaded: Boolean = false;

    @state()
    private success: Boolean = false;

    @state()
    private title: string = "";

    @state()
    private subtitle: string = "";

    @state()
    private help: string = "Please contact <EMAIL> if you're experiencing issues verifying your account.";

    @state()
    private img: string = "";

    static styles = [
        badge,
        utility,
        spacing,
        themeStyles,
        // Table styling and a few extras
        css`
            section {
                margin-top: 200px;
                margin-right: auto;
                margin-left: auto;
                padding: 1rem;
                padding-top: 1rem;
            }

            vaadin-button {
                background-color: var(--lumo-shade-10pct);
            }
        `,
    ];

    private firstUpdated() {
        this.verifyToken();
    }

    private async verifyToken() {
        // Attempt to verify token
        const urlSearchParams = new URLSearchParams(this.location?.search);
        const params = Object.fromEntries(urlSearchParams.entries());
        const token = params.token || "";

        if (token) {
            const response = await verifyUser(token);
            this.loaded = true;

            if (response?.ok) {
                this.title = "Good to go!";
                this.subtitle = "User is now verified, please click below to log in.";
                this.img = "/images/user-verified-tick-green.png"; // svg not working at the moment
            } else {
                const data = await response?.json();
                const error_detail = JSON.stringify(data);

                if (response == undefined) {
                    this.success = false;
                    this.title = "Something is broken";
                    this.subtitle = "We seem to be experiencing some internal issues, hopefully we will solve them soon.";
                    this.img = "/images/server-error.svg";
                    this.help = "Please contact <EMAIL> if this doesn't get resolved.";
                } else if (response?.status == 400) {
                    switch (data.detail) {
                        case "VERIFY_USER_BAD_TOKEN":
                            this.img = "/images/error.svg";
                            this.title = "Urm, Bad Token!";
                            this.subtitle = `The token is invalid for verifying your user.`;
                            break;
                        case "VERIFY_USER_ALREADY_VERIFIED":
                            this.img = "/images/user-verified-tick-grey.svg";
                            this.title = "User is already verified!";
                            this.subtitle = `The user is already verified, you can login.`;
                            break;
                        default:
                            this.subtitle = error_detail;
                    }
                }
            }
        }
    }

    render() {
        return html`
            <section>
                <yld0-simple-message-box ?loaded=${this.loaded} boxImg="${this.img}" boxTitle="${this.title}" boxSubtitle="${this.subtitle}" help="${this.help}">
                    ${this.success ? html`<vaadin-button theme="secondary success">login</vaadin-button>` : html``}
                </yld0-simple-message-box>
            </section>
        `;
    }
}
/** * Tests that the <i>CheckIfInLane</i> method checks * that the boat is within the left lane boundary. */ @Test public void testCheckIfInLaneLeftBoundaryLimit(){ lane = new Lane(0,100); boat = new Boat(game, 10, lane, "testBoat"); boat.setXPosition(-4); Assertions.assertTrue(boat.CheckIfInLane()); }
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#include <time.h>
#include <signal.h>

#include "scheduler.h"
#include "worker.h"

/*
 * define the extern global variables here.
 */
sem_t queue_sem;              /* semaphore for scheduler queue */
thread_info_list sched_queue; /* list of current workers */

static int quit = 0;
static timer_t timer;
static thread_info_t *currentThread = 0;
static long wait_times;
static long run_times;
static int completed = 0;
static int thread_count = 0;

static void exit_error(int); /* helper function. */
static void wait_for_queue();

/*
 * Update the worker's current running time.
 * This function is called every time the thread is suspended.
 */
void update_run_time(thread_info_t *info)
{
	// straightforward update of the run timer
	clock_gettime(CLOCK_REALTIME, &info->suspend_time);
	info->run_time += time_difference(&info->suspend_time, &info->resume_time);
}

/*
 * Update the worker's current waiting time.
 * This function is called every time the thread resumes.
 */
void update_wait_time(thread_info_t *info)
{
	// straightforward update of the wait timer
	clock_gettime(CLOCK_REALTIME, &info->resume_time);
	info->wait_time += time_difference(&info->resume_time, &info->suspend_time);
}

static void init_sched_queue(int queue_size)
{
	/* set up a semaphore to restrict access to the queue */
	sem_init(&queue_sem, 0, queue_size);

	/* initialize the scheduler queue */
	sched_queue.head = sched_queue.tail = 0;
	pthread_mutex_init(&sched_queue.lock, NULL);

	/* initialize the timer */
	struct sigevent sigev;
	sigev.sigev_notify = SIGEV_SIGNAL;
	sigev.sigev_signo = SIGALRM;
	sigev.sigev_value.sival_ptr = &timer;
	if (timer_create(CLOCK_REALTIME, &sigev, &timer) == -1) {
		perror("error initializing timer (timer_create)");
	}
}

/*
 * signal a worker thread that it can resume.
 */
static void resume_worker(thread_info_t *info)
{
	printf("Scheduler: resuming %lu.\n", info->thrid);

	/*
	 * signal the worker thread that it can resume
	 */
	if (pthread_kill(info->thrid, SIGUSR2) != 0) {
		perror("Error resuming worker thread (resume_worker)");
	}

	/* update the wait time for the thread */
	update_wait_time(info);
}

/* send a signal to the thread, telling it to kill itself */
void cancel_worker(thread_info_t *info)
{
	/* send a signal to the thread, telling it to kill itself */
	if (pthread_kill(info->thrid, SIGTERM) != 0) {
		perror("Error killing worker thread. (cancel_worker)");
	}

	/* Update global wait and run time info */
	wait_times += info->wait_time;
	run_times += info->run_time;
	completed++;

	/* Update schedule queue */
	leave_scheduler_queue(info);

	if (completed >= thread_count) {
		sched_yield(); /* Let other threads terminate. */
		printf("The total wait time is %f seconds.\n", (float)wait_times / 1000000);
		printf("The total run time is %f seconds.\n", (float)run_times / 1000000);
		printf("The average wait time is %f seconds.\n", (float)wait_times / 1000000 / thread_count);
		printf("The average run time is %f seconds.\n", (float)run_times / 1000000 / thread_count);
	}
}

/*
 * signals a worker thread that it should suspend.
 */
static void suspend_worker(thread_info_t *info)
{
	printf("Scheduler: suspending %lu.\n", info->thrid);

	/* update the run time for the thread */
	update_run_time(info);

	/* Update quanta remaining. */
	info->quanta = info->quanta - 1;

	/* decide whether to cancel or suspend thread */
	if (info->quanta > 0) {
		/*
		 * Thread still running: suspend.
		 * Signal the worker thread that it should suspend;
		 * its SIGUSR1 handler is installed once in setup_sig_handlers.
		 */
		if (pthread_kill(info->thrid, SIGUSR1) != 0) {
			perror("Error suspending worker thread (suspend_worker)");
		}

		/* Update schedule queue */
		list_remove(&sched_queue, info->le);
		list_insert_tail(&sched_queue, info->le);
	} else {
		/* Thread done: cancel */
		cancel_worker(info);
	}
}

/*
 * this is the scheduling algorithm
 * pick the next worker thread from the available list
 * you may need to add new information to the thread_info struct
 */
static thread_info_t *next_worker()
{
	if (completed >= thread_count)
		return 0;

	wait_for_queue();
	printf("Scheduler: scheduling.\n");

	/* return the thread_info_t for the next thread to run */
	return sched_queue.head->info;
}

void timer_handler()
{
	thread_info_t *info = 0;

	/* once the last worker has been removed, we're done. */
	if (list_size(&sched_queue) == 0) {
		quit = 1;
		return;
	}

	/* suspend the current worker */
	if (currentThread)
		suspend_worker(currentThread);

	/* resume the next worker */
	info = next_worker();

	/* Update currentThread */
	currentThread = info;

	if (info)
		resume_worker(info);
	else
		quit = 1;
}

/*
 * Set up the signal handlers for SIGALRM, SIGUSR1, and SIGTERM.
 */
void setup_sig_handlers()
{
	/* Setup timer handler for SIGALRM signal in scheduler */
	struct sigaction sigalrm_action;
	sigalrm_action.sa_flags = SA_SIGINFO;
	sigalrm_action.sa_sigaction = timer_handler;
	sigemptyset(&sigalrm_action.sa_mask);
	sigaction(SIGALRM, &sigalrm_action, NULL);

	/* Setup cancel handler for SIGTERM signal in workers */
	struct sigaction sigterm_action;
	sigterm_action.sa_flags = SA_SIGINFO;
	sigterm_action.sa_sigaction = cancel_thread;
	sigemptyset(&sigterm_action.sa_mask);
	sigaction(SIGTERM, &sigterm_action, NULL);

	/* Setup suspend handler for SIGUSR1 signal in workers */
	struct sigaction sigusr_action;
	sigusr_action.sa_flags = SA_SIGINFO;
	sigusr_action.sa_sigaction = suspend_thread;
	sigemptyset(&sigusr_action.sa_mask);
	sigaction(SIGUSR1, &sigusr_action, NULL);
}

/*
 * waits until there are workers in the scheduling queue.
 */
static void wait_for_queue()
{
	while (!list_size(&sched_queue)) {
		printf("Scheduler: waiting for workers.\n");
		sched_yield();
	}
}

/*
 * runs at the end of the program just before exit.
 */
static void clean_up()
{
	/*
	 * destroy any mutexes/condition variables/semaphores that were created.
	 * free any malloc'd memory not already free'd
	 */
	sem_destroy(&queue_sem);
	pthread_mutex_destroy(&sched_queue.lock);
}

/*
 * prints the program help message.
 */
static void print_help(const char *progname)
{
	printf("usage: %s <num_threads> <queue_size> <i_1, i_2 ... i_numofthreads>\n", progname);
	printf("\tnum_threads: the number of worker threads to run\n");
	printf("\tqueue_size: the number of threads that can be in the scheduler at one time\n");
	printf("\ti_1, i_2 ...i_numofthreads: the number of quanta each worker thread runs\n");
}

/*
 * prints an error summary and exits.
 */
static void exit_error(int err_num)
{
	fprintf(stderr, "failure: %s\n", strerror(err_num));
	exit(1);
}

/*
 * creates the worker threads.
*/ static void create_workers(int thread_count, int *quanta) { int i = 0; int err = 0; for (i = 0; i < thread_count; i++) { thread_info_t *info = (thread_info_t *) malloc(sizeof(thread_info_t)); info->quanta = quanta[i]; if ((err = pthread_create(&info->thrid, NULL, start_worker, (void *)info)) != 0) { exit_error(err); } printf("Main: detaching worker thread %lu.\n", info->thrid); pthread_detach(info->thrid); /* initialize the time variables for each thread for performance evalution*/ clock_gettime(CLOCK_REALTIME, &info->suspend_time); clock_gettime(CLOCK_REALTIME, &info->resume_time); } } /* * runs the scheduler. */ static void *scheduler_run(void *unused) { wait_for_queue(); /* start the timer */ struct itimerspec timer_s; timer_s.it_value.tv_sec = QUANTUM; timer_s.it_interval.tv_sec = QUANTUM; timer_s.it_value.tv_nsec = 0; timer_s.it_interval.tv_nsec = 0; if (timer_settime(timer, 0, &timer_s, NULL) == -1) { perror("error starting the timer (scheduler_run)"); } /*keep the scheduler thread alive*/ while( !quit ) sched_yield(); return NULL; } /* * starts the scheduler. * returns 0 on success or exits program on failure. */ static int start_scheduler(pthread_t *thrid) { int err = 0; if ((err = pthread_create(thrid, NULL, scheduler_run, 0)) != 0) { exit_error(err); } return err; } /* * reads the command line arguments and starts the scheduler & worker threads. */ int smp5_main(int argc, const char** argv) { int queue_size = 0; int ret_val = 0; int *quanta,i; pthread_t sched_thread; /* check the arguments. */ if (argc < 3) { print_help(argv[0]); exit(0); } thread_count = atoi(argv[1]); queue_size = atoi(argv[2]); quanta = (int*)malloc(sizeof(int)*thread_count); if (argc != 3 + thread_count) { print_help(argv[0]); exit(0); } for ( i = 0; i < thread_count; i++) quanta[i] = atoi(argv[i+3]); printf("Main: running %d workers with queue size %d for quanta:\n", thread_count, queue_size); for ( i = 0; i < thread_count; i++) printf(" %d", quanta[i]); printf("\n"); /*setup the sig handlers for scheduler and workers*/ setup_sig_handlers(); /* initialize anything that needs to be done for the scheduler queue. */ init_sched_queue(queue_size); /* creates a thread for the scheduler. */ start_scheduler(&sched_thread); /* creates the worker threads and returns. */ create_workers(thread_count, quanta); /* wait for scheduler to finish */ printf("Main: waiting for scheduler %lu.\n", sched_thread); pthread_join(sched_thread, (void **) &ret_val); /* clean up our resources */ clean_up(); /* this will wait for all other threads */ pthread_exit(0); } long time_difference(const struct timespec *time1, const struct timespec *time2) { return (time1->tv_sec - time2->tv_sec) * 1000000 + (time1->tv_nsec - time2->tv_nsec) / 1000; }
package path

import (
	"fmt"
	"medum/text"
	"os"
	"path/filepath"

	"github.com/mitchellh/go-homedir"
)

//GetPath return the location of ".medum" folder
func GetPath() string {
	path, err := homedir.Dir()
	if err != nil {
		fmt.Printf(text.HomedirError, err)
		os.Exit(1)
	}
	return filepath.Join(path, ".medum")
}

//GetConfigPath get the location of config file
func GetConfigPath() string {
	return filepath.Join(GetPath(), "config.json")
}

// GetDataPath get the location of data
func GetDataPath() string {
	return filepath.Join(GetPath(), "data.db")
}
# Each query a reverses the substring x[a .. n-a+1] (1-based). A character
# pair (i, n-1-i) ends up swapped iff an odd number of queries cover it, and
# a query starting at position a covers every pair with 0-indexed index >= a-1.
x = input()
m = int(input())
a1 = [int(i) - 1 for i in input().split()]

ll = len(x)
ll += ll % 2  # round up so odd-length strings get a slot for the middle pair

u = [0] * (ll // 2)
for i in range(m):
    u[a1[i]] += 1  # mark the starting pair of each reversal
for i in range(1, ll // 2):
    u[i] += u[i - 1]  # prefix sums: u[i] = number of reversals covering pair i

p1 = ''  # front half of the answer
p2 = ''  # back half of the answer, built from the middle outwards
for i in range(len(x) // 2):
    if u[i] % 2 == 0:  # even cover count: the pair keeps its order
        p1 += x[i]
        p2 = x[-i - 1] + p2
    else:              # odd cover count: the pair is swapped
        p2 = x[i] + p2
        p1 += x[-i - 1]

if len(x) % 2 == 1:
    print(p1 + x[len(x) // 2] + p2)
else:
    print(p1 + p2)
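To make the snippet's logic checkable, here is the same algorithm wrapped in a function with one invented test case: a single query a = 2 on "abcdef" reverses positions 2 through 5 (1-based), turning "bcde" into "edcb", so the expected output is "aedcbf".

def solve(x, queries):
    # Same parity trick as above, packaged for testing.
    n = len(x)
    half = (n + n % 2) // 2
    u = [0] * half
    for a in queries:
        u[a - 1] += 1
    for i in range(1, half):
        u[i] += u[i - 1]
    p1, p2 = '', ''
    for i in range(n // 2):
        if u[i] % 2 == 0:
            p1 += x[i]
            p2 = x[-i - 1] + p2
        else:
            p2 = x[i] + p2
            p1 += x[-i - 1]
    return p1 + x[n // 2] + p2 if n % 2 else p1 + p2

assert solve("abcdef", [2]) == "aedcbf"
print(solve("abcdef", [2]))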
from typing import Optional, Tuple


def rapidtide_workflow(
    in_file: str,
    prefix: str,
    venousrefine: bool = False,
    nirs: bool = False,
    realtr: str = "auto",
    antialias: bool = True,
    invertregressor: bool = False,
    interptype: str = "univariate",
    offsettime: Optional[float] = None,
    butterorder: Optional[int] = None,
    arbvec: Optional[Tuple[float, float]] = None,
    filtertype: str = "arb",
    numestreps: int = 10000,
    dosighistfit: bool = True,
    windowfunc: str = "hamming",
    gausssigma: float = 0.0,
    useglobalref: bool = False,
    meanscaleglobal: bool = False,
    slicetimes: Optional[str] = None,
    preprocskip: int = 0,
    nothresh: bool = True,
    oversampfactor: int = 2,
    regressorfile: Optional[str] = None,
    inputfreq: float = 1.0,
    inputstarttime: float = 0.0,
    corrweighting: str = "none",
    dodetrend: bool = True,
    corrmaskthreshpct: float = 1.0,
    corrmaskname: Optional[str] = None,
    fixeddelayvalue: Optional[float] = None,
    lag_extrema: Tuple[float, float] = (-30.0, 30.0),
    widthlimit: float = 100.0,
    bipolar: bool = False,
    zerooutbadfit: bool = True,
    findmaxtype: str = "gauss",
    despeckle_passes: int = 0,
    despeckle_thresh: int = 5,
    refineprenorm: str = "mean",
    refineweighting: str = "R2",
    passes: int = 1,
    includemaskname: Optional[str] = None,
    excludemaskname: Optional[str] = None,
    lagminthresh: float = 0.5,
    lagmaxthresh: float = 5.0,
    ampthresh: float = 0.3,
    sigmathresh: float = 100.0,
    refineoffset: bool = False,
    psdfilter: bool = False,
    lagmaskside: str = "both",
    refinetype: str = "avg",
    savelagregressors: bool = True,
    savecorrtimes: bool = False,
    histlen: int = 100,
    timerange: Tuple[int, int] = (-1, 10000000),
    glmsourcefile: Optional[str] = None,
    doglmfilt: bool = True,
    preservefiltering: bool = False,
    showprogressbar: bool = True,
    dodeconv: bool = False,
    internalprecision: str = "double",
    isgrayordinate: bool = False,
    fakerun: bool = False,
    displayplots: bool = False,
    nonumba: bool = False,
    sharedmem: bool = True,
    memprofile: bool = False,
    nprocs: int = 1,
    debug: bool = False,
    cleanrefined: bool = False,
    dodispersioncalc: bool = False,
    fix_autocorrelation: bool = False,
    tmaskname: Optional[str] = None,
    doprewhiten: bool = False,
    saveprewhiten: bool = False,
    armodelorder: int = 1,
    offsettime_total: Optional[float] = None,
    ampthreshfromsig: bool = False,
    nohistzero: bool = False,
    fixdelay: bool = False,
    usebutterworthfilter: bool = False,
):
    pass
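Since the body is an elided stub, nothing below is the real rapidtide implementation; it is only a sketch of how an entry point with this signature would be invoked. The file paths are hypothetical placeholders, and only keyword names taken from the signature above are used.

# Hypothetical invocation of the stub above; the paths are placeholders.
rapidtide_workflow(
    "sub-01_task-rest_bold.nii.gz",   # in_file
    "sub-01_rapidtide",               # prefix
    passes=3,
    despeckle_passes=4,
    lag_extrema=(-15.0, 15.0),
)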
package Assignments; import dbUtil.dbConnection; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; public class AssignmentPageModel { /** * Responsible for direct interaction with database for Assignment page */ Connection connection; public AssignmentPageModel(){ try { this.connection = dbConnection.getConnection(); } catch (SQLException ex) { ex.printStackTrace(); } if (this.connection == null) { System.exit(1); } } /** * Get result set for grade data from the database * @param bundleid * @return * @throws SQLException */ public ResultSet getGradeData(String bundleid) throws SQLException{ return this.connection.createStatement().executeQuery("SELECT * FROM '" + bundleid + "'"); } }
/** * Unit test for alliances. * * @author MKL. */ @RunWith(BlockJUnit4ClassRunner.class) public class AllianceTest { @Test public void testFusionAlliances() { List<Alliance> alliances = new ArrayList<>(); Alliance.fusion(alliances); Assert.assertEquals(0, alliances.size()); PlayableCountryEntity autriche = new PlayableCountryEntity(); autriche.setName("autriche"); PlayableCountryEntity baviere = new PlayableCountryEntity(); baviere.setName("baviere"); PlayableCountryEntity crimee = new PlayableCountryEntity(); crimee.setName("crimee"); PlayableCountryEntity danemark = new PlayableCountryEntity(); danemark.setName("danemark"); PlayableCountryEntity egypte = new PlayableCountryEntity(); egypte.setName("egypte"); PlayableCountryEntity france = new PlayableCountryEntity(); france.setName("france"); List<PlayableCountryEntity> countries = new ArrayList<>(); countries.add(autriche); countries.add(baviere); Alliance alliance = new Alliance(countries, 5); alliances.add(alliance); countries = new ArrayList<>(); countries.add(crimee); countries.add(danemark); alliance = new Alliance(countries, 8); alliances.add(alliance); Alliance.fusion(alliances); Assert.assertEquals(2, alliances.size()); Assert.assertEquals(5, alliances.get(0).getInitiative()); Assert.assertEquals(2, alliances.get(0).getCountries().size()); Assert.assertEquals(8, alliances.get(1).getInitiative()); Assert.assertEquals(2, alliances.get(1).getCountries().size()); countries = new ArrayList<>(); countries.add(danemark); countries.add(autriche); alliance = new Alliance(countries, 8); alliances.add(alliance); Alliance.fusion(alliances); Assert.assertEquals(1, alliances.size()); Assert.assertEquals(5, alliances.get(0).getInitiative()); Assert.assertEquals(4, alliances.get(0).getCountries().size()); alliances.clear(); countries = new ArrayList<>(); countries.add(autriche); countries.add(baviere); alliance = new Alliance(countries, 5); alliances.add(alliance); countries = new ArrayList<>(); countries.add(crimee); countries.add(danemark); alliance = new Alliance(countries, 8); alliances.add(alliance); countries = new ArrayList<>(); countries.add(egypte); countries.add(france); alliance = new Alliance(countries, 11); alliances.add(alliance); countries = new ArrayList<>(); countries.add(autriche); countries.add(france); alliance = new Alliance(countries, 14); alliances.add(alliance); countries = new ArrayList<>(); countries.add(egypte); countries.add(danemark); alliance = new Alliance(countries, 17); alliances.add(alliance); Alliance.fusion(alliances); Assert.assertEquals(1, alliances.size()); Assert.assertEquals(5, alliances.get(0).getInitiative()); Assert.assertEquals(6, alliances.get(0).getCountries().size()); } }
/* eslint-disable prettier/prettier */
import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';

import { User } from './entities';

@Injectable()
export class UserService {
    constructor(
        @InjectRepository(User)
        private readonly userRepository: Repository<User>
    ) {}

    async getMany() {
        return await this.userRepository.find()
    }

    async getOne(id) {
        const user = await this.userRepository.findOne(id)
        if (!user) {
            throw new NotFoundException('User does not exist')
        }
        return user
    }

    async createOne(dto) {
        const userExist = await this.userRepository.findOne({login: dto.login})
        if (userExist) {
            throw new BadRequestException('User already registered with login')
        }
        const newUser = this.userRepository.create(dto)
        const user: any = await this.userRepository.save(newUser)
        delete user.senha;
        return user
    }

    async editOne(id, dto) {
        const user = await this.userRepository.findOne(id)
        if (!user) {
            throw new NotFoundException('User does not exist!');
        }
        const editedUser = Object.assign(user, dto);
        return await this.userRepository.save(editedUser);
    }

    async deleteOne(id) {
        const user = await this.getOne(id)
        return await this.userRepository.remove(user)
    }
}
Teaching and Learning of E-Commerce at the Hong Kong Polytechnic University: From a Business Education Perspective This paper gives an overview of the work done at the Hong Kong Polytechnic University's (PolyU's) E-Commerce Laboratory on teaching and learning platforms for electronic commerce (EC), ranging from an E-Mall shopping platform to an agent-based EC Trading Platform. The platforms are designed and developed to encourage nontextbook learning of EC concepts through active learning in role-playing games. We describe the overall design and development of some EC platforms used by undergraduate business and management students at PolyU. Our experiences indicate that students like the practical components of the course and are interested in a nontextbook approach to learning by playing games. We believe that other business colleges with EC curricula will benefit from this approach.
After allegedly getting into a brief exchange about the inherent misogyny in the term "friend zone," Tony Daniel appears to have deleted his Twitter account.

Source: Tumblr, via Outhouse editor GLX

Hi there! Stoneman, head of Outhouse S&P here. Earlier today, Action Comics artist Tony Daniel, according to some screenshots that The Outhouse is unable to confirm the veracity of, got into an exchange on Twitter over the term "friend zone" and whether or not it is sexist. Afterward, he apparently quit Twitter, or at least made it inaccessible to all but approved followers. Twitter's error message claims the page "doesn't exist." Read the Tumblr account here. Here's an excerpt:

Tony Daniel, writer of such classics [insert sarcasm font] Battle for the Cowl is currently writing Action Comics. I will not comment on the quality of that book because it's inconsequential, but lets just say it ain't on a my pull list. In his most recent issue he has Clark say to Jimmy that he's been "friend zoned". If you're following me you don't need to be told why the idea of friend zoning is sexist, but after seeing a post on here about how someone confronted him about it I went to see if he'd apologized. He had not. So I decided that I would add my voice to what I hope will be others. Let's just say, he did not take kindly to it. Now before I keep posting I want to make a point of where I was trying to say here. It's something that Smooth did in his post but really Bomani Jones said once on Hoop Speak. No one has racism or sexism repellent. It's something we all grow up with and it becomes a part of us. My goal here was to say that the thing he did was sexist, not that he was sexist. Spoiler alert, he did not take it that way.

It should be noted, for those who think I'm being unfair, that Daniel's side of the story cannot be represented, since he deleted or hid his Twitter account. I did verify that the account used to be active, and appears to be Daniel's account, and that it is now unreachable. Also, his gallery of Facebook header images contained the image at the top of the article, objectifying Wonder Woman's butt.

The Tumblr blog where the story comes from, "leaguerulesfrownupon," describes its purpose as "Comics and Sports from a guy working for ESPN/NBA." I have no idea how to figure out Tumblr, but from what I can tell, the blog seems to have many posts with hundreds of notes, and the Twitter account has over 25,000 tweets.

The whole thing is strange, and I'm not sure what to think of it, so I'm just reporting what I know, and leaving it to you, long-suffering Outhouse readers, to decide for yourselves. We're going to put out a request for comment tonight, but, you know, DC creators aren't allowed to talk to us.
Guest post by Ruth Bonnett

In late 2010, I wrote about how bad science can give rise to bad policy. My Christmas message to farmers in Australia and around the world: I am an urban dweller who just happens to think that the 'unsettled science' and restriction of technology has brought about bad policy by way of the Kyoto Protocol, and subsequent disastrous consequences for our farmers and anyone else who values sound science, property rights and our democratic freedoms.

Little did I know that our Prime Minister, Julia Gillard, who promised that 'there would be no carbon tax under the government I lead', would now break her sacred trust with the Australian public on 24th February 2011 and seek to burden all Australians with a tax on carbon dioxide emissions.

To inflict this harm, our Prime Minister has resorted to several unconscionable tactics, including characterising the producers of this nation as 'big polluters', launching personal attacks on critics and on ordinary Australians who protest against this 'carbon' tax, and bringing out stars such as Cate Blanchett and Michael Caton to somehow convince us ordinary folk that this 'carbon' tax will somehow reduce the temperature of the planet, make grey skies blue and improve the health of our children.

Never mind that Canada has just handed Conservative Stephen Harper a comfortable majority – the only party to go to the Canadian people without a headline 'climate change' policy. Never mind that Canada, Japan and Russia have all indicated that they would not join a second round of carbon (dioxide) cuts under the Kyoto Protocol at United Nations talks this year, with the USA reiterating that it would remain outside the treaty. Never mind that unilateral action by Australians would put us in a vulnerable trading position and severely impact our economy. And never mind the bald-faced lies used to sell this bad tax, based on unsettled science, fear mongering and outright hysterical claims.

Recent polls have shown that fully two thirds of Australians want an election before any carbon (dioxide) tax is introduced. Indeed, say YES. Say YES to an election.
Towards cognitive BCI: Neural correlates of sustained attention in a continuous performance task Development of brain-computer interfaces interacting with cognitive functions is a hot topic in neural engineering since it may lead to innovative and powerful diagnosis, rehabilitation, and training methods. This paper addresses the problem of measuring sustained visual attention using electroencephalography and presents an experiment inspired by continuous performance tasks used in neuropsychology along with the classification results obtained when trying to discriminate between low and high attention states. Following a leave-one-subject-out validation approach, 76% accuracy was obtained when discriminating thirty second epochs and 69% accuracy using five second epochs.
def manage_schedulable(self, freerun_entry: FreerunProcessEntry, flow_request=None):
    uow = None
    if freerun_entry.related_unit_of_work:
        uow = self.uow_dao.get_one(freerun_entry.related_unit_of_work)

    try:
        if uow is None:
            self._process_state_embryo(freerun_entry, flow_request)
        elif uow.is_requested or uow.is_in_progress:
            self._process_state_in_progress(freerun_entry, uow)
        elif uow.is_finished or uow.is_invalid:
            self._process_terminal_state(freerun_entry, uow, flow_request)
        else:
            msg = f'Unknown state {uow.state} of the UOW {uow.db_id}'
            self._log_message(ERROR, freerun_entry, msg)
    except LookupError as e:
        # uow may still be None here (for instance, if the LookupError was
        # raised while processing the embryo state), so guard the access
        timeperiod = uow.timeperiod if uow else 'unknown'
        msg = f'Lookup issue for schedulable: {freerun_entry.db_id} ' \
              f'in timeperiod {timeperiod}, because of: {e}'
        self._log_message(WARNING, freerun_entry, msg)
package org.tiankafei.ui.chart.line;

import java.awt.Color;
import java.util.List;

import org.jfree.chart.ChartUtilities;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.XYSplineRenderer;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;
import org.jfree.ui.RectangleInsets;
import org.tiankafei.ui.chart.dto.TiankafeiAxisDTO;
import org.tiankafei.ui.chart.dto.TiankafeiAxixPointDTO;
import org.tiankafei.ui.chart.dto.TiankafeiLineCharDTO;
import org.tiankafei.ui.chart.modelsui.TkfChartPanel;
import org.tiankafei.ui.chart.modelsui.TkfDemoPanel;
import org.tiankafei.ui.chart.modelsui.TkfFreeChart;

/**
 * Custom line chart panel
 *
 * @author 甜咖啡
 */
public class TiankafeiLineCharPanel extends TkfDemoPanel {

    private static final long serialVersionUID = 6476341102778047459L;

    /**
     * Custom line chart parameter object
     */
    private TiankafeiLineCharDTO tiankafeiLineCharVO;

    /**
     * Constructs the custom line chart panel object
     */
    public TiankafeiLineCharPanel() {
        tiankafeiLineCharVO = new TiankafeiLineCharDTO();
    }

    /**
     * Initializes the custom line chart panel
     */
    public void initTiankafeiLineCharPanel() {
        XYSeriesCollection xySeriesCollection = new XYSeriesCollection();
        List<TiankafeiAxisDTO> tiankafeiAxisList = tiankafeiLineCharVO.getTiankafeiAxisList();
        for (int index = 0, length = tiankafeiAxisList.size(); index < length; index++) {
            TiankafeiAxisDTO tiankafeiAxisDTO = tiankafeiAxisList.get(index);
            XYSeries xySeries = createXySeries(tiankafeiAxisDTO);
            xySeriesCollection.addSeries(xySeries);
        }

        NumberAxis xNumberAxis = new NumberAxis(tiankafeiLineCharVO.getxLabel());
        xNumberAxis.setAutoRangeIncludesZero(false);

        NumberAxis yNumberAxis = new NumberAxis(tiankafeiLineCharVO.getyLabel());
        yNumberAxis.setAutoRangeIncludesZero(false);

        XYPlot xyPlot = new XYPlot(xySeriesCollection, xNumberAxis, yNumberAxis, new XYSplineRenderer());
        xyPlot.setBackgroundPaint(Color.lightGray);
        xyPlot.setDomainGridlinePaint(Color.white);
        xyPlot.setRangeGridlinePaint(Color.white);
        xyPlot.setAxisOffset(new RectangleInsets(4.0D, 4.0D, 4.0D, 4.0D));

        TkfFreeChart tkfFreeChart = new TkfFreeChart("XYSplineRenderer", TkfFreeChart.DEFAULT_TITLE_FONT, xyPlot, true);
        addFreeChart(tkfFreeChart);
        ChartUtilities.applyCurrentTheme(tkfFreeChart);
        TkfChartPanel tkfChartPanel = new TkfChartPanel(tkfFreeChart);
        add(tkfChartPanel);
    }

    /**
     * Creates a data series for one axis line
     *
     * @param tiankafeiAxisDTO axis object of the chart
     * @return the data series
     */
    private XYSeries createXySeries(TiankafeiAxisDTO tiankafeiAxisDTO) {
        XYSeries xySeries = new XYSeries(tiankafeiAxisDTO.getTitle());
        List<TiankafeiAxixPointDTO> tiankafeiAxixPointList = tiankafeiAxisDTO.getTiankafeiAxixPointList();
        for (int index = 0, length = tiankafeiAxixPointList.size(); index < length; index++) {
            TiankafeiAxixPointDTO tiankafeiAxixPointDTO = tiankafeiAxixPointList.get(index);
            xySeries.add(tiankafeiAxixPointDTO.getxPointValue(), tiankafeiAxixPointDTO.getyPointValue());
        }
        return xySeries;
    }

    /**
     * Gets the custom line chart parameter object
     *
     * @return custom line chart parameter object
     */
    public TiankafeiLineCharDTO getTiankafeiLineCharVO() {
        return tiankafeiLineCharVO;
    }

    /**
     * Sets the custom line chart parameter object
     *
     * @param tiankafeiLineCharVO custom line chart parameter object
     */
    public void setTiankafeiLineCharVO(TiankafeiLineCharDTO tiankafeiLineCharVO) {
        this.tiankafeiLineCharVO = tiankafeiLineCharVO;
    }
}
package Model;

import dbUtil.dbConnection;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

import javafx.collections.FXCollections;
import javafx.collections.ObservableList;

public class ProductosModel {

    private dbConnection con;

    public ArrayList<Producto> getProductos() {
        ArrayList<Producto> productos = new ArrayList<>();
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("SELECT id_producto, material, unidad, precio, cantidad, razon_social, productos.existencia as estado FROM `materiales` INNER JOIN `productos` ON `productos`.`id_material` = `materiales`.`id_material` INNER JOIN proveedores ON productos.`id_proveedor` = proveedores.`id_proveedor` ORDER BY material ASC");
            rs = ps.executeQuery();
            while (rs.next()) {
                Producto x = new Producto(rs.getString("material"), rs.getString("unidad"), rs.getString("precio"), rs.getString("razon_social"), rs.getString("cantidad"), rs.getString("id_producto"), rs.getString("estado"));
                productos.add(x);
            }
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
        return productos;
    }

    public String agregarProducto(String material, String descripcion, String unidad, String precio, String proveedor, String cantidad) {
        String message = "Producto y material agregado exitosamente";
        int id_material;
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("INSERT INTO materiales (material, descripcion) VALUES (?,?)", Statement.RETURN_GENERATED_KEYS);
            ps.setString(1, material);
            ps.setString(2, descripcion);
            ps.executeUpdate();
            rs = ps.getGeneratedKeys();
            rs.next();
            id_material = rs.getInt(1);
            ps = reg.prepareStatement("SELECT * from productos WHERE id_material = ? AND unidad = ?");
            ps.setInt(1, id_material);
            ps.setString(2, unidad);
            rs = ps.executeQuery();
            if (!rs.next()) {
                ps = reg.prepareStatement("INSERT INTO productos (id_material, unidad, precio, id_proveedor, cantidad) VALUES (?,?,?,(SELECT proveedores.`id_proveedor` FROM proveedores WHERE proveedores.`razon_social` = ?),?)");
                ps.setInt(1, id_material);
                ps.setString(2, unidad);
                ps.setString(3, precio);
                ps.setString(4, proveedor);
                ps.setString(5, cantidad);
                ps.executeUpdate();
            } else {
                message = "Un material no puede tener más de una vez la misma unidad";
            }
        } catch (SQLException ex) {
            System.out.println(ex);
            message = "El nombre del material no se puede repetir";
        }
        con.disconnect();
        return message;
    }

    public String agregarUnidad(ComboBoxClass material, String unidad, String precio, String proveedor, String cantidad) {
        String message = "Unidad agregada exitosamente";
        int id_material;
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("SELECT * from productos WHERE id_material = ? AND unidad = ?");
            ps.setString(1, material.getId());
            ps.setString(2, unidad);
            rs = ps.executeQuery();
            if (!rs.next()) {
                ps = reg.prepareStatement("INSERT INTO productos (id_material, unidad, precio, id_proveedor, cantidad) VALUES (?,?,?,(SELECT proveedores.`id_proveedor` FROM proveedores WHERE proveedores.`razon_social` = ?),?)");
                ps.setString(1, material.getId());
                ps.setString(2, unidad);
                ps.setString(3, precio);
                ps.setString(4, proveedor);
                ps.setString(5, cantidad);
                ps.executeUpdate();
            } else {
                message = "Un material no puede tener más de una vez la misma unidad";
            }
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
        return message;
    }

    public String modificarProducto(String material, String descripcion, String unidad, String precio, String proveedor, String cantidad, String id_producto) {
        String message = "Producto y material modificado exitosamente";
        int id_material;
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("UPDATE materiales SET material = ?, descripcion = ? WHERE id_material = (SELECT id_material FROM productos WHERE id_producto = ?)");
            ps.setString(1, material);
            ps.setString(2, descripcion);
            ps.setString(3, id_producto);
            ps.executeUpdate();
            ps = reg.prepareStatement("SELECT * from productos WHERE id_material = (SELECT id_material FROM productos WHERE id_producto = ?) AND unidad = ?");
            ps.setString(1, id_producto);
            ps.setString(2, unidad);
            rs = ps.executeQuery();
            // Stores confirmation that a record with the same unit already exists
            boolean aux = rs.next();
            if (!aux || rs.getString("id_producto").equals(id_producto)) {
                ps = reg.prepareStatement("UPDATE productos SET unidad = ?, precio = ?, id_proveedor = (SELECT id_proveedor FROM proveedores WHERE razon_social = ?), cantidad = ? WHERE id_producto = ?");
                ps.setString(1, unidad);
                ps.setString(2, precio);
                ps.setString(3, proveedor);
                ps.setString(4, cantidad);
                ps.setString(5, id_producto);
                ps.executeUpdate();
            } else {
                message = "Un material no puede tener más de una vez la misma unidad";
            }
        } catch (SQLException ex) {
            System.out.println(ex);
            message = "El nombre del material no se puede repetir";
        }
        con.disconnect();
        return message;
    }

    public ObservableList<String> getProveedores() {
        ArrayList<String> proveedores = new ArrayList<>();
        ObservableList<String> prov = FXCollections.observableList(proveedores);
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("SELECT razon_social from proveedores");
            rs = ps.executeQuery();
            while (rs.next()) {
                proveedores.add(rs.getString("razon_social"));
            }
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
        return prov;
    }

    public String getDescripcion(String id_producto) {
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        String descripcion = "";
        try {
            ps = reg.prepareStatement("SELECT descripcion from materiales WHERE id_material = (SELECT id_material from productos WHERE id_producto = ?)");
            ps.setString(1, id_producto);
            rs = ps.executeQuery();
            if (rs.next()) {
                descripcion = rs.getString("descripcion");
            }
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
        return descripcion;
    }

    public ObservableList<ComboBoxClass> getMateriales() {
        ArrayList<ComboBoxClass> mat = new ArrayList<>();
        ObservableList<ComboBoxClass> materiales = FXCollections.observableList(mat);
        ComboBoxClass cbc;
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("SELECT id_material, material from materiales");
            rs = ps.executeQuery();
            while (rs.next()) {
                cbc = new ComboBoxClass(rs.getString("id_material"), rs.getString("material"));
                mat.add(cbc);
            }
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
        return materiales;
    }

    public void cambiarEstado(String id_producto) {
        con = new dbConnection();
        Connection reg = con.getConnection();
        PreparedStatement ps;
        ResultSet rs;
        try {
            ps = reg.prepareStatement("UPDATE productos SET existencia = (SELECT IF(existencia = '1', '0', '1')) WHERE productos.`id_producto` = ?");
            ps.setString(1, id_producto);
            ps.executeUpdate();
        } catch (SQLException ex) {
            System.out.println(ex);
        }
        con.disconnect();
    }
}
// RegisterAlias registers a new alias. // See also Alias. // // If an alias already exists, RegisterAlias calls panic(). func (p *Program[E, P, F, R]) RegisterAlias(alias Alias) { if p.aliases == nil { p.aliases = make(map[string]Alias) } name := alias.Name if _, ok := p.aliases[name]; ok { panic("RegisterAlias(): Alias already registered") } p.aliases[name] = alias }
Characterization of Subpopulations of Chicken Mononuclear Phagocytes That Express TIM4 and CSF1R The phosphatidylserine receptor TIM4, encoded by TIMD4, mediates the phagocytic uptake of apoptotic cells. We applied anti-chicken TIM4 mAbs in combination with CSF1R reporter transgenes to dissect the function of TIM4 in the chick (Gallus gallus). During development in ovo, TIM4 was present on the large majority of macrophages, but expression became more heterogeneous posthatch. Blood monocytes expressed KUL01, class II MHC, and CSF1R-mApple uniformly. Around 50% of monocytes were positive for surface TIM4. They also expressed many other monocyte-specific transcripts at a higher level than TIM4− monocytes. In liver, highly phagocytic TIM4hi cells shared many transcripts with mammalian Kupffer cells and were associated with uptake of apoptotic cells. Although they expressed CSF1R mRNA, Kupffer cells did not express the CSF1R-mApple transgene, suggesting that additional CSF1R transcriptional regulatory elements are required by these cells. By contrast, CSF1R-mApple was detected in liver TIM4lo and TIM4− cells, which were not phagocytic and were more abundant than Kupffer cells. These cells expressed CSF1R alongside high levels of FLT3, MHCII, XCR1, and other markers associated with conventional dendritic cells in mice. In bursa, TIM4 was present on the cell surface of two populations. Like Kupffer cells, bursal TIM4hi phagocytes coexpressed many receptors involved in apoptotic cell recognition. TIM4lo cells appear to be a subpopulation of bursal B cells. In overview, TIM4 is associated with phagocytes that eliminate apoptotic cells in the chick. In the liver, TIM4 and CSF1R reporters distinguished Kupffer cells from an abundant population of dendritic cell–like cells.
// Find the largest binary search tree inside a given binary tree,
// using a post-order (bottom-up) traversal.
class Solution {
public:
    // Returns true if the subtree rooted at root is a BST; res receives the
    // size of the largest BST found in it, and [lower, upper] its value range.
    bool isBST(TreeNode* root, int &res, int &lower, int &upper){
        if(!root) return true;
        int left_size = 0, right_size = 0;
        int left_lower, left_upper, right_lower, right_upper;
        bool left = isBST(root->left, left_size, left_lower, left_upper);
        bool right = isBST(root->right, right_size, right_lower, right_upper);
        if(left && right){
            if((!root->left || root->val > left_upper) && (!root->right || root->val < right_lower)){
                res = left_size + right_size + 1;
                lower = root->left ? left_lower : root->val;
                upper = root->right ? right_upper : root->val;
                return true;
            }
        }
        res = max(left_size, right_size);
        return false;
    }
    int largestBSTSubtree(TreeNode* root) {
        int res = 0, lower = INT_MIN, upper = INT_MAX;
        isBST(root, res, lower, upper);
        return res;
    }
};
//React
import { useContext } from 'react';

//Externals
import SettingsContext from '../contexts/SettingsContext';
import type { SettingsContextValue } from '../contexts/SettingsContext';

const useSettings = (): SettingsContextValue => useContext(SettingsContext);

export default useSettings;
Comment on Alkanani et al. Alterations in Intestinal Microbiota Correlate With Susceptibility to Type 1 Diabetes. Diabetes 2015;64:3510–3520 The search for potential microorganisms associated with autoimmunity and their metabolic role is crucial to understand the origin and evolution of type 1 diabetes (T1D) and other autoimmune diseases. An interesting article by Alkanani et al. (1) published in Diabetes described the microbiota structure of American patients with new-onset T1D, comparing them with subjects with islet autoimmunity, seronegative first-degree relatives, and a healthy control group without family history of autoimmunity. One of their main findings was an increase in …
package cmd

import (
	"strings"

	"github.com/bwmarrin/discordgo"
)

// Unregister allows a queue user to remove themselves from the queue
func Unregister(cmdInfo CommandInfo) {
	// cmdInfo.CmdOps[1:] starts after ;unregister
	if len(cmdInfo.CmdOps) != 3 {
		// Error - not enough arguments
		msg := cmdInfo.createMsgEmbed(
			"Error: Wrong Arguments", errThumbURL, "Try checking your syntax.", errColor,
			format(
				createFields("EXAMPLE", cmdInfo.Prefix+"unregister event 1234", false),
			))
		cmdInfo.Ses.ChannelMessageSendEmbed(cmdInfo.BotChID, msg)
		return
	}
	args := cmdInfo.CmdOps[1:]
	switch strings.ToLower(args[0]) {
	case "event":
		cmdInfo.removeFromEvent(args[1], cmdInfo.Msg.Author)
	case "trade":
		cmdInfo.removeFromTrade(args[1], cmdInfo.Msg.Author)
	}
}

// helper func which removes queue users from an event queue
func (c CommandInfo) removeFromEvent(eventID string, user *discordgo.User) {
	if !c.Service.Event.EventExists(eventID) {
		// event does not exist
		msg := c.createMsgEmbed(
			"Error: Event does not exist", errThumbURL, "Event ID: "+eventID, errColor,
			format(
				createFields("Suggestion", "Try checking if you supplied the correct Event ID", false),
			))
		c.Ses.ChannelMessageSendEmbed(c.BotChID, msg)
		return
	}
	// remove user
	c.Service.Event.Remove(eventID, user)
	// Remove tracking on user
	c.Service.User.RemoveQueue(eventID, user)
	// successfully removed user
	msg := c.createMsgEmbed(
		"Removed from Event", checkThumbURL, "Queue ID: "+c.CmdOps[2], successColor,
		format(
			createFields("User", user.Mention(), true),
			createFields("Suggestion", "Feel free to queue for any other events or create your own.", false),
		))
	c.Ses.ChannelMessageSendEmbed(c.BotChID, msg)
}

// helper func to remove user's offer from trade event
func (c CommandInfo) removeFromTrade(tradeID string, user *discordgo.User) {
	if !c.Service.Trade.Exists(tradeID) {
		// trade does not exist
		msg := c.createMsgEmbed(
			"Error: Trade does not exist", errThumbURL, "Trade ID: "+tradeID, errColor,
			format(
				createFields("Suggestion", "Try checking if you supplied the correct Trade ID", false),
			))
		c.Ses.ChannelMessageSendEmbed(c.BotChID, msg)
		return
	}
	offer := c.Service.Trade.GetOffer(tradeID, user.ID)
	// remove user
	c.Service.Trade.Remove(tradeID, user)
	// remove tracking on user
	c.Service.User.RemoveOffer(tradeID, user)
	// successfully removed user
	msg := c.createMsgEmbed(
		"User Withdrew From Trade", checkThumbURL, "Trade ID: "+tradeID, successColor,
		format(
			createFields("User", user.Mention(), true),
			createFields("Offer", offer, true),
			createFields("Suggestion", "Feel free to offer for any other trades or create your own.", false),
		))
	c.Ses.ChannelMessageSendEmbed(c.BotChID, msg)
}
/**
 * This function will copy memory content from source address to destination
 * address.
 *
 * @param dst the address of destination memory
 * @param src the address of source memory
 * @param count the copied length
 *
 * @return the address of destination memory
 */
void *elog_memcpy(void *dst, const void *src, size_t count) {
    char *tmp = (char *) dst, *s = (char *) src;

    assert(dst);
    assert(src);

    while (count--)
        *tmp++ = *s++;

    return dst;
}
export const fakeChatId = '3ece1b67-8e42-442c-8d38-cc150ad328af';
import json

from confluent_kafka import Consumer, KafkaError

from modules import settings, User


class KafkaScanner:
    """
    Class representing a kafka scanner.

    This polls for new messages on the kafka bus and upon receiving them
    handles the user.
    """

    ad = None
    consumer = None

    def __init__(self, ad=None):
        """
        Initialise an instance of the KafkaScanner.

        Params:
        - ad - The Active Directory Connector to be used.
        """
        self.ad = ad
        self.consumer = Consumer(settings.KAFKA_SETTINGS)
        self.consumer.subscribe(['ENTITY_RISK_LEVEL'])
        print('Kafka Scanner initialised.')

    def scan_for_message(self):
        """
        Method for running a kafka bus scan.

        Polls for new messages and upon receiving one, if the user risk
        level is 4/5 it handles the user.
        """
        while True:
            msg = self.consumer.poll(0.1)
            if msg is None:
                continue
            elif not msg.error():
                message = json.loads(msg.value().decode('utf8'))
                print('Received message: {0}'.format(message))
                if message['risk_level'] >= 4:
                    user = User(message['user_id'].replace(' ', '.'))
                    user.handle()
            elif msg.error().code() == KafkaError._PARTITION_EOF:
                print('End of partition reached {0}/{1}'
                      .format(msg.topic(), msg.partition()))
            else:
                print('Error occurred: {0}'.format(msg.error().str()))
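For context, a minimal entry point that wires the scanner up could look like the sketch below; the import path and the bare ad=None are assumptions, since the actual Active Directory connector class isn't shown here.

from modules.kafka_scanner import KafkaScanner


def main():
    # ad=None mirrors the constructor default; a real deployment would
    # pass the Active Directory connector used to handle risky users.
    scanner = KafkaScanner(ad=None)
    scanner.scan_for_message()  # blocks forever, polling the bus


if __name__ == '__main__':
    main()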
def delete(self, t):
    # Locate the node holding t and delete that node (the original called
    # self.root.delete(), which ignored the node just found), then re-root
    # the tree in case the deletion changed its structure.
    node = self.find(t)
    deleted = node.delete()
    self.reroot()
    return deleted
import * as Constitution from '../assets/data/constitution.json';
import { TableOfContents } from './toc';

export const constitutionData: any = (Constitution as any).default;

// wrap the entire content in a div so the body has a single child
export const constitutionRoot = new DOMParser().parseFromString(
    "<div>" + constitutionData.body + "</div>", 'text/html');
export const constitutionBody = constitutionRoot.body.firstElementChild;

export const toc = new TableOfContents(Constitution.toc);
//
// CImapAccount::GetAutoExpungeSetting()
//
// Returns the auto expunge setting for this personality and, if the setting is for a percent,
// returns the percent in piPercent.
//
int CImapAccount::GetAutoExpungeSetting(int *piPercent)
{
    CString key;
    TCHAR   szValue[32];

    key = g_Personalities.GetIniKeyName(IDS_INI_IMAP_AUTO_EXPUNGE_NEVER);
    g_Personalities.GetProfileString(m_strPersona, key, "", szValue, sizeof(szValue));
    if (szValue[0] == '1')
    {
        return IDS_INI_IMAP_AUTO_EXPUNGE_NEVER;
    }

    key = g_Personalities.GetIniKeyName(IDS_INI_IMAP_AUTO_EXPUNGE_ALWAYS);
    g_Personalities.GetProfileString(m_strPersona, key, "", szValue, sizeof(szValue));
    if (szValue[0] == '1')
    {
        return IDS_INI_IMAP_AUTO_EXPUNGE_ALWAYS;
    }

    key = g_Personalities.GetIniKeyName(IDS_INI_IMAP_AUTO_EXPUNGE_ON_PCT);
    g_Personalities.GetProfileString(m_strPersona, key, "", szValue, sizeof(szValue));
    if (szValue[0] == '1')
    {
        key = g_Personalities.GetIniKeyName(IDS_INI_IMAP_AUTO_EXPUNGE_PCT);
        g_Personalities.GetProfileString(m_strPersona, key, "", szValue, sizeof(szValue));
        *piPercent = atoi(szValue);
        return IDS_INI_IMAP_AUTO_EXPUNGE_ON_PCT;
    }

    return -1;
}
import { Controller, UseGuards, Request, Post, Get, Req, Body } from '@nestjs/common';
import { ApiTags, ApiResponse, ApiParam } from '@nestjs/swagger';
import { AuthService } from './auth.service';
import { LocalAuthGuard } from './guards/local.guard';
import { AuthRegisterDto } from './dto/auth.register.dto';
import { AuthLoginDto } from './dto/auth.login.dto';

@ApiTags('auth')
@Controller('auth')
export class AuthController {
  constructor(private authService: AuthService) {}

  @UseGuards(LocalAuthGuard)
  @ApiResponse({ status: 201, description: 'User was authenticated correctly' })
  @ApiResponse({ status: 401, description: 'User authentication failed' })
  @ApiParam({ name: 'username', description: 'User unique username', type: 'string' })
  @ApiParam({ name: 'password', description: 'User password in plaintext', type: 'string' })
  @Post('login')
  async login(@Body() dto: AuthLoginDto) {
    return this.authService.login(dto);
  }

  @ApiResponse({ status: 201, description: 'User was created correctly' })
  @ApiResponse({ status: 401, description: 'Cannot create user' })
  @ApiParam({ name: 'username', description: 'User unique nickname', type: 'string' })
  @ApiParam({ name: 'password', description: 'User password in plaintext', type: 'string' })
  @Post('register')
  async register(@Body() dto: AuthRegisterDto) {
    return this.authService.register(dto);
  }
}
def ajax_message(request, payload, flavor='message'):
    ajax_continue(request, AjaxMessage(payload, flavor, None))
Dynamic Photoelectrochemical Device with Open-Circuit Potential Insensitive to Thermodynamic Voltage Loss. The open-circuit potential (Voc) represents the maximum thermodynamic potential in a device, and achieving a high Voc is crucial for self-biased photoelectrochemical (PEC) devices that use only solar energy to produce chemical energy. In general, Voc is limited by the photovoltage (Vph), which is a potential difference generated by light-induced thermodynamic processes at semiconductor photoelectrodes, such as the generation and recombination of charge carriers. Therefore, low light intensity and nanostructured semiconductor materials degrade Vph (and Voc) through inefficient carrier generation and enhanced recombination loss, respectively. Here, we report that Voc in dynamic PEC devices employing a porous NiOx/Si photocathode is insensitive to thermodynamic losses, which was clarified by varying the carrier generation and recombination rates. The Voc values were observed to be unchanged even under a low light intensity of 0.1 sun, as well as for different morphologies such as nanostructured and polycrystalline Si. These findings shed light on the potential merit of dynamically operated PEC systems.
def force_models_consistency(self):
    """Ensure every model is attached to each dataset it applies to.

    A model applies to a dataset when its ``datasets_names`` is None
    (meaning all datasets) or explicitly lists the dataset's name.
    """
    for d in self._datasets:
        for m in self._models:
            if (
                m.datasets_names is None or d.name in m.datasets_names
            ) and m not in d.models:
                d.models.append(m)
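To make the rule concrete, here is a minimal, self-contained sketch of the invariant this method enforces, using hypothetical stand-in classes (the real dataset and model classes aren't shown in this snippet):

class FakeModel:
    """Hypothetical stand-in for a model with optional dataset scoping."""
    def __init__(self, name, datasets_names=None):
        self.name = name
        self.datasets_names = datasets_names


class FakeDataset:
    """Hypothetical stand-in for a dataset that tracks attached models."""
    def __init__(self, name):
        self.name = name
        self.models = []


d = FakeDataset("obs-1")
m_global = FakeModel("bkg", datasets_names=None)       # applies to every dataset
m_scoped = FakeModel("src", datasets_names=["obs-2"])  # scoped to a different dataset

for m in (m_global, m_scoped):
    if (m.datasets_names is None or d.name in m.datasets_names) and m not in d.models:
        d.models.append(m)

assert d.models == [m_global]  # only the globally scoped model is attached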
If you’ve been to my blog before, you’ve probably heard me mention the name Yoshinori Kanada. I’m not going to write another biography on the guy, but I will say he died in 2009 as a legend to the anime industry and its fans. His charismatic approach to his work as an animator broke down many barriers and showed that animators could stand out and express their own styles in their work. You can read a good overview of this guy here (I do recommend it). But I’m going to focus on just one of his many achievements: his immortal fire dragon from the movie Harmaggedon (the anime adaptation of the manga Genma Taisen). Perhaps not the most iconic product of his career, it is probably his most remarkable from an animation point of view, and certainly a milestone in the history of anime.

The fire dragon carried on the spirit of his stand-out sequence from Adieu Galaxy Express 999 (1981), which depicted a ghost formed from liquid and smoke. That ghost already impressed audiences and animators, but the style of effects animation would be pushed to a new level just a couple of years later in Harmaggedon (1983). The fire dragon, the climax of the film, perfected a thrilling new form of effects animation, which combined a sense of stylization and abstraction with an organic approach to motion. The dragon moves as a visual cacophony of wildly undulating lines and swirling, churning, leaping geometries, depicting a body of fire in a very natural and enthralling way. It’s an achievement in animation, the magic of which probably won’t be captured again elsewhere. The abstraction has to do with the use of a few colors and a lack of shading, which simplistically but beautifully captures an image in a 2-dimensional space. Kanada’s Adieu Galaxy Express ghost and Harmaggedon fire dragon featured in Takashi Murakami’s Superflat, where he compared them to the style seen in traditional Japanese woodblock prints by Katsushika Hokusai. I’m far from being an expert on art, but what I like about it is how it elegantly represents reality as forms of overlapping color.

While I don’t think the glory of this dragon can be replicated, it is a tribute to its enduring influence among animators that it is often paid homage to in their works. This video contains a collection of homages and similar effects dragons (and also the original!). I thought it might be worth having a look at a few of these (and I would love it if someone else could help me identify the ones I don’t know).

Appropriately, the very first homage is undoubtedly the work or idea of Imaishi Hiroyuki. I say appropriately because, as you probably all know, Imaishi is a devout follower of Kanada’s style and someone with a great deal of respect for him. He has adopted, and exaggerated further, Kanada’s extreme perspectives, crazy character poses, and heavy usage of effects animation. Gurren Lagann is a massive throwback, with love, to the super robot genre that Kanada was such a pivotal influence upon. But before Gurren Lagann, Imaishi got his Kanada on when he was episode director/storyboarder/animation director for the crazy GAINAX comedy Abenobashi, which is where this clip comes from. It was a riotous episode, and Imaishi got some great animators on board to play with his brand of Kanada (Keisuke Watabe, You Yoshinari (and Kou Yoshinari), Sushio, Tokuyuki Matsutake). The episode felt like the precursor to Gurren Lagann. Actually, Imaishi worked on a more subtle reference to the fire dragon in episode 7 of the just-finished Black Rock Shooter.
I sadly haven’t seen the episode yet, but a friend of mine pointed it out to me. Imaishi Hiroyuki storyboarded and directed the other-world scenes in most episodes of BRS. In episode 7 it was alongside one of his main animators on Gurren Lagann, and probably someone whom he has influenced/mentored a lot himself: Akira Amemiya. One of them had the cheek to sneak it in there!

But going back to the video. The dragon that bursts from the cooking pot at 00.48 is the animation work of Seiya Numata (working on 2×2=Shinobuden), another big Kanada fan. Although his style isn’t as directly reminiscent of Kanada’s as Imaishi’s, Kanada’s rebellious and experimental spirit has definitely been picked up by him. He has a big impact on an anime when he’s involved, and always leaves a footprint. Check out this article on Ani no Miyako for more on this guy. But funnily enough, he too appears to have worked a fire dragon reference into his new season of Milky Holmes. As character designer, he is heavily involved in that show, often in a more behind-the-scenes capacity. He was animation director on episode 7, which means he was especially involved in this episode. Whether he animated it uncredited or not, there’s a good chance it was his idea!

I haven’t actually seen the other anime in this video, so if anyone wants to enlighten us as to their origins, that would be great! A friend identified the clip with multiple dragons in space at ~3.10 as being from X (and an earlier clip with a pure-red dragon attacking a guy in a ball). It would be cool to know the story behind these ones. Actually, I could keep this post linked in the sidebar and updated whenever we see another fire dragon pop up in anime or we can unearth one of these older ones! Please contribute or just share your thoughts!
def func():
    # Split the digit string into two halves; after sorting, every digit
    # of the half with the larger sum must strictly beat the digit at the
    # same position in the other half (equal sums are immediately a "NO").
    n = int(input())
    a = list(map(int, list(input())))
    x = sorted(a[:n])
    y = sorted(a[n:])
    a = x + y
    if sum(x) > sum(y):
        i, j = 0, n
    elif sum(x) == sum(y):
        return "NO"
    else:
        i, j = n, 0
    while i < 2 * n and j < 2 * n:
        if not a[i] > a[j]:
            return "NO"
        i += 1
        j += 1
    return "YES"


print(func())
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.io.DataOutput; import java.io.IOException; import java.security.MessageDigest; import java.util.Iterator; import java.util.List; import com.google.common.base.Predicates; import com.google.common.collect.Iterators; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.*; import org.apache.cassandra.db.columniterator.ICountableColumnIterator; import org.apache.cassandra.db.columniterator.OnDiskAtomIterator; import org.apache.cassandra.db.index.SecondaryIndexManager; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.io.sstable.ColumnStats; import org.apache.cassandra.io.sstable.SSTable; import org.apache.cassandra.io.util.DataOutputBuffer; import org.apache.cassandra.utils.MergeIterator; import org.apache.cassandra.utils.StreamingHistogram; /** * LazilyCompactedRow only computes the row bloom filter and column index in memory * (at construction time); it does this by reading one column at a time from each * of the rows being compacted, and merging them as it does so. So the most we have * in memory at a time is the bloom filter, the index, and one column from each * pre-compaction row. * * When write() or update() is called, a second pass is made over the pre-compaction * rows to write the merged columns or update the hash, again with at most one column * from each row deserialized at a time. */ public class LazilyCompactedRow extends AbstractCompactedRow implements Iterable<OnDiskAtom> { private static Logger logger = LoggerFactory.getLogger(LazilyCompactedRow.class); private final List<? extends ICountableColumnIterator> rows; private final CompactionController controller; private final boolean shouldPurge; private ColumnFamily emptyColumnFamily; private Reducer reducer; private final ColumnStats columnStats; private long columnSerializedSize; private boolean closed; private ColumnIndex.Builder indexBuilder; private ColumnIndex columnsIndex; private final SecondaryIndexManager.Updater indexer; public LazilyCompactedRow(CompactionController controller, List<? 
extends ICountableColumnIterator> rows) { super(rows.get(0).getKey()); this.rows = rows; this.controller = controller; indexer = controller.cfs.indexManager.updaterFor(key); long maxDelTimestamp = Long.MIN_VALUE; for (OnDiskAtomIterator row : rows) { ColumnFamily cf = row.getColumnFamily(); maxDelTimestamp = Math.max(maxDelTimestamp, cf.deletionInfo().maxTimestamp()); if (emptyColumnFamily == null) emptyColumnFamily = cf; else emptyColumnFamily.delete(cf); } this.shouldPurge = controller.shouldPurge(key, maxDelTimestamp); try { indexAndWrite(null); } catch (IOException e) { throw new RuntimeException(e); } // reach into the reducer used during iteration to get column count, size, max column timestamp // (however, if there are zero columns, iterator() will not be called by ColumnIndexer and reducer will be null) columnStats = new ColumnStats(reducer == null ? 0 : reducer.columns, reducer == null ? Long.MAX_VALUE : reducer.minTimestampSeen, reducer == null ? maxDelTimestamp : Math.max(maxDelTimestamp, reducer.maxTimestampSeen), reducer == null ? new StreamingHistogram(SSTable.TOMBSTONE_HISTOGRAM_BIN_SIZE) : reducer.tombstones ); columnSerializedSize = reducer == null ? 0 : reducer.serializedSize; reducer = null; } private void indexAndWrite(DataOutput out) throws IOException { this.indexBuilder = new ColumnIndex.Builder(emptyColumnFamily, key.key, out); this.columnsIndex = indexBuilder.build(this); } public long write(DataOutput out) throws IOException { assert !closed; DataOutputBuffer clockOut = new DataOutputBuffer(); DeletionTime.serializer.serialize(emptyColumnFamily.deletionInfo().getTopLevelDeletion(), clockOut); long dataSize = clockOut.getLength() + columnSerializedSize + this.indexBuilder.getOpenedMarkerSize(); if (logger.isDebugEnabled()) logger.debug(String.format("clock / column sizes are %s / %s", clockOut.getLength(), columnSerializedSize)); assert dataSize > 0; out.writeLong(dataSize); out.write(clockOut.getData(), 0, clockOut.getLength()); out.writeInt(indexBuilder.writtenAtomCount()); // We rebuild the column index uselessly, but we need to do that because range tombstone markers depend // on indexing. If we're able to remove the two-phase compaction, we'll avoid that. indexAndWrite(out); long secondPassColumnSize = reducer == null ? 0 : reducer.serializedSize; assert secondPassColumnSize == columnSerializedSize : "originally calculated column size of " + columnSerializedSize + " but now it is " + secondPassColumnSize; close(); return dataSize; } public void update(MessageDigest digest) { assert !closed; // no special-case for rows.size == 1, we're actually skipping some bytes here so just // blindly updating everything wouldn't be correct DataOutputBuffer out = new DataOutputBuffer(); try { DeletionTime.serializer.serialize(emptyColumnFamily.deletionInfo().getTopLevelDeletion(), out); out.writeInt(columnStats.columnCount); digest.update(out.getData(), 0, out.getLength()); } catch (IOException e) { throw new AssertionError(e); } Iterator<OnDiskAtom> iter = iterator(); while (iter.hasNext()) { iter.next().updateDigest(digest); } close(); } public boolean isEmpty() { // need to clone emptyColumnFamily to avoid resetting the deletion time. See CASSANDRA-7808. boolean cfIrrelevant = shouldPurge ? 
ColumnFamilyStore.removeDeletedCF(emptyColumnFamily.cloneMeShallow(), controller.gcBefore) == null : !emptyColumnFamily.isMarkedForDelete(); // tombstones are relevant return cfIrrelevant && columnStats.columnCount == 0; } public int getEstimatedColumnCount() { int n = 0; for (ICountableColumnIterator row : rows) n += row.getColumnCount(); return n; } public AbstractType<?> getComparator() { return emptyColumnFamily.getComparator(); } public Iterator<OnDiskAtom> iterator() { for (ICountableColumnIterator row : rows) row.reset(); reducer = new Reducer(); Iterator<OnDiskAtom> iter = MergeIterator.get(rows, getComparator().onDiskAtomComparator, reducer); return Iterators.filter(iter, Predicates.notNull()); } public ColumnStats columnStats() { return columnStats; } public void close() { for (OnDiskAtomIterator row : rows) { try { row.close(); } catch (IOException e) { throw new RuntimeException(e); } } closed = true; } public DeletionInfo deletionInfo() { return emptyColumnFamily.deletionInfo(); } /** * @return the column index for this row. */ public ColumnIndex index() { return columnsIndex; } private class Reducer extends MergeIterator.Reducer<OnDiskAtom, OnDiskAtom> { // all columns reduced together will have the same name, so there will only be one column // in the container; we just want to leverage the conflict resolution code from CF ColumnFamily container = emptyColumnFamily.cloneMeShallow(); // tombstone reference; will be reconciled w/ column during getReduced RangeTombstone tombstone; long serializedSize = 4; // int for column count int columns = 0; long minTimestampSeen = Long.MAX_VALUE; long maxTimestampSeen = Long.MIN_VALUE; StreamingHistogram tombstones = new StreamingHistogram(SSTable.TOMBSTONE_HISTOGRAM_BIN_SIZE); public void reduce(OnDiskAtom current) { if (current instanceof RangeTombstone) { tombstone = (RangeTombstone)current; } else { IColumn column = (IColumn) current; container.addColumn(column); if (indexer != SecondaryIndexManager.nullUpdater && !column.isMarkedForDelete() && !container.getColumn(column.name()).equals(column)) { indexer.remove(column); } } } protected OnDiskAtom getReduced() { if (tombstone != null) { RangeTombstone t = tombstone; tombstone = null; if (shouldPurge && t.data.isGcAble(controller.gcBefore)) { indexBuilder.tombstoneTracker().update(t, true); return null; } else { serializedSize += t.serializedSizeForSSTable(); return t; } } else { ColumnFamily purged = PrecompactedRow.removeDeletedAndOldShards(key, shouldPurge, controller, container); if (purged == null || !purged.iterator().hasNext()) { // don't call clear() because that resets the deletion time. See CASSANDRA-7808. container = emptyColumnFamily.cloneMeShallow(); return null; } IColumn reduced = purged.iterator().next(); container = emptyColumnFamily.cloneMeShallow(); // PrecompactedRow.removeDeletedAndOldShards have only checked the top-level CF deletion times, // not the range tombstone. For that we use the columnIndexer tombstone tracker. if (indexBuilder.tombstoneTracker().isDeleted(reduced)) return null; serializedSize += reduced.serializedSizeForSSTable(); columns++; minTimestampSeen = Math.min(minTimestampSeen, reduced.minTimestamp()); maxTimestampSeen = Math.max(maxTimestampSeen, reduced.maxTimestamp()); int deletionTime = reduced.getLocalDeletionTime(); if (deletionTime < Integer.MAX_VALUE) { tombstones.update(deletionTime); } return reduced; } } } }
import * as React from 'react' export const MagnifyingGlassIcon = React.memo(() => ( <svg width="55" height="55" fill="none" xmlns="http://www.w3.org/2000/svg"> <path d="M21.5 24.644L25.517 27l-1.066-4.44L28 19.573l-4.674-.392L21.5 15l-1.826 4.181-4.674.392 3.543 2.987-1.06 4.44 4.017-2.356z" fill="#F96216" /> <path d="M21.481 6.783A14.697 14.697 0 0136.18 21.481c0 3.64-1.334 6.987-3.528 9.565l.61.61h1.787l11.306 11.306-3.392 3.392-11.306-11.306v-1.786l-.61-.61a14.734 14.734 0 01-9.565 3.527 14.697 14.697 0 110-29.396zm0 4.523A10.133 10.133 0 0011.306 21.48 10.133 10.133 0 0021.48 31.656a10.133 10.133 0 0010.175-10.175 10.133 10.133 0 00-10.175-10.175z" fill="#A6B6D9" /> </svg> ))
“LETTER FROM THE EDITOR” AS A WAY FOR STRATEGIC COMMUNICATION: A STUDY OF THE IMAGE OF A JOURNAL

Journals provide an open area for transferring and creating knowledge, which can be treated as social activities mediated by text. Which journal researchers submit their studies to, however, depends upon their perception of each journal’s image. Although journal ranking is an index of a journal’s image as perceived by readers, a journal can be pro-active and create the desired image through the use of “Letter from the Editor” messages. This study tries to understand how editors produce this image of credibility, and how they promote their ideas to the community by writing these letters. In our study, we collected sixty “Letter from the Editor” letters from MISQ, ISR and CACM, and applied a metadiscourse analysis to examine the linguistic forces. Our findings suggest that the editors of these journals adopt different strategies to establish or modify their image by adopting different linguistic devices in their letters to their readers, based on the existing image of their journal.
/**
 * Extension of lazy result slot that adds player access when possible
 */
public class PlayerSensitiveLazyResultSlot extends LazyResultSlot {
  private final PlayerEntity player;

  public PlayerSensitiveLazyResultSlot(PlayerEntity player, LazyResultInventory inventory, int xPosition, int yPosition) {
    super(inventory, xPosition, yPosition);
    this.player = player;
  }

  @Override
  public ItemStack getStack() {
    return this.inventory.getResult(player);
  }

  @Override
  public ItemStack decrStackSize(int amount) {
    ItemStack result = getStack().copy();
    if (!result.isEmpty()) {
      this.amountCrafted += Math.min(amount, result.getCount());
    }
    return result;
  }
}
import tensorflow_text
import tensorflow_hub as hub
import tensorflow as tf
import sys
sys.path.append("../")  # nopep8
import numpy as np
import math
import pickle
import os
from Classifier.model.dialogue_acts import Encoder
import Utils.functions as utils
from sentence_transformers import SentenceTransformer


def encode(texts, contexts):
    texts = [utils.simple_preprocess(text) for text in texts]
    contexts = [utils.simple_preprocess(context) for context in contexts]

    encoder_model = SentenceTransformer(
        'sentence-transformers/all-mpnet-base-v2')

    question_embeddings = encoder_model.encode(texts)
    context_embeddings = encoder_model.encode(contexts)

    return np.concatenate([np.asarray(question_embeddings),
                           np.asarray(context_embeddings)], axis=-1)
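For illustration, encoding a single query-context pair might look like the sketch below; the import path is an assumption based on the filename, and since all-mpnet-base-v2 produces 768-dimensional sentence embeddings, the concatenated output should be 1536-dimensional.

# Minimal usage sketch; assumes sentence-transformers is installed and
# that the module above is importable from the Sentence_Encoder package.
from Sentence_Encoder.query_encoder import encode

vectors = encode(
    texts=["How do I reset my password?"],
    contexts=["The user previously asked about account access."],
)
print(vectors.shape)  # expected: (1, 1536), two 768-dim embeddings side by side

One design note: the function instantiates SentenceTransformer on every call, so the model weights are reloaded per query; hoisting the model to module level would amortize that cost.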
/**
 * LogicalPredicate is a compound predicate in which
 * predicates are combined by "AND" and "OR".
 */
public class LogicalPredicate extends Predicate implements Serializable {

    private static final long serialVersionUID = 1L;

    private Predicate left, right;
    private Op op;

    public LogicalPredicate(Predicate left, Op op, Predicate right) {
        this.left = left;
        this.op = op;
        this.right = right;
    }

    /** Logical operators used to combine the two sub-predicates */
    public enum Op implements Serializable {
        AND, OR
    }

    /**
     * Filter tuple based on compound predicate
     * @param tuple
     *            The tuple to compare against
     * @return true if the compound predicate is true, false otherwise.
     */
    @Override
    public boolean filter(Tuple tuple) throws SQLError {
        boolean c1 = left.filter(tuple);
        boolean c2 = right.filter(tuple);
        if (op == Op.AND) {
            return c1 && c2;
        } else if (op == Op.OR) {
            return c1 || c2;
        } else {
            throw new IllegalStateException("impossible to reach here");
        }
    }
}
<reponame>shiqitao/AutoGraph<filename>model_compare.py<gh_stars>0 import pandas as pd from sklearn.metrics import accuracy_score from ModelAPPNP import main_model_appnp from ModelAPPNP2 import main_model_appnp as main_model_appnp_2 from ModelAPPNP3 import main_model_appnp as main_model_appnp_3 from ModelAPPNP4 import main_model_appnp as main_model_appnp_4 from ModelDGI import main_model_dgi from ModelGAT import main_model_gat from ModelGAT2 import main_model_gat as main_model_gat_2 from ModelGAT3 import main_model_gat as main_model_gat_3 from ModelGAT4 import main_model_gat as main_model_gat_4 from ModelGCN import main_model_gcn from ModelGCN2 import main_model_gcn as main_model_gcn_2 from ModelGCN3 import main_model_gcn as main_model_gcn_3 from ModelGCN4 import main_model_gcn as main_model_gcn_4 from Param import Param from tools import file_path, load_data class Model: @staticmethod def train_predict(): data_set = 'c' data = load_data(file_path("{0}_AOE.data".format(data_set))) params = [ # Param("ModelAPPNP", [10, 0.15, 16, "relu"]), # Param("ModelAPPNP", [10, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP", [10, 0.15, 32, "relu"]), # Param("ModelAPPNP", [10, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP", [10, 0.15, 64, "relu"]), # Param("ModelAPPNP", [10, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP", [10, 0.15, 128, "relu"]), # Param("ModelAPPNP", [10, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP", [20, 0.15, 16, "relu"]), # Param("ModelAPPNP", [20, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP", [20, 0.15, 32, "relu"]), # Param("ModelAPPNP", [20, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP", [20, 0.15, 64, "relu"]), # Param("ModelAPPNP", [20, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP", [20, 0.15, 128, "relu"]), # Param("ModelAPPNP", [20, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP", [30, 0.15, 16, "relu"]), # Param("ModelAPPNP", [30, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP", [30, 0.15, 32, "relu"]), # Param("ModelAPPNP", [30, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP", [30, 0.15, 64, "relu"]), # Param("ModelAPPNP", [30, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP", [30, 0.15, 128, "relu"]), # Param("ModelAPPNP", [30, 0.15, 128, "leaky_relu"]), # # Param("ModelAPPNP2", [10, 0.15, 16, "relu"]), # Param("ModelAPPNP2", [10, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP2", [10, 0.15, 32, "relu"]), # Param("ModelAPPNP2", [10, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP2", [10, 0.15, 64, "relu"]), # Param("ModelAPPNP2", [10, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP2", [10, 0.15, 128, "relu"]), # Param("ModelAPPNP2", [10, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP2", [20, 0.15, 16, "relu"]), # Param("ModelAPPNP2", [20, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP2", [20, 0.15, 32, "relu"]), # Param("ModelAPPNP2", [20, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP2", [20, 0.15, 64, "relu"]), # Param("ModelAPPNP2", [20, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP2", [20, 0.15, 128, "relu"]), # Param("ModelAPPNP2", [20, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP2", [30, 0.15, 16, "relu"]), # Param("ModelAPPNP2", [30, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP2", [30, 0.15, 32, "relu"]), # Param("ModelAPPNP2", [30, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP2", [30, 0.15, 64, "relu"]), # Param("ModelAPPNP2", [30, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP2", [30, 0.15, 128, "relu"]), # Param("ModelAPPNP2", [30, 0.15, 128, "leaky_relu"]), # # Param("ModelAPPNP3", [10, 0.15, 16, "relu"]), # Param("ModelAPPNP3", [10, 0.15, 16, "leaky_relu"]), # 
Param("ModelAPPNP3", [10, 0.15, 32, "relu"]), # Param("ModelAPPNP3", [10, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP3", [10, 0.15, 64, "relu"]), # Param("ModelAPPNP3", [10, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP3", [10, 0.15, 128, "relu"]), # Param("ModelAPPNP3", [10, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP3", [20, 0.15, 16, "relu"]), # Param("ModelAPPNP3", [20, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP3", [20, 0.15, 32, "relu"]), # Param("ModelAPPNP3", [20, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP3", [20, 0.15, 64, "relu"]), # Param("ModelAPPNP3", [20, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP3", [20, 0.15, 128, "relu"]), # Param("ModelAPPNP3", [20, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP3", [30, 0.15, 16, "relu"]), # Param("ModelAPPNP3", [30, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP3", [30, 0.15, 32, "relu"]), # Param("ModelAPPNP3", [30, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP3", [30, 0.15, 64, "relu"]), # Param("ModelAPPNP3", [30, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP3", [30, 0.15, 128, "relu"]), # Param("ModelAPPNP3", [30, 0.15, 128, "leaky_relu"]), # # Param("ModelAPPNP4", [10, 0.15, 16, "relu"]), # Param("ModelAPPNP4", [10, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP4", [10, 0.15, 32, "relu"]), # Param("ModelAPPNP4", [10, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP4", [10, 0.15, 64, "relu"]), # Param("ModelAPPNP4", [10, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP4", [10, 0.15, 128, "relu"]), # Param("ModelAPPNP4", [10, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP4", [20, 0.15, 16, "relu"]), # Param("ModelAPPNP4", [20, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP4", [20, 0.15, 32, "relu"]), # Param("ModelAPPNP4", [20, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP4", [20, 0.15, 64, "relu"]), # Param("ModelAPPNP4", [20, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP4", [20, 0.15, 128, "relu"]), # Param("ModelAPPNP4", [20, 0.15, 128, "leaky_relu"]), # Param("ModelAPPNP4", [30, 0.15, 16, "relu"]), # Param("ModelAPPNP4", [30, 0.15, 16, "leaky_relu"]), # Param("ModelAPPNP4", [30, 0.15, 32, "relu"]), # Param("ModelAPPNP4", [30, 0.15, 32, "leaky_relu"]), # Param("ModelAPPNP4", [30, 0.15, 64, "relu"]), # Param("ModelAPPNP4", [30, 0.15, 64, "leaky_relu"]), # Param("ModelAPPNP4", [30, 0.15, 128, "relu"]), # Param("ModelAPPNP4", [30, 0.15, 128, "leaky_relu"]), # # Param("ModelGCN", [1, [16, 16], "relu"]), # Param("ModelGCN", [1, [16, 16], "leaky_relu"]), # Param("ModelGCN", [1, [32, 32], "relu"]), # Param("ModelGCN", [1, [32, 32], "leaky_relu"]), # Param("ModelGCN", [1, [64, 64], "relu"]), # Param("ModelGCN", [1, [64, 64], "leaky_relu"]), # Param("ModelGCN", [1, [128, 128], "relu"]), # Param("ModelGCN", [1, [128, 128], "leaky_relu"]), # Param("ModelGCN", [2, [16, 16, 16], "relu"]), # Param("ModelGCN", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGCN", [2, [32, 32, 32], "relu"]), # Param("ModelGCN", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGCN", [2, [64, 64, 64], "relu"]), # Param("ModelGCN", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGCN", [2, [128, 128, 128], "relu"]), # Param("ModelGCN", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGCN", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGCN", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGCN", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGCN", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGCN", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGCN", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGCN", [3, [128, 128, 128, 128], "relu"]), # 
Param("ModelGCN", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGCN2", [1, [16, 16], "relu"]), # Param("ModelGCN2", [1, [16, 16], "leaky_relu"]), # Param("ModelGCN2", [1, [32, 32], "relu"]), # Param("ModelGCN2", [1, [32, 32], "leaky_relu"]), # Param("ModelGCN2", [1, [64, 64], "relu"]), # Param("ModelGCN2", [1, [64, 64], "leaky_relu"]), # Param("ModelGCN2", [1, [128, 128], "relu"]), # Param("ModelGCN2", [1, [128, 128], "leaky_relu"]), # Param("ModelGCN2", [2, [16, 16, 16], "relu"]), # Param("ModelGCN2", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGCN2", [2, [32, 32, 32], "relu"]), # Param("ModelGCN2", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGCN2", [2, [64, 64, 64], "relu"]), # Param("ModelGCN2", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGCN2", [2, [128, 128, 128], "relu"]), # Param("ModelGCN2", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGCN2", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGCN2", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGCN2", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGCN2", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGCN2", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGCN2", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGCN2", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGCN2", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGCN3", [1, [16, 16], "relu"]), # Param("ModelGCN3", [1, [16, 16], "leaky_relu"]), # Param("ModelGCN3", [1, [32, 32], "relu"]), # Param("ModelGCN3", [1, [32, 32], "leaky_relu"]), # Param("ModelGCN3", [1, [64, 64], "relu"]), # Param("ModelGCN3", [1, [64, 64], "leaky_relu"]), # Param("ModelGCN3", [1, [128, 128], "relu"]), # Param("ModelGCN3", [1, [128, 128], "leaky_relu"]), # Param("ModelGCN3", [2, [16, 16, 16], "relu"]), # Param("ModelGCN3", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGCN3", [2, [32, 32, 32], "relu"]), # Param("ModelGCN3", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGCN3", [2, [64, 64, 64], "relu"]), # Param("ModelGCN3", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGCN3", [2, [128, 128, 128], "relu"]), # Param("ModelGCN3", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGCN3", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGCN3", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGCN3", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGCN3", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGCN3", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGCN3", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGCN3", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGCN3", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGCN4", [1, [16, 16], "relu"]), # Param("ModelGCN4", [1, [16, 16], "leaky_relu"]), # Param("ModelGCN4", [1, [32, 32], "relu"]), # Param("ModelGCN4", [1, [32, 32], "leaky_relu"]), # Param("ModelGCN4", [1, [64, 64], "relu"]), # Param("ModelGCN4", [1, [64, 64], "leaky_relu"]), # Param("ModelGCN4", [1, [128, 128], "relu"]), # Param("ModelGCN4", [1, [128, 128], "leaky_relu"]), # Param("ModelGCN4", [2, [16, 16, 16], "relu"]), # Param("ModelGCN4", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGCN4", [2, [32, 32, 32], "relu"]), # Param("ModelGCN4", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGCN4", [2, [64, 64, 64], "relu"]), # Param("ModelGCN4", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGCN4", [2, [128, 128, 128], "relu"]), # Param("ModelGCN4", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGCN4", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGCN4", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGCN4", [3, [32, 32, 
32, 32], "relu"]), # Param("ModelGCN4", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGCN4", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGCN4", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGCN4", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGCN4", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGAT", [1, [16, 16], "relu"]), # Param("ModelGAT", [1, [16, 16], "leaky_relu"]), # Param("ModelGAT", [1, [32, 32], "relu"]), # Param("ModelGAT", [1, [32, 32], "leaky_relu"]), # Param("ModelGAT", [1, [64, 64], "relu"]), # Param("ModelGAT", [1, [64, 64], "leaky_relu"]), # Param("ModelGAT", [1, [128, 128], "relu"]), # Param("ModelGAT", [1, [128, 128], "leaky_relu"]), # Param("ModelGAT", [2, [16, 16, 16], "relu"]), # Param("ModelGAT", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGAT", [2, [32, 32, 32], "relu"]), # Param("ModelGAT", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGAT", [2, [64, 64, 64], "relu"]), # Param("ModelGAT", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGAT", [2, [128, 128, 128], "relu"]), # Param("ModelGAT", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGAT", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGAT", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGAT", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGAT", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGAT", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGAT", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGAT", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGAT", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGAT2", [1, [16, 16], "relu"]), # Param("ModelGAT2", [1, [16, 16], "leaky_relu"]), # Param("ModelGAT2", [1, [32, 32], "relu"]), # Param("ModelGAT2", [1, [32, 32], "leaky_relu"]), # Param("ModelGAT2", [1, [64, 64], "relu"]), # Param("ModelGAT2", [1, [64, 64], "leaky_relu"]), # Param("ModelGAT2", [1, [128, 128], "relu"]), # Param("ModelGAT2", [1, [128, 128], "leaky_relu"]), # Param("ModelGAT2", [2, [16, 16, 16], "relu"]), # Param("ModelGAT2", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGAT2", [2, [32, 32, 32], "relu"]), # Param("ModelGAT2", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGAT2", [2, [64, 64, 64], "relu"]), # Param("ModelGAT2", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGAT2", [2, [128, 128, 128], "relu"]), # Param("ModelGAT2", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGAT2", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGAT2", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGAT2", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGAT2", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGAT2", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGAT2", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGAT2", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGAT2", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGAT3", [1, [16, 16], "relu"]), # Param("ModelGAT3", [1, [16, 16], "leaky_relu"]), # Param("ModelGAT3", [1, [32, 32], "relu"]), # Param("ModelGAT3", [1, [32, 32], "leaky_relu"]), # Param("ModelGAT3", [1, [64, 64], "relu"]), # Param("ModelGAT3", [1, [64, 64], "leaky_relu"]), # Param("ModelGAT3", [1, [128, 128], "relu"]), # Param("ModelGAT3", [1, [128, 128], "leaky_relu"]), # Param("ModelGAT3", [2, [16, 16, 16], "relu"]), # Param("ModelGAT3", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGAT3", [2, [32, 32, 32], "relu"]), # Param("ModelGAT3", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGAT3", [2, [64, 64, 64], "relu"]), # Param("ModelGAT3", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGAT3", [2, [128, 
128, 128], "relu"]), # Param("ModelGAT3", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGAT3", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGAT3", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGAT3", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGAT3", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGAT3", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGAT3", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGAT3", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGAT3", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelGAT4", [1, [16, 16], "relu"]), # Param("ModelGAT4", [1, [16, 16], "leaky_relu"]), # Param("ModelGAT4", [1, [32, 32], "relu"]), # Param("ModelGAT4", [1, [32, 32], "leaky_relu"]), # Param("ModelGAT4", [1, [64, 64], "relu"]), # Param("ModelGAT4", [1, [64, 64], "leaky_relu"]), # Param("ModelGAT4", [1, [128, 128], "relu"]), # Param("ModelGAT4", [1, [128, 128], "leaky_relu"]), # Param("ModelGAT4", [2, [16, 16, 16], "relu"]), # Param("ModelGAT4", [2, [16, 16, 16], "leaky_relu"]), # Param("ModelGAT4", [2, [32, 32, 32], "relu"]), # Param("ModelGAT4", [2, [32, 32, 32], "leaky_relu"]), # Param("ModelGAT4", [2, [64, 64, 64], "relu"]), # Param("ModelGAT4", [2, [64, 64, 64], "leaky_relu"]), # Param("ModelGAT4", [2, [128, 128, 128], "relu"]), # Param("ModelGAT4", [2, [128, 128, 128], "leaky_relu"]), # Param("ModelGAT4", [3, [16, 16, 16, 16], "relu"]), # Param("ModelGAT4", [3, [16, 16, 16, 16], "leaky_relu"]), # Param("ModelGAT4", [3, [32, 32, 32, 32], "relu"]), # Param("ModelGAT4", [3, [32, 32, 32, 32], "leaky_relu"]), # Param("ModelGAT4", [3, [64, 64, 64, 64], "relu"]), # Param("ModelGAT4", [3, [64, 64, 64, 64], "leaky_relu"]), # Param("ModelGAT4", [3, [128, 128, 128, 128], "relu"]), # Param("ModelGAT4", [3, [128, 128, 128, 128], "leaky_relu"]), # # Param("ModelAPPNP", [10, 0.15, 256, "relu"]), # Param("ModelAPPNP", [10, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP", [20, 0.15, 256, "relu"]), # Param("ModelAPPNP", [20, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP", [30, 0.15, 256, "relu"]), # Param("ModelAPPNP", [30, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP2", [10, 0.15, 256, "relu"]), # Param("ModelAPPNP2", [10, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP2", [20, 0.15, 256, "relu"]), # Param("ModelAPPNP2", [20, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP2", [30, 0.15, 256, "relu"]), # Param("ModelAPPNP2", [30, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP3", [10, 0.15, 256, "relu"]), # Param("ModelAPPNP3", [10, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP3", [20, 0.15, 256, "relu"]), # Param("ModelAPPNP3", [20, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP3", [30, 0.15, 256, "relu"]), # Param("ModelAPPNP3", [30, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP4", [10, 0.15, 256, "relu"]), # Param("ModelAPPNP4", [10, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP4", [20, 0.15, 256, "relu"]), # Param("ModelAPPNP4", [20, 0.15, 256, "leaky_relu"]), # Param("ModelAPPNP4", [30, 0.15, 256, "relu"]), # Param("ModelAPPNP4", [30, 0.15, 256, "leaky_relu"]), # # Param("ModelGCN", [1, [256, 256], "relu"]), # Param("ModelGCN", [1, [256, 256], "leaky_relu"]), # Param("ModelGCN", [2, [256, 256, 256], "relu"]), # Param("ModelGCN", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGCN", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGCN", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGCN2", [1, [256, 256], "relu"]), # Param("ModelGCN2", [1, [256, 256], "leaky_relu"]), # Param("ModelGCN2", [2, [256, 256, 256], "relu"]), # Param("ModelGCN2", [2, 
[256, 256, 256], "leaky_relu"]), # Param("ModelGCN2", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGCN2", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGCN3", [1, [256, 256], "relu"]), # Param("ModelGCN3", [1, [256, 256], "leaky_relu"]), # Param("ModelGCN3", [2, [256, 256, 256], "relu"]), # Param("ModelGCN3", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGCN3", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGCN3", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGCN4", [1, [256, 256], "relu"]), # Param("ModelGCN4", [1, [256, 256], "leaky_relu"]), # Param("ModelGCN4", [2, [256, 256, 256], "relu"]), # Param("ModelGCN4", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGCN4", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGCN4", [3, [256, 256, 256, 256], "leaky_relu"]), # # Param("ModelGAT", [1, [256, 256], "relu"]), # Param("ModelGAT", [1, [256, 256], "leaky_relu"]), # Param("ModelGAT", [2, [256, 256, 256], "relu"]), # Param("ModelGAT", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGAT", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGAT", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGAT2", [1, [256, 256], "relu"]), # Param("ModelGAT2", [1, [256, 256], "leaky_relu"]), # Param("ModelGAT2", [2, [256, 256, 256], "relu"]), # Param("ModelGAT2", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGAT2", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGAT2", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGAT3", [1, [256, 256], "relu"]), # Param("ModelGAT3", [1, [256, 256], "leaky_relu"]), # Param("ModelGAT3", [2, [256, 256, 256], "relu"]), # Param("ModelGAT3", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGAT3", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGAT3", [3, [256, 256, 256, 256], "leaky_relu"]), # Param("ModelGAT4", [1, [256, 256], "relu"]), # Param("ModelGAT4", [1, [256, 256], "leaky_relu"]), # Param("ModelGAT4", [2, [256, 256, 256], "relu"]), # Param("ModelGAT4", [2, [256, 256, 256], "leaky_relu"]), # Param("ModelGAT4", [3, [256, 256, 256, 256], "relu"]), # Param("ModelGAT4", [3, [256, 256, 256, 256], "leaky_relu"]), # # Param("ModelDGI", [16]), # Param("ModelDGI", [32]), # Param("ModelDGI", [64]), # Param("ModelDGI", [128]), # Param("ModelDGI", [256]), ] f = open('result.txt', 'w', encoding='utf-8') for param in params: average_loss_train = 0 average_loss_valid = 0 average_accuracy_train = 0 average_accuracy_valid = 0 average_accuracy_test = 0 average_epoch = 0 for _ in range(5): if param.model == "ModelGCN": result = main_model_gcn( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelGAT": result = main_model_gat( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelAPPNP": result = main_model_appnp( data=data, K=param.param[0], alpha=param.param[1], hidden=param.param[2], activation=param.param[3], if_all=False ) elif param.model == "ModelGCN2": result = main_model_gcn_2( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelGAT2": result = main_model_gat_2( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelAPPNP2": result = main_model_appnp_2( data=data, K=param.param[0], alpha=param.param[1], hidden=param.param[2], activation=param.param[3], if_all=False ) elif param.model == "ModelGCN3": 
result = main_model_gcn_3( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelGAT3": result = main_model_gat_3( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelAPPNP3": result = main_model_appnp_3( data=data, K=param.param[0], alpha=param.param[1], hidden=param.param[2], activation=param.param[3], if_all=False ) elif param.model == "ModelGCN4": result = main_model_gcn_4( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelGAT4": result = main_model_gat_4( data=data, num_layers=param.param[0], hidden_list=param.param[1], activation=param.param[2], if_all=False ) elif param.model == "ModelAPPNP4": result = main_model_appnp_4( data=data, K=param.param[0], alpha=param.param[1], hidden=param.param[2], activation=param.param[3], if_all=False ) elif param.model == "ModelDGI": result = main_model_dgi( data=data, hidden=param.param[0], if_all=False ) else: raise ValueError("Model name error: {0}".format(param[0])) solution = pd.read_csv('../data/{0}/test_label.tsv'.format(data_set), sep='\t')['label'] acc_test = accuracy_score(solution, result.result) average_loss_train += result.loss_train / 5.0 average_loss_valid += result.loss_valid / 5.0 average_accuracy_train += result.acc_train / 5.0 average_accuracy_valid += result.acc_valid / 5.0 average_accuracy_test += acc_test / 5.0 average_epoch += result.epoch / 5.0 f.write("Loss Train 166092: {0:.4f}\n".format(result.loss_train)) f.write("Loss Valid 166092: {0:.4f}\n".format(result.loss_valid)) f.write("Acc Train 166092: {0:.4f}\n".format(result.acc_train)) f.write("Acc Valid 166092: {0:.4f}\n".format(result.acc_valid)) f.write("Acc Test 166092: {0:.4f}\n".format(acc_test)) f.write("Epoch 166092: {0:.4f}\n".format(result.epoch)) f.flush() f.write("Average Loss Train 166092: {0:.4f}\n".format(average_loss_train)) f.write("Average Loss Valid 166092: {0:.4f}\n".format(average_loss_valid)) f.write("Average Acc Train 166092: {0:.4f}\n".format(average_accuracy_train)) f.write("Average Acc Valid 166092: {0:.4f}\n".format(average_accuracy_valid)) f.write("Average Acc Test 166092: {0:.4f}\n".format(average_accuracy_test)) f.write("Average Epoch 166092: {0:.4f}\n".format(average_epoch)) f.flush() f.close() Model().train_predict()
// WithContentType is a configuration Option which sets the content-type of the file being saved
func WithContentType(contentType string) ConfigOption {
	return func(c *Config) {
		c.ContentType = contentType
	}
}
# # Copyright 2009-2015 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Implements: convert_tex_to_utf8: converts latex symbols into UTF8 encodings. """ import re _utf8enc2latex_mapping_simple = { # found in uft8enc.dfu (LaTeX) u'&': '\&', u'\u00A0': r'~', u'\u00C0': r'\`A', u'\u00C1': r'\A', u'\u00C2': r'\^A', u'\u00C3': r'\~A', u'\u00C4': r'\"A', u'\u00C5': r'\r A', u'\u00C6': r'\AE', u'\u00C7': r'\c C', u'\u00C8': r'\`E', u'\u00C9': r'\E', u'\u00CA': r'\^E', u'\u00CB': r'\"E', u'\u00CC': r'\`I', u'\u00CD': r'\I', u'\u00CE': r'\^I', u'\u00CF': r'\"I', u'\u00D0': r'\DH', u'\u00D1': r'\~N', u'\u00D2': r'\`O', u'\u00D3': r'\O', u'\u00D4': r'\^O', u'\u00D5': r'\~O', u'\u00D6': r'\"O', u'\u00D7': r'\texttimes', u'\u00D8': r'\O', u'\u00D9': r'\`U', u'\u00DA': r'\U', u'\u00DB': r'\^U', u'\u00DC': r'\"U', u'\u00DD': r'\Y', u'\u00DE': r'\TH', u'\u00DF': r'\ss', u'\u00E0': r'\`a', u'\u00E1': r'\a', u'\u00E2': r'\^a', u'\u00E3': r'\~a', u'\u00E4': r'\"a', u'\u00E5': r'\r a', u'\u00E6': r'\ae', u'\u00E7': r'\c c', u'\u00E8': r'\`e', u'\u00E9': r'\'e', u'\u00EA': r'\^e', u'\u00EB': r'\"e', u'\u00EC': r'\`i', u'\u00ED': r'\i', u'\u00EE': r'\^\i', u'\u00EF': r'\"\i', u'\u00F0': r'\dh', u'\u00F1': r'\~n', u'\u00F2': r'\`o', u'\u00F3': r'\o', u'\u00F4': r'\^o', u'\u00F5': r'\~o', u'\u00F6': r'\"o', u'\u00F7': r'\textdiv', u'\u00F8': r'\o', u'\u00F9': r'\`u', u'\u00FA': r'\u', u'\u00FB': r'\^u', u'\u00FC': r'\"u', u'\u00FD': r'\y', u'\u00FE': r'\th', u'\u00FF': r'\"y', u'\u0102': r'\u A', u'\u0103': r'\u a', u'\u0104': r'\k A', u'\u0105': r'\k a', u'\u0106': r'\a\'C', u'\u0107': r'\'c', u'\u010C': r'\vC', u'\u010D': r'\vc', u'\u010E': r'\vD', u'\u010F': r'\vd', u'\u0110': r'\DJ', u'\u0111': r'\dj', u'\u0118': r'\k E', u'\u0119': r'\k e', u'\u011A': r'\vE', u'\u011B': r'\ve', u'\u011E': r'\u G', u'\u011F': r'\u g', u'\u0130': r'\.I', u'\u0131': r'\i', u'\u0139': r'\L', u'\u013A': r'\l', u'\u013D': r'\vL', u'\u013E': r'\vl', u'\u0141': r'\L', u'\u0142': r'\l', u'\u0143': r'\N', u'\u0144': r'\n', u'\u0147': r'\vN', u'\u0148': r'\vn', u'\u014A': r'\NG', u'\u014B': r'\ng', u'\u0150': r'\H O', u'\u0151': r'\H o', u'\u0152': r'\OE', u'\u0153': r'\oe', u'\u0154': r'\R', u'\u0155': r'\r', u'\u0158': r'\vR', u'\u0159': r'\vr', u'\u015A': r'\S', u'\u015B': r'\s', u'\u015E': r'\c S', u'\u015F': r'\c s', u'\u0160': r'\vS', u'\u0161': r'\vs', u'\u0162': r'\c T', u'\u0163': r'\c t', u'\u0164': r'\vT', u'\u0165': r'\vt', u'\u016E': r'\r U', u'\u016F': r'\r u', u'\u0170': r'\H U', u'\u0171': r'\H u', u'\u0178': r'\"Y', u'\u0179': r'\Z', u'\u017A': r'\z', u'\u017B': r'\.Z', u'\u017C': r'\.z', u'\u017D': r'\vZ', u'\u017E': r'\vz', } _latex2utf8enc_mapping_simple = {} for unicode_char in _utf8enc2latex_mapping_simple.keys(): _latex2utf8enc_mapping_simple[ _utf8enc2latex_mapping_simple[unicode_char]] = unicode_char def convert_tex_to_utf8(source): """Converts a string with latex symbols into utf8 encoded string.""" source = source.replace('{', '').replace('}', '') for latex_entity in _latex2utf8enc_mapping_simple.keys(): source 
= source.replace(latex_entity, _latex2utf8enc_mapping_simple[latex_entity]) return source def preprocess_xml(source): #source = convert_tex_to_utf8(source) #source = strip_commands(source) source = fix_white_space(source) source = source.replace('{{', '{') source = source.replace('}}', '}') return source def fix_white_space(source): def _tilde2wsp(hit): return hit.group(0)[0] + ' ' ttable = [(r'\ ', ' '), (r'\!', ' '), ] for a, b in ttable: source = source.replace(a, b) wsp_tilde = re.compile(r'[^/\\]~') return wsp_tilde.sub(_tilde2wsp, source).replace('\~', '~') def strip_commands(source): oldstyle_cmd = re.compile(r'{\\[a-zA-Z]{2,}') newstyle_cmd = re.compile(r'\\[a-zA-Z]+{') source = oldstyle_cmd.sub('{', source) source = newstyle_cmd.sub('{', source) return source def split_multiple(value): """Used to split multiple terms separated by comma (e.g. keywords).""" result = list() for item in value.split(','): item = item.strip() if item: result.append(item) return result
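As a quick usage sketch of convert_tex_to_utf8 (the module name latex_utf8 is hypothetical; any import path to the file above works): braces are stripped first, then each LaTeX escape found in the reverse mapping is substituted with its Unicode counterpart.

from latex_utf8 import convert_tex_to_utf8  # hypothetical module name

# '{' and '}' are removed, then the escape \"o is replaced by its
# Unicode counterpart from the reverse mapping.
print(convert_tex_to_utf8(r'Schr{\"o}dinger'))  # -> Schrödinger

One caveat worth noting: the replacements run in plain dict order, so an escape that is a prefix of a longer one (for example \o versus \oe) can clobber the longer escape; sorting the keys by decreasing length before replacing would make the conversion more robust.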
//=======================================================================================
// Method:      CreateChassis
// Description: Create a chassis from the inputs
// Returns:     IChassis* - the created chassis
//=======================================================================================
IChassis* ChassisFactory::CreateChassis
(
    CHASSIS_TYPE              type,
    float                     wheelDiameter,
    float                     wheelBase,
    float                     track,
    std::vector<DragonTalon*> motorControllers
)
{
    switch ( type )
    {
        case TANK_CHASSIS:
            ChassisFactory::m_chassis = new RegularChassis( wheelDiameter, wheelBase, track, motorControllers );
            break;

        default:
            printf( "==>> ChassisFactory::CreateChassis Invalid type %d \n", type );
            break;
    }
    return ChassisFactory::m_chassis;
}
#!/usr/bin/env node
import * as fs from "fs-extra";
import { spawn } from "child_process";
import * as paths from "./paths";

// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = "production";
process.env.NODE_ENV = "production";

// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on("unhandledRejection", (err) => {
  throw err;
});

fs.removeSync(paths.templateNodeModules);
fs.ensureSymlinkSync(paths.appNodeModules, paths.templateNodeModules);

const runCommand = (command: string) =>
  new Promise((resolve, reject) => {
    const executedCommand = spawn(command, {
      stdio: "inherit",
      shell: true,
    });

    executedCommand.on("error", (error) => {
      reject(error);
    });

    executedCommand.on("exit", (code) => {
      if (code === 0) {
        resolve(code);
      } else {
        reject();
      }
    });
  }).catch((error) => {
    console.error(error);
    process.exit(1);
  });

const buildTemplate = () => {
  const packageManager = fs.existsSync(paths.appYarnLock) ? "yarn" : "npx";
  fs.emptyDirSync(paths.templateBuild);
  process.chdir(paths.template);
  return runCommand(`${packageManager} build`);
};

buildTemplate().then(() => {
  fs.copySync(paths.templateBuild, paths.appBuild);
  fs.copySync(paths.appPublic, paths.appBuild);
  console.log(`${paths.appBuild} was created and can be deployed.`);
});