text
stringlengths
2
100k
meta
dict
const { red, yellow } = require('chalk')

// Prefix prepended to every message this plugin emits.
const prefix = `[vue-server-renderer-webpack-plugin]`

// Emit an error-level warning in red on stderr.
const warn = exports.warn = msg => console.error(red(`${prefix} ${msg}\n`))

// Emit an informational tip in yellow on stdout.
const tip = exports.tip = msg => console.log(yellow(`${prefix} ${msg}\n`))

/**
 * Sanity-check a webpack compiler configuration for server-side rendering:
 * target must be "node", output must be commonjs2, and externalizing
 * dependencies is recommended.
 */
export const validate = compiler => {
  const options = compiler.options

  if (options.target !== 'node') {
    warn('webpack config `target` should be "node".')
  }

  const output = options.output
  if (output && output.libraryTarget !== 'commonjs2') {
    warn('webpack config `output.libraryTarget` should be "commonjs2".')
  }

  if (!options.externals) {
    tip(
      'It is recommended to externalize dependencies in the server build for ' +
      'better build performance.'
    )
  }
}

export { isJS, isCSS } from '../util'
{ "pile_set_name": "Github" }
//---------------------------------------------------------------------------// // Copyright (c) 2013-2015 Kyle Lutz <[email protected]> // // Distributed under the Boost Software License, Version 1.0 // See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt // // See http://boostorg.github.com/compute for more information. //---------------------------------------------------------------------------// // deprecated, use <boost/compute/image/image_sampler.hpp> instead #include <boost/compute/image/image_sampler.hpp>
{ "pile_set_name": "Github" }
// Copyright (C) 2017-2019 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // { dg-do compile { target c++11 } } // { dg-require-normal-mode "" } #include <vector> // PR libstdc++/80553 struct DeletedDtor { ~DeletedDtor() = delete; }; class PrivateDtor { ~PrivateDtor() { } }; void test01() { std::vector<DeletedDtor> v; // { dg-error "here" } } void test02() { std::vector<PrivateDtor> v; // { dg-error "here" } } // { dg-error "value type is destructible" "" { target *-*-* } 0 }
{ "pile_set_name": "Github" }
// Copyright 2009,2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// NetBSD system calls.
// This file is compiled as ordinary Go code,
// but it is also input to mksyscall,
// which parses the //sys lines and generates system call stubs.
// Note that sometimes we use a lowercase //sys name and wrap
// it in our own nicer implementation, either here or in
// syscall_bsd.go or syscall_unix.go.

package unix

import (
	"syscall"
	"unsafe"
)

// SockaddrDatalink implements the Sockaddr interface for AF_LINK type sockets.
type SockaddrDatalink struct {
	Len    uint8
	Family uint8
	Index  uint16
	Type   uint8
	Nlen   uint8
	Alen   uint8
	Slen   uint8
	Data   [12]int8
	raw    RawSockaddrDatalink
}

func Syscall9(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno)

// sysctlNodes returns the list of sysctl nodes directly below the given MIB
// by issuing a CTL_QUERY request against it.
func sysctlNodes(mib []_C_int) (nodes []Sysctlnode, err error) {
	var olen uintptr

	// Get a list of all sysctl nodes below the given MIB by performing
	// a sysctl for the given MIB with CTL_QUERY appended.
	mib = append(mib, CTL_QUERY)
	qnode := Sysctlnode{Flags: SYSCTL_VERS_1}
	qp := (*byte)(unsafe.Pointer(&qnode))
	sz := unsafe.Sizeof(qnode)
	// First call with a nil buffer only reports the required size in olen.
	if err = sysctl(mib, nil, &olen, qp, sz); err != nil {
		return nil, err
	}

	// Now that we know the size, get the actual nodes.
	nodes = make([]Sysctlnode, olen/sz)
	np := (*byte)(unsafe.Pointer(&nodes[0]))
	if err = sysctl(mib, np, &olen, qp, sz); err != nil {
		return nil, err
	}

	return nodes, nil
}

// nametomib translates a dotted sysctl name (e.g. "kern.ostype") into its
// numeric MIB OID by walking the node tree one component at a time.
// Returns EINVAL if any component cannot be resolved.
func nametomib(name string) (mib []_C_int, err error) {
	// Split name into components.
	var parts []string
	last := 0
	for i := 0; i < len(name); i++ {
		if name[i] == '.' {
			parts = append(parts, name[last:i])
			last = i + 1
		}
	}
	parts = append(parts, name[last:])

	// Discover the nodes and construct the MIB OID.
	for partno, part := range parts {
		nodes, err := sysctlNodes(mib)
		if err != nil {
			return nil, err
		}
		for _, node := range nodes {
			// Node names are NUL-padded fixed-size arrays; strip the padding.
			n := make([]byte, 0)
			for i := range node.Name {
				if node.Name[i] != 0 {
					n = append(n, byte(node.Name[i]))
				}
			}
			if string(n) == part {
				mib = append(mib, _C_int(node.Num))
				break
			}
		}
		// If no node matched this component, mib did not grow.
		if len(mib) != partno+1 {
			return nil, EINVAL
		}
	}

	return mib, nil
}

//sysnb pipe() (fd1 int, fd2 int, err error)

// Pipe stores the two file descriptors of a new pipe in p.
func Pipe(p []int) (err error) {
	if len(p) != 2 {
		return EINVAL
	}
	p[0], p[1], err = pipe()
	return
}

//sys getdents(fd int, buf []byte) (n int, err error)

// Getdirentries reads directory entries from fd into buf.
// NOTE(review): basep is ignored here and getdents is used directly —
// callers relying on basep semantics should verify this is intended.
func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) {
	return getdents(fd, buf)
}

const ImplementsGetwd = true

//sys Getcwd(buf []byte) (n int, err error) = SYS___GETCWD

// Getwd returns the current working directory via the __getcwd syscall.
func Getwd() (string, error) {
	var buf [PathMax]byte
	_, err := Getcwd(buf[0:])
	if err != nil {
		return "", err
	}
	// Trim at the first NUL; an empty result is invalid.
	n := clen(buf[:])
	if n < 1 {
		return "", EINVAL
	}
	return string(buf[:n]), nil
}

// TODO
// sendfile is not implemented on this platform; always returns ENOSYS.
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
	return -1, ENOSYS
}

// setattrlistTimes is a Darwin-only mechanism; unsupported here.
func setattrlistTimes(path string, times []Timespec, flags int) error {
	// used on Darwin for UtimesNano
	return ENOSYS
}

//sys ioctl(fd int, req uint, arg uintptr) (err error)

// ioctl itself should not be exposed directly, but additional get/set
// functions for specific types are permissible.

// IoctlSetInt performs an ioctl operation which sets an integer value
// on fd, using the specified request number.
func IoctlSetInt(fd int, req uint, value int) error {
	return ioctl(fd, req, uintptr(value))
}

// IoctlSetWinsize sets the terminal window size on fd.
func IoctlSetWinsize(fd int, req uint, value *Winsize) error {
	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
}

// IoctlSetTermios sets the termios settings on fd.
func IoctlSetTermios(fd int, req uint, value *Termios) error {
	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
}

// IoctlGetInt performs an ioctl operation which gets an integer value
// from fd, using the specified request number.
func IoctlGetInt(fd int, req uint) (int, error) {
	var value int
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return value, err
}

// IoctlGetWinsize retrieves the terminal window size from fd.
func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
	var value Winsize
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return &value, err
}

// IoctlGetTermios retrieves the termios settings from fd.
func IoctlGetTermios(fd int, req uint) (*Termios, error) {
	var value Termios
	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
	return &value, err
}

// Uname fills uname with system identification read via sysctl
// (kern.ostype, kern.hostname, kern.osrelease, kern.version, hw.machine).
func Uname(uname *Utsname) error {
	mib := []_C_int{CTL_KERN, KERN_OSTYPE}
	n := unsafe.Sizeof(uname.Sysname)
	if err := sysctl(mib, &uname.Sysname[0], &n, nil, 0); err != nil {
		return err
	}

	mib = []_C_int{CTL_KERN, KERN_HOSTNAME}
	n = unsafe.Sizeof(uname.Nodename)
	if err := sysctl(mib, &uname.Nodename[0], &n, nil, 0); err != nil {
		return err
	}

	mib = []_C_int{CTL_KERN, KERN_OSRELEASE}
	n = unsafe.Sizeof(uname.Release)
	if err := sysctl(mib, &uname.Release[0], &n, nil, 0); err != nil {
		return err
	}

	mib = []_C_int{CTL_KERN, KERN_VERSION}
	n = unsafe.Sizeof(uname.Version)
	if err := sysctl(mib, &uname.Version[0], &n, nil, 0); err != nil {
		return err
	}

	// The version might have newlines or tabs in it, convert them to
	// spaces.
	for i, b := range uname.Version {
		if b == '\n' || b == '\t' {
			// A trailing whitespace byte becomes the terminator instead.
			if i == len(uname.Version)-1 {
				uname.Version[i] = 0
			} else {
				uname.Version[i] = ' '
			}
		}
	}

	mib = []_C_int{CTL_HW, HW_MACHINE}
	n = unsafe.Sizeof(uname.Machine)
	if err := sysctl(mib, &uname.Machine[0], &n, nil, 0); err != nil {
		return err
	}

	return nil
}

/*
 * Exposed directly
 */
//sys Access(path string, mode uint32) (err error)
//sys Adjtime(delta *Timeval, olddelta *Timeval) (err error)
//sys Chdir(path string) (err error)
//sys Chflags(path string, flags int) (err error)
//sys Chmod(path string, mode uint32) (err error)
//sys Chown(path string, uid int, gid int) (err error)
//sys Chroot(path string) (err error)
//sys Close(fd int) (err error)
//sys Dup(fd int) (nfd int, err error)
//sys Dup2(from int, to int) (err error)
//sys Exit(code int)
//sys Faccessat(dirfd int, path string, mode uint32, flags int) (err error)
//sys Fadvise(fd int, offset int64, length int64, advice int) (err error) = SYS_POSIX_FADVISE
//sys Fchdir(fd int) (err error)
//sys Fchflags(fd int, flags int) (err error)
//sys Fchmod(fd int, mode uint32) (err error)
//sys Fchmodat(dirfd int, path string, mode uint32, flags int) (err error)
//sys Fchown(fd int, uid int, gid int) (err error)
//sys Flock(fd int, how int) (err error)
//sys Fpathconf(fd int, name int) (val int, err error)
//sys Fstat(fd int, stat *Stat_t) (err error)
//sys Fstatat(fd int, path string, stat *Stat_t, flags int) (err error)
//sys Fsync(fd int) (err error)
//sys Ftruncate(fd int, length int64) (err error)
//sysnb Getegid() (egid int)
//sysnb Geteuid() (uid int)
//sysnb Getgid() (gid int)
//sysnb Getpgid(pid int) (pgid int, err error)
//sysnb Getpgrp() (pgrp int)
//sysnb Getpid() (pid int)
//sysnb Getppid() (ppid int)
//sys Getpriority(which int, who int) (prio int, err error)
//sysnb Getrlimit(which int, lim *Rlimit) (err error)
//sysnb Getrusage(who int, rusage *Rusage) (err error)
//sysnb Getsid(pid int) (sid int, err error)
//sysnb Gettimeofday(tv *Timeval) (err
error) //sysnb Getuid() (uid int) //sys Issetugid() (tainted bool) //sys Kill(pid int, signum syscall.Signal) (err error) //sys Kqueue() (fd int, err error) //sys Lchown(path string, uid int, gid int) (err error) //sys Link(path string, link string) (err error) //sys Listen(s int, backlog int) (err error) //sys Lstat(path string, stat *Stat_t) (err error) //sys Mkdir(path string, mode uint32) (err error) //sys Mkfifo(path string, mode uint32) (err error) //sys Mknod(path string, mode uint32, dev int) (err error) //sys Nanosleep(time *Timespec, leftover *Timespec) (err error) //sys Open(path string, mode int, perm uint32) (fd int, err error) //sys Pathconf(path string, name int) (val int, err error) //sys Pread(fd int, p []byte, offset int64) (n int, err error) //sys Pwrite(fd int, p []byte, offset int64) (n int, err error) //sys read(fd int, p []byte) (n int, err error) //sys Readlink(path string, buf []byte) (n int, err error) //sys Rename(from string, to string) (err error) //sys Revoke(path string) (err error) //sys Rmdir(path string) (err error) //sys Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK //sys Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error) //sysnb Setegid(egid int) (err error) //sysnb Seteuid(euid int) (err error) //sysnb Setgid(gid int) (err error) //sysnb Setpgid(pid int, pgid int) (err error) //sys Setpriority(which int, who int, prio int) (err error) //sysnb Setregid(rgid int, egid int) (err error) //sysnb Setreuid(ruid int, euid int) (err error) //sysnb Setrlimit(which int, lim *Rlimit) (err error) //sysnb Setsid() (pid int, err error) //sysnb Settimeofday(tp *Timeval) (err error) //sysnb Setuid(uid int) (err error) //sys Stat(path string, stat *Stat_t) (err error) //sys Symlink(path string, link string) (err error) //sys Sync() (err error) //sys Truncate(path string, length int64) (err error) //sys Umask(newmask int) (oldmask int) //sys Unlink(path string) (err error) //sys Unmount(path 
string, flags int) (err error) //sys write(fd int, p []byte) (n int, err error) //sys mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error) //sys munmap(addr uintptr, length uintptr) (err error) //sys readlen(fd int, buf *byte, nbuf int) (n int, err error) = SYS_READ //sys writelen(fd int, buf *byte, nbuf int) (n int, err error) = SYS_WRITE //sys utimensat(dirfd int, path string, times *[2]Timespec, flags int) (err error) /* * Unimplemented */ // ____semctl13 // __clone // __fhopen40 // __fhstat40 // __fhstatvfs140 // __fstat30 // __getcwd // __getfh30 // __getlogin // __lstat30 // __mount50 // __msgctl13 // __msync13 // __ntp_gettime30 // __posix_chown // __posix_fchown // __posix_lchown // __posix_rename // __setlogin // __shmctl13 // __sigaction_sigtramp // __sigaltstack14 // __sigpending14 // __sigprocmask14 // __sigsuspend14 // __sigtimedwait // __stat30 // __syscall // __vfork14 // _ksem_close // _ksem_destroy // _ksem_getvalue // _ksem_init // _ksem_open // _ksem_post // _ksem_trywait // _ksem_unlink // _ksem_wait // _lwp_continue // _lwp_create // _lwp_ctl // _lwp_detach // _lwp_exit // _lwp_getname // _lwp_getprivate // _lwp_kill // _lwp_park // _lwp_self // _lwp_setname // _lwp_setprivate // _lwp_suspend // _lwp_unpark // _lwp_unpark_all // _lwp_wait // _lwp_wakeup // _pset_bind // _sched_getaffinity // _sched_getparam // _sched_setaffinity // _sched_setparam // acct // aio_cancel // aio_error // aio_fsync // aio_read // aio_return // aio_suspend // aio_write // break // clock_getres // clock_gettime // clock_settime // compat_09_ogetdomainname // compat_09_osetdomainname // compat_09_ouname // compat_10_omsgsys // compat_10_osemsys // compat_10_oshmsys // compat_12_fstat12 // compat_12_getdirentries // compat_12_lstat12 // compat_12_msync // compat_12_oreboot // compat_12_oswapon // compat_12_stat12 // compat_13_sigaction13 // compat_13_sigaltstack13 // compat_13_sigpending13 // compat_13_sigprocmask13 // 
compat_13_sigreturn13 // compat_13_sigsuspend13 // compat_14___semctl // compat_14_msgctl // compat_14_shmctl // compat_16___sigaction14 // compat_16___sigreturn14 // compat_20_fhstatfs // compat_20_fstatfs // compat_20_getfsstat // compat_20_statfs // compat_30___fhstat30 // compat_30___fstat13 // compat_30___lstat13 // compat_30___stat13 // compat_30_fhopen // compat_30_fhstat // compat_30_fhstatvfs1 // compat_30_getdents // compat_30_getfh // compat_30_ntp_gettime // compat_30_socket // compat_40_mount // compat_43_fstat43 // compat_43_lstat43 // compat_43_oaccept // compat_43_ocreat // compat_43_oftruncate // compat_43_ogetdirentries // compat_43_ogetdtablesize // compat_43_ogethostid // compat_43_ogethostname // compat_43_ogetkerninfo // compat_43_ogetpagesize // compat_43_ogetpeername // compat_43_ogetrlimit // compat_43_ogetsockname // compat_43_okillpg // compat_43_olseek // compat_43_ommap // compat_43_oquota // compat_43_orecv // compat_43_orecvfrom // compat_43_orecvmsg // compat_43_osend // compat_43_osendmsg // compat_43_osethostid // compat_43_osethostname // compat_43_osetrlimit // compat_43_osigblock // compat_43_osigsetmask // compat_43_osigstack // compat_43_osigvec // compat_43_otruncate // compat_43_owait // compat_43_stat43 // execve // extattr_delete_fd // extattr_delete_file // extattr_delete_link // extattr_get_fd // extattr_get_file // extattr_get_link // extattr_list_fd // extattr_list_file // extattr_list_link // extattr_set_fd // extattr_set_file // extattr_set_link // extattrctl // fchroot // fdatasync // fgetxattr // fktrace // flistxattr // fork // fremovexattr // fsetxattr // fstatvfs1 // fsync_range // getcontext // getitimer // getvfsstat // getxattr // ktrace // lchflags // lchmod // lfs_bmapv // lfs_markv // lfs_segclean // lfs_segwait // lgetxattr // lio_listio // listxattr // llistxattr // lremovexattr // lseek // lsetxattr // lutimes // madvise // mincore // minherit // modctl // mq_close // mq_getattr // mq_notify // mq_open 
// mq_receive // mq_send // mq_setattr // mq_timedreceive // mq_timedsend // mq_unlink // mremap // msgget // msgrcv // msgsnd // nfssvc // ntp_adjtime // pmc_control // pmc_get_info // pollts // preadv // profil // pselect // pset_assign // pset_create // pset_destroy // ptrace // pwritev // quotactl // rasctl // readv // reboot // removexattr // sa_enable // sa_preempt // sa_register // sa_setconcurrency // sa_stacks // sa_yield // sbrk // sched_yield // semconfig // semget // semop // setcontext // setitimer // setxattr // shmat // shmdt // shmget // sstk // statvfs1 // swapctl // sysarch // syscall // timer_create // timer_delete // timer_getoverrun // timer_gettime // timer_settime // undelete // utrace // uuidgen // vadvise // vfork // writev
{ "pile_set_name": "Github" }
const p2p = require('../../p2p'); const logger = require('../util/logger.js'); module.exports = function (vorpal) { vorpal .command('open <port>', 'Open port to accept incoming connections. Eg: open 2727') .alias('o') .action(function(args, callback) { if (args.port) { if(typeof args.port === 'number') { p2p.startServer(args.port); } else { logger.log(`❌ invalid port!`) } } callback(); }) }
{ "pile_set_name": "Github" }
package co.sentinel.lite.viewmodel;

import android.arch.lifecycle.ViewModel;

import co.sentinel.lite.network.model.VpnUsage;
import co.sentinel.lite.repository.VpnRepository;
import co.sentinel.lite.util.Resource;
import co.sentinel.lite.util.SingleLiveEvent;

/**
 * ViewModel that exposes the user's VPN usage as an observable
 * {@link SingleLiveEvent} backed by {@link VpnRepository}.
 */
public class VpnUsageViewModel extends ViewModel {
    // Repository used to trigger usage reloads.
    private final VpnRepository mRepository;
    // Live event shared with the repository; emits usage updates wrapped in Resource.
    private final SingleLiveEvent<Resource<VpnUsage>> mVpnUsageLiveEvent;

    /**
     * Package-private constructor; presumably instantiated by a
     * ViewModelProvider.Factory elsewhere in the app (verify against callers).
     *
     * @param iRepository repository supplying VPN usage data and its live event.
     */
    VpnUsageViewModel(VpnRepository iRepository) {
        mRepository = iRepository;
        mVpnUsageLiveEvent = iRepository.getVpnUsageLiveEvent();
    }

    /** @return the observable event stream of VPN usage resources. */
    public SingleLiveEvent<Resource<VpnUsage>> getVpnUsageLiveEvent() {
        return mVpnUsageLiveEvent;
    }

    /** Asks the repository to fetch fresh usage data; results arrive on the live event. */
    public void reloadVpnUsage() {
        mRepository.getVpnUsageForUser();
    }
}
{ "pile_set_name": "Github" }
/* This file defines the interface between the m32c simulator and gdb.

   Copyright (C) 2005-2014 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#ifndef SIM_M32C_H
#define SIM_M32C_H

/* Register numbering shared between the m32c simulator and gdb.
   The order of this enum is the wire format of the register file;
   do not reorder without updating both sides.  */
enum m32c_sim_reg
{
  /* General/address/frame registers, bank 0.  */
  m32c_sim_reg_r0_bank0,
  m32c_sim_reg_r1_bank0,
  m32c_sim_reg_r2_bank0,
  m32c_sim_reg_r3_bank0,
  m32c_sim_reg_a0_bank0,
  m32c_sim_reg_a1_bank0,
  m32c_sim_reg_fb_bank0,
  m32c_sim_reg_sb_bank0,
  /* Same register set, bank 1.  */
  m32c_sim_reg_r0_bank1,
  m32c_sim_reg_r1_bank1,
  m32c_sim_reg_r2_bank1,
  m32c_sim_reg_r3_bank1,
  m32c_sim_reg_a0_bank1,
  m32c_sim_reg_a1_bank1,
  m32c_sim_reg_fb_bank1,
  m32c_sim_reg_sb_bank1,
  /* Stack pointers, program counter and control registers.  */
  m32c_sim_reg_usp,
  m32c_sim_reg_isp,
  m32c_sim_reg_pc,
  m32c_sim_reg_intb,
  m32c_sim_reg_flg,
  m32c_sim_reg_svf,
  m32c_sim_reg_svp,
  m32c_sim_reg_vct,
  /* DMA controller registers.  */
  m32c_sim_reg_dmd0,
  m32c_sim_reg_dmd1,
  m32c_sim_reg_dct0,
  m32c_sim_reg_dct1,
  m32c_sim_reg_drc0,
  m32c_sim_reg_drc1,
  m32c_sim_reg_dma0,
  m32c_sim_reg_dma1,
  m32c_sim_reg_dsa0,
  m32c_sim_reg_dsa1,
  m32c_sim_reg_dra0,
  m32c_sim_reg_dra1,
  /* Sentinel: total number of registers.  */
  m32c_sim_reg_num_regs
};

#endif /* SIM_M32C_H */
{ "pile_set_name": "Github" }
# Copyright (C) 2017 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import asyncio
import unittest
import ftplib
from ftplib import FTP

from heralding.capabilities import ftp
from heralding.reporting.reporting_relay import ReportingRelay


class FtpTests(unittest.TestCase):
  """Integration tests for the heralding FTP honeypot capability.

  Each test spins up the FTP capability on a local asyncio server and
  drives it with the stdlib ftplib client from an executor thread.
  """

  def setUp(self):
    # Fresh event loop per test; detach it from the main thread so the
    # ftplib client (running in an executor) does not interfere with it.
    self.loop = asyncio.new_event_loop()
    asyncio.set_event_loop(None)

    # The reporting relay runs in a worker thread for the test's duration.
    self.reporting_relay = ReportingRelay()
    self.reporting_relay_task = self.loop.run_in_executor(
        None, self.reporting_relay.start)

  def tearDown(self):
    self.reporting_relay.stop()
    # We give reporting_relay a chance to be finished
    self.loop.run_until_complete(self.reporting_relay_task)

    # self.server is created by the test body (see test_login).
    self.server.close()
    self.loop.run_until_complete(self.server.wait_closed())

    self.loop.close()

  def test_login(self):
    """Testing different login combinations"""

    def ftp_login():
      # Client side, run in an executor thread: connect and attempt a
      # login that the honeypot is expected to reject.
      ftp_client = FTP()
      ftp_client.connect('127.0.0.1', 8888, 1)
      # expect perm exception
      try:
        ftp_client.login('james', 'bond')
        _ = ftp_client.getresp()  # NOQA
      except ftplib.error_perm:
        ftp_client.quit()

    # Capability configuration; note the honeypot listens on 8888 below
    # regardless of the 'port' value here.
    options = {
        'enabled': 'True',
        'port': 0,
        'banner': 'Test Banner',
        'users': {
            'test': 'test'
        },
        'protocol_specific_data': {
            'max_attempts': 3,
            'banner': 'test banner',
            'syst_type': 'Test Type'
        }
    }

    ftp_capability = ftp.ftp(options, self.loop)
    server_coro = asyncio.start_server(
        ftp_capability.handle_session, '0.0.0.0', 8888, loop=self.loop)
    self.server = self.loop.run_until_complete(server_coro)

    # Run the blocking ftplib client in a thread while the loop serves it.
    ftp_task = self.loop.run_in_executor(None, ftp_login)
    self.loop.run_until_complete(ftp_task)
{ "pile_set_name": "Github" }
#!/usr/bin/env python
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2015, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import absolute_import, division, print_function, unicode_literals

store_version = 5  # Needed for dynamic plugin loading

from contextlib import closing

# Python 2/3 compatibility for urlencode.
try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode

from lxml import html

from PyQt5.Qt import QUrl

from calibre import browser
from calibre.gui2 import open_url
from calibre.gui2.store import StorePlugin
from calibre.gui2.store.search_result import SearchResult

# Endpoints and markers for the Amazon India Kindle store.
SEARCH_BASE_URL = 'https://www.amazon.in/s/'
SEARCH_BASE_QUERY = {'url': 'search-alias=digital-text'}
DETAILS_URL = 'https://amazon.in/dp/'
STORE_LINK = 'https://www.amazon.in'
# Presence/absence of these strings on a product page is used to guess DRM status.
DRM_SEARCH_TEXT = 'Simultaneous Device Usage'
DRM_FREE_TEXT = 'Unlimited'


def get_user_agent():
    # Fixed UA string; Amazon serves the expected page layout for this browser.
    return 'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko'


def search_amazon(query, max_results=10, timeout=60,
                  write_html_to=None,
                  base_url=SEARCH_BASE_URL,
                  base_query=SEARCH_BASE_QUERY,
                  field_keywords='field-keywords'
                  ):
    """Scrape Amazon search results for Kindle books, yielding SearchResult.

    NOTE: this is screen-scraping — the xpath expressions below are tied to
    a specific Amazon results-page layout and return silently if the layout
    marker div#atfResults is not found.
    """
    uquery = base_query.copy()
    uquery[field_keywords] = query

    def asbytes(x):
        # urlencode on Python 2 requires byte strings.
        if isinstance(x, type('')):
            x = x.encode('utf-8')
        return x
    uquery = {asbytes(k):asbytes(v) for k, v in uquery.items()}
    url = base_url + '?' + urlencode(uquery)
    br = browser(user_agent=get_user_agent())

    counter = max_results
    with closing(br.open(url, timeout=timeout)) as f:
        raw = f.read()
        if write_html_to is not None:
            # Debug aid: dump the raw HTML for offline inspection.
            with open(write_html_to, 'wb') as f:
                f.write(raw)
        doc = html.fromstring(raw)
        try:
            results = doc.xpath('//div[@id="atfResults" and @class]')[0]
        except IndexError:
            return

        # Only one known layout is handled; anything else yields nothing.
        if 's-result-list-parent-container' in results.get('class', ''):
            data_xpath = "descendant-or-self::li[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-result-item ')]"
            format_xpath = './/a[@title="Kindle Edition"]/@title'
            asin_xpath = '@data-asin'
            cover_xpath = "descendant-or-self::img[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-access-image ')]/@src"
            title_xpath = "descendant-or-self::h2[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-access-title ')]//text()"
            author_xpath = './/span[starts-with(text(), "by ")]/following-sibling::span//text()'
            price_xpath = ('descendant::div[@class="a-row a-spacing-none" and'
                           ' not(span[contains(@class, "kindle-unlimited")])]//span[contains(@class, "s-price")]//text()')
        else:
            return

        for data in doc.xpath(data_xpath):
            if counter <= 0:
                break

            # Even though we are searching digital-text only Amazon will still
            # put in results for non Kindle books (author pages). Se we need
            # to explicitly check if the item is a Kindle book and ignore it
            # if it isn't.
            format = ''.join(data.xpath(format_xpath))
            if 'kindle' not in format.lower():
                continue

            # We must have an asin otherwise we can't easily reference the
            # book later.
            asin = data.xpath(asin_xpath)
            if asin:
                asin = asin[0]
            else:
                continue

            cover_url = ''.join(data.xpath(cover_xpath))

            title = ''.join(data.xpath(title_xpath))
            author = ''.join(data.xpath(author_xpath))
            try:
                # Strip the leading "by " and any trailing " (...)" qualifier.
                author = author.split('by ', 1)[1].split(" (")[0]
            except:
                # Best-effort: keep the raw author text if the pattern differs.
                pass

            price = ''.join(data.xpath(price_xpath))

            counter -= 1

            s = SearchResult()
            s.cover_url = cover_url.strip()
            s.title = title.strip()
            s.author = author.strip()
            s.price = price.strip()
            if s.price:
                s.price = '₹ ' + s.price
            s.detail_item = asin.strip()
            s.formats = 'Kindle'

            yield s


class AmazonKindleStore(StorePlugin):
    """calibre store plugin for the Amazon India Kindle store."""

    def open(self, parent=None, detail_item=None, external=False):
        # Open the product page if we have an ASIN, else the storefront.
        store_link = (DETAILS_URL + detail_item) if detail_item else STORE_LINK
        open_url(QUrl(store_link))

    def search(self, query, max_results=10, timeout=60):
        for result in search_amazon(query, max_results=max_results, timeout=timeout):
            yield result

    def get_details(self, search_result, timeout):
        # Fetch the product page and heuristically classify its DRM status
        # based on the "Simultaneous Device Usage" row.
        url = DETAILS_URL
        br = browser(user_agent=get_user_agent())
        with closing(br.open(url + search_result.detail_item, timeout=timeout)) as nf:
            idata = html.fromstring(nf.read())
            if idata.xpath('boolean(//div[@class="content"]//li/b[contains(text(), "' + DRM_SEARCH_TEXT + '")])'):
                if idata.xpath('boolean(//div[@class="content"]//li[contains(., "' + DRM_FREE_TEXT + '") and contains(b, "' + DRM_SEARCH_TEXT + '")])'):
                    search_result.drm = SearchResult.DRM_UNLOCKED
                else:
                    search_result.drm = SearchResult.DRM_UNKNOWN
            else:
                search_result.drm = SearchResult.DRM_LOCKED
        return True


if __name__ == '__main__':
    import sys
    for result in search_amazon(' '.join(sys.argv[1:]), write_html_to='/t/amazon.html'):
        print(result)
{ "pile_set_name": "Github" }
// Copyright 2017 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package internal

import (
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"

	"golang.org/x/oauth2"
	"golang.org/x/oauth2/google"
)

// Creds returns credential information obtained from DialSettings, or if none, then
// it returns default credential information.
// Resolution order: explicit Credentials, CredentialsJSON, CredentialsFile,
// TokenSource, then Application Default Credentials.
func Creds(ctx context.Context, ds *DialSettings) (*google.Credentials, error) {
	if ds.Credentials != nil {
		return ds.Credentials, nil
	}
	if ds.CredentialsJSON != nil {
		return credentialsFromJSON(ctx, ds.CredentialsJSON, ds.Endpoint, ds.Scopes, ds.Audiences)
	}
	if ds.CredentialsFile != "" {
		data, err := ioutil.ReadFile(ds.CredentialsFile)
		if err != nil {
			return nil, fmt.Errorf("cannot read credentials file: %v", err)
		}
		return credentialsFromJSON(ctx, data, ds.Endpoint, ds.Scopes, ds.Audiences)
	}
	if ds.TokenSource != nil {
		return &google.Credentials{TokenSource: ds.TokenSource}, nil
	}
	cred, err := google.FindDefaultCredentials(ctx, ds.Scopes...)
	if err != nil {
		return nil, err
	}
	if len(cred.JSON) > 0 {
		return credentialsFromJSON(ctx, cred.JSON, ds.Endpoint, ds.Scopes, ds.Audiences)
	}
	// For GAE and GCE, the JSON is empty so return the default credentials directly.
	return cred, nil
}

// JSON key file type.
const (
	serviceAccountKey = "service_account"
)

// credentialsFromJSON returns a google.Credentials based on the input.
//
// - If the JSON is a service account and no scopes provided, returns self-signed JWT auth flow
// - Otherwise, returns OAuth 2.0 flow.
func credentialsFromJSON(ctx context.Context, data []byte, endpoint string, scopes []string, audiences []string) (*google.Credentials, error) {
	cred, err := google.CredentialsFromJSON(ctx, data, scopes...)
	if err != nil {
		return nil, err
	}
	// Only switch to the self-signed JWT flow when no scopes were requested
	// and the key is of type "service_account".
	if len(data) > 0 && len(scopes) == 0 {
		var f struct {
			Type string `json:"type"`
			// The rest JSON fields are omitted because they are not used.
		}
		if err := json.Unmarshal(cred.JSON, &f); err != nil {
			return nil, err
		}
		if f.Type == serviceAccountKey {
			ts, err := selfSignedJWTTokenSource(data, endpoint, audiences)
			if err != nil {
				return nil, err
			}
			cred.TokenSource = ts
		}
	}
	return cred, err
}

// selfSignedJWTTokenSource builds a JWT access-token source from a service
// account key, using the API endpoint (or the single provided audience)
// as the JWT audience.
func selfSignedJWTTokenSource(data []byte, endpoint string, audiences []string) (oauth2.TokenSource, error) {
	// Use the API endpoint as the default audience
	audience := endpoint
	if len(audiences) > 0 {
		// TODO(shinfan): Update golang oauth to support multiple audiences.
		if len(audiences) > 1 {
			return nil, fmt.Errorf("multiple audiences support is not implemented")
		}
		audience = audiences[0]
	}
	return google.JWTAccessTokenSourceFromJSON(data, audience)
}
{ "pile_set_name": "Github" }
# Copyright (c) 2013 Thomas Nicholson <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. The names of the author(s) may not be used to endorse or promote # products derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED # AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY # OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. 
from twisted.python import log from kippo.core.config import config import subprocess class Networking(): cfg = config() theIP = None theFakeIP = None def setupNetworking(self, theIP): if self.cfg.get('advNet', 'enabled') == 'true': self.theIP = theIP self.theFakeIP = self.getFakeIP(self.theIP) sp = self.runCommand('ip link add name honssh type dummy') result = sp.communicate() if sp.returncode != 0: if 'File exists' in result[0]: log.msg("[ADV-NET] - HonSSH Interface already exists, not re-adding") return self.addFakeIP() else: log.msg('[ADV-NET] - Error creating HonSSH Interface - Using client_addr: ' + result[0]) return self.cfg.get('honeypot', 'client_addr') else: sp = self.runCommand('ip link set honssh up') result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error setting HonSSH Interface UP - Using client_addr: ' + result[0]) return self.cfg.get('honeypot', 'client_addr') else: log.msg("[ADV-NET] - HonSSH Interface created") return self.addFakeIP() else: log.msg("[ADV-NET] - Advanced Networking disabled - Using client_addr") return self.cfg.get('honeypot', 'client_addr') def addFakeIP(self): sp = self.runCommand('ip addr add ' + self.theFakeIP + '/32 dev honssh') result = sp.communicate() if sp.returncode != 0: if 'File exists' in result[0]: log.msg("[ADV-NET] - Fake IP Address already exists, not re-adding") return self.theFakeIP else: log.msg('[ADV-NET] - Error adding IP address to HonSSH Interface - Using client_addr: ' + result[0]) return self.cfg.get('honeypot', 'client_addr') else: sp = self.runCommand('iptables -t nat -A POSTROUTING -s ' + self.theFakeIP + '/32 -d ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -p tcp --dport 22 -j SNAT --to ' + self.theIP) result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error creating POSTROUTING Rule - Using client_addr: ' + result[0]) return self.cfg.get('honeypot', 'client_addr') else: sp = self.runCommand('iptables -t nat -A PREROUTING -s ' + self.cfg.get('honeypot', 
'honey_addr') + '/32 -d ' + self.theIP +'/32 -p tcp --sport 22 -j DNAT --to ' + self.theFakeIP) result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error creating PREROUTING Rule - Using client_addr: ' + result[0]) return self.cfg.get('honeypot', 'client_addr') else: log.msg("[ADV-NET] - HonSSH FakeIP and iptables rules added") return self.theFakeIP def removeFakeIP(self): sp = self.runCommand('ip addr del ' + self.theFakeIP + '/32 dev honssh') result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error removing IP address to HonSSH Interface: ' + result[0]) sp = self.runCommand('iptables -t nat -D POSTROUTING -s ' + self.theFakeIP + '/32 -d ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -p tcp --dport 22 -j SNAT --to ' + self.theIP) result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error removing POSTROUTING Rule: ' + result[0]) sp = self.runCommand('iptables -t nat -D PREROUTING -s ' + self.cfg.get('honeypot', 'honey_addr') + '/32 -d ' + self.theIP +'/32 -p tcp --sport 22 -j DNAT --to ' + self.theFakeIP) result = sp.communicate() if sp.returncode != 0: log.msg('[ADV-NET] - Error removing PREROUTING Rule: ' + result[0]) def removeNetworking(self, sessions): if self.cfg.get('advNet', 'enabled') == 'true': if len(sessions) == 0: self.removeFakeIP() sp = self.runCommand('ip link del dev honssh') result = sp.communicate() if sp.returncode != 0: log.msg("[ADV-NET] - Error removing HonSSH Interface: " + result[0]) else: found = False for s in sessions: session = sessions[s] if session.endIP == self.theIP: found = True break if not found: self.removeFakeIP() def getFakeIP(self, theIP): ipBits = theIP.split('.') for i in range(0, len(ipBits)): ipBits[i] = str(int(ipBits[i]) + 1) if ipBits[i] >= '255': ipBits[i] = '1' return '.'.join(ipBits) def runCommand(self, cmd): return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
{ "pile_set_name": "Github" }
/** * * @param str * @return {string} */ export function innerTextTrim(str: string): string;
{ "pile_set_name": "Github" }
/* Driver for ST STV0288 demodulator Copyright (C) 2006 Georg Acher, BayCom GmbH, acher (at) baycom (dot) de for Reel Multimedia Copyright (C) 2008 TurboSight.com, <[email protected]> Copyright (C) 2008 Igor M. Liplianin <[email protected]> Removed stb6000 specific tuner code and revised some procedures. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ #ifndef STV0288_H #define STV0288_H #include <linux/dvb/frontend.h> #include "dvb_frontend.h" struct stv0288_config { /* the demodulator's i2c address */ u8 demod_address; u8* inittab; /* minimum delay before retuning */ int min_delay_ms; int (*set_ts_params)(struct dvb_frontend *fe, int is_punctured); }; #if defined(CONFIG_DVB_STV0288) || (defined(CONFIG_DVB_STV0288_MODULE) && \ defined(MODULE)) extern struct dvb_frontend *stv0288_attach(const struct stv0288_config *config, struct i2c_adapter *i2c); #else static inline struct dvb_frontend *stv0288_attach(const struct stv0288_config *config, struct i2c_adapter *i2c) { printk(KERN_WARNING "%s: driver disabled by Kconfig\n", __func__); return NULL; } #endif /* CONFIG_DVB_STV0288 */ static inline int stv0288_writereg(struct dvb_frontend *fe, u8 reg, u8 val) { int r = 0; u8 buf[] = { reg, val }; if (fe->ops.write) r = fe->ops.write(fe, buf, 2); return r; } #endif /* STV0288_H */
{ "pile_set_name": "Github" }
<section> <h2 id="input-fallback"> Compatibility with <code>&lt;input type="text" /&gt;</code> </h2> <p class="alert alert-warning"> <a href="announcements-4.0.html#hidden-input" class="alert-link">Deprecated in Select2 4.0.</a> It is now encouraged to use the <code>&lt;select&gt;</code> tag instead. </p> <p> In past versions of Select2, a <code>&lt;select&gt;</code> element could only be used with a limited subset of options. An <code>&lt;input type="hidden" /&gt;</code> was required instead, which did not allow for a graceful fallback for users who did not have JavaScript enabled. Select2 now supports the <code>&lt;select&gt;</code> element for all options, so it is no longer required to use <code>&lt;input /&gt;</code> elements with Select2. </p> <dl class="dl-horizontal"> <dt>Adapter</dt> <dd> <code title="select2/data/base">DataAdapter</code> </dd> <dt>Decorator</dt> <dd> <code title="select2/compat/inputData">InputData</code> </dd> </dl> </section>
{ "pile_set_name": "Github" }
/* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2012, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ***** END LICENSE BLOCK ***** */ define(function(require, exports, module) { "use strict"; var oop = require("../lib/oop"); var HtmlHighlightRules = require("./html_highlight_rules").HtmlHighlightRules; var ElixirHighlightRules = require("./elixir_highlight_rules").ElixirHighlightRules; var HtmlElixirHighlightRules = function() { HtmlHighlightRules.call(this); var startRules = [ { regex: "<%%|%%>", token: "constant.language.escape" }, { token : "comment.start.eex", regex : "<%#", push : [{ token : "comment.end.eex", regex: "%>", next: "pop", defaultToken:"comment" }] }, { token : "support.elixir_tag", regex : "<%+(?!>)[-=]?", push : "elixir-start" } ]; var endRules = [ { token : "support.elixir_tag", regex : "%>", next : "pop" }, { token: "comment", regex: "#(?:[^%]|%[^>])*" } ]; for (var key in this.$rules) this.$rules[key].unshift.apply(this.$rules[key], startRules); this.embedRules(ElixirHighlightRules, "elixir-", endRules, ["start"]); this.normalizeRules(); }; oop.inherits(HtmlElixirHighlightRules, HtmlHighlightRules); exports.HtmlElixirHighlightRules = HtmlElixirHighlightRules; });
{ "pile_set_name": "Github" }
/* Copyright The Flagger Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package v1beta1 import ( "context" "time" v1beta1 "github.com/weaveworks/flagger/pkg/apis/appmesh/v1beta1" scheme "github.com/weaveworks/flagger/pkg/client/clientset/versioned/scheme" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" rest "k8s.io/client-go/rest" ) // VirtualNodesGetter has a method to return a VirtualNodeInterface. // A group's client should implement this interface. type VirtualNodesGetter interface { VirtualNodes(namespace string) VirtualNodeInterface } // VirtualNodeInterface has methods to work with VirtualNode resources. 
type VirtualNodeInterface interface { Create(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.CreateOptions) (*v1beta1.VirtualNode, error) Update(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.UpdateOptions) (*v1beta1.VirtualNode, error) UpdateStatus(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.UpdateOptions) (*v1beta1.VirtualNode, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.VirtualNode, error) List(ctx context.Context, opts v1.ListOptions) (*v1beta1.VirtualNodeList, error) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.VirtualNode, err error) VirtualNodeExpansion } // virtualNodes implements VirtualNodeInterface type virtualNodes struct { client rest.Interface ns string } // newVirtualNodes returns a VirtualNodes func newVirtualNodes(c *AppmeshV1beta1Client, namespace string) *virtualNodes { return &virtualNodes{ client: c.RESTClient(), ns: namespace, } } // Get takes name of the virtualNode, and returns the corresponding virtualNode object, and an error if there is any. func (c *virtualNodes) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.VirtualNode, err error) { result = &v1beta1.VirtualNode{} err = c.client.Get(). Namespace(c.ns). Resource("virtualnodes"). Name(name). VersionedParams(&options, scheme.ParameterCodec). Do(ctx). Into(result) return } // List takes label and field selectors, and returns the list of VirtualNodes that match those selectors. 
func (c *virtualNodes) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.VirtualNodeList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } result = &v1beta1.VirtualNodeList{} err = c.client.Get(). Namespace(c.ns). Resource("virtualnodes"). VersionedParams(&opts, scheme.ParameterCodec). Timeout(timeout). Do(ctx). Into(result) return } // Watch returns a watch.Interface that watches the requested virtualNodes. func (c *virtualNodes) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } opts.Watch = true return c.client.Get(). Namespace(c.ns). Resource("virtualnodes"). VersionedParams(&opts, scheme.ParameterCodec). Timeout(timeout). Watch(ctx) } // Create takes the representation of a virtualNode and creates it. Returns the server's representation of the virtualNode, and an error, if there is any. func (c *virtualNodes) Create(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.CreateOptions) (result *v1beta1.VirtualNode, err error) { result = &v1beta1.VirtualNode{} err = c.client.Post(). Namespace(c.ns). Resource("virtualnodes"). VersionedParams(&opts, scheme.ParameterCodec). Body(virtualNode). Do(ctx). Into(result) return } // Update takes the representation of a virtualNode and updates it. Returns the server's representation of the virtualNode, and an error, if there is any. func (c *virtualNodes) Update(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.UpdateOptions) (result *v1beta1.VirtualNode, err error) { result = &v1beta1.VirtualNode{} err = c.client.Put(). Namespace(c.ns). Resource("virtualnodes"). Name(virtualNode.Name). VersionedParams(&opts, scheme.ParameterCodec). Body(virtualNode). Do(ctx). Into(result) return } // UpdateStatus was generated because the type contains a Status member. 
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *virtualNodes) UpdateStatus(ctx context.Context, virtualNode *v1beta1.VirtualNode, opts v1.UpdateOptions) (result *v1beta1.VirtualNode, err error) { result = &v1beta1.VirtualNode{} err = c.client.Put(). Namespace(c.ns). Resource("virtualnodes"). Name(virtualNode.Name). SubResource("status"). VersionedParams(&opts, scheme.ParameterCodec). Body(virtualNode). Do(ctx). Into(result) return } // Delete takes name of the virtualNode and deletes it. Returns an error if one occurs. func (c *virtualNodes) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("virtualnodes"). Name(name). Body(&opts). Do(ctx). Error() } // DeleteCollection deletes a collection of objects. func (c *virtualNodes) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { var timeout time.Duration if listOpts.TimeoutSeconds != nil { timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second } return c.client.Delete(). Namespace(c.ns). Resource("virtualnodes"). VersionedParams(&listOpts, scheme.ParameterCodec). Timeout(timeout). Body(&opts). Do(ctx). Error() } // Patch applies the patch and returns the patched virtualNode. func (c *virtualNodes) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.VirtualNode, err error) { result = &v1beta1.VirtualNode{} err = c.client.Patch(pt). Namespace(c.ns). Resource("virtualnodes"). Name(name). SubResource(subresources...). VersionedParams(&opts, scheme.ParameterCodec). Body(data). Do(ctx). Into(result) return }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="UTF-8"> <link rel="author" title="Koji Ishii" href="mailto:[email protected]"> <link rel="reviewer" title="Gérard Talbot" href="http://www.gtalbot.org/BrowserBugsSection/css21testsuite/"> <!-- 2015-05-01 --> <title>Embedded HTML document for background-size-document-root-vrl-008</title> <style> html { background-image: url("swatch-green.png"); background-repeat: no-repeat; background-size: 100% 100%; writing-mode: vertical-rl; border-left: white solid 20px; margin-right: 50px; padding-right: 50px; width: 50px; } </style> </head> </html>
{ "pile_set_name": "Github" }
set include= cd game asm2mak game makefile make clean cd ..\cgame asm2mak cgame makefile make clean cd ..\ui asm2mak ui makefile make clean cd ..
{ "pile_set_name": "Github" }
/***************************************************************************/ /* */ /* ftgasp.h */ /* */ /* Access of TrueType's `gasp' table (specification). */ /* */ /* Copyright 2007, 2008 by */ /* David Turner, Robert Wilhelm, and Werner Lemberg. */ /* */ /* This file is part of the FreeType project, and may only be used, */ /* modified, and distributed under the terms of the FreeType project */ /* license, LICENSE.TXT. By continuing to use, modify, or distribute */ /* this file you indicate that you have read the license and */ /* understand and accept it fully. */ /* */ /***************************************************************************/ #ifndef _FT_GASP_H_ #define _FT_GASP_H_ #include <ft2build.h> #include FT_FREETYPE_H /*************************************************************************** * * @section: * gasp_table * * @title: * Gasp Table * * @abstract: * Retrieving TrueType `gasp' table entries * * @description: * The function @FT_Get_Gasp can be used to query a TrueType or OpenType * font for specific entries in their `gasp' table, if any. This is * mainly useful when implementing native TrueType hinting with the * bytecode interpreter to duplicate the Windows text rendering results. */ /************************************************************************* * * @enum: * FT_GASP_XXX * * @description: * A list of values and/or bit-flags returned by the @FT_Get_Gasp * function. * * @values: * FT_GASP_NO_TABLE :: * This special value means that there is no GASP table in this face. * It is up to the client to decide what to do. * * FT_GASP_DO_GRIDFIT :: * Grid-fitting and hinting should be performed at the specified ppem. * This *really* means TrueType bytecode interpretation. * * FT_GASP_DO_GRAY :: * Anti-aliased rendering should be performed at the specified ppem. * * FT_GASP_SYMMETRIC_SMOOTHING :: * Smoothing along multiple axes must be used with ClearType. 
* * FT_GASP_SYMMETRIC_GRIDFIT :: * Grid-fitting must be used with ClearType's symmetric smoothing. * * @note: * `ClearType' is Microsoft's implementation of LCD rendering, partly * protected by patents. * * @since: * 2.3.0 */ #define FT_GASP_NO_TABLE -1 #define FT_GASP_DO_GRIDFIT 0x01 #define FT_GASP_DO_GRAY 0x02 #define FT_GASP_SYMMETRIC_SMOOTHING 0x08 #define FT_GASP_SYMMETRIC_GRIDFIT 0x10 /************************************************************************* * * @func: * FT_Get_Gasp * * @description: * Read the `gasp' table from a TrueType or OpenType font file and * return the entry corresponding to a given character pixel size. * * @input: * face :: The source face handle. * ppem :: The vertical character pixel size. * * @return: * Bit flags (see @FT_GASP_XXX), or @FT_GASP_NO_TABLE if there is no * `gasp' table in the face. * * @since: * 2.3.0 */ FT_EXPORT( FT_Int ) FT_Get_Gasp( FT_Face face, FT_UInt ppem ); /* */ #endif /* _FT_GASP_H_ */ /* END */
{ "pile_set_name": "Github" }
# 2.7.0 [All items](https://github.com/Azure/BatchExplorer/milestone/41?closed=1) ### bug: * Fix issue where the image selector when creating pools only showed verified images [\#2169](https://github.com/Azure/BatchExplorer/pull/2169) ### other: * Remove support for managed images kind of custom images as this will no longer be supported by latest API's. Customers should switch to using Shared Image Galleries, which are the latest form of custom image support (https://docs.microsoft.com/en-us/azure/batch/batch-sig-images) or can disable updates (introduced in version 2.6.0) for the short term. [\#2153](https://github.com/Azure/BatchExplorer/issues/2153) # 2.6.0 [All items](https://github.com/Azure/BatchExplorer/milestone/40?closed=1) ### feature: * Display Unusable Node Banner on Pools Page [\#2153](https://github.com/Azure/BatchExplorer/issues/2153) * Disable Auto-update with Command Line [\#2152](https://github.com/Azure/BatchExplorer/issues/2152) ### bug: * PyInstaller Vulnerability [\#2155](https://github.com/Azure/BatchExplorer/issues/2155) * Governance Updates [\#2154](https://github.com/Azure/BatchExplorer/issues/2154) ### other: # 2.5.0 [All items](https://github.com/Azure/BatchExplorer/milestone/39?closed=1) ### feature: * Change Error Messages Inner Details to be shown by default [\#2140](https://github.com/Azure/BatchExplorer/issues/2140) ### bug: * Account create fails with 'Must specify a location' [\#2135](https://github.com/Azure/BatchExplorer/issues/2135) * When listing tasks with OData, pagination may not be followed to termination [\#2127](https://github.com/Azure/BatchExplorer/issues/2127) * When attempting to RDP/SSH to a node you must update expiresOn property otherwise you get a spinning wheel [\#2119](https://github.com/Azure/BatchExplorer/issues/2119) * When Creating a JobPrep or JobRelease task you must update retentionTime to pass validation [\#2118](https://github.com/Azure/BatchExplorer/issues/2118) ### other: * EULA text still displays 
BatchLabs text [\#2141](https://github.com/Azure/BatchExplorer/issues/2141) ### Fixes * Vulnerabilities with dependencies * Fix issue where the Python server would not start due to dependency collisions. # 2.4.0 [All items](https://github.com/Azure/BatchExplorer/milestone/38?closed=1) ### feature: * StartTask WaitForSuccess should be defaulted to true [\#2105](https://github.com/Azure/BatchExplorer/issues/2105) * Unable to filter for failed tasks [\#2084](https://github.com/Azure/BatchExplorer/issues/2084) ### bug: * Required caused save to be disabled even though populated [\#2103](https://github.com/Azure/BatchExplorer/issues/2103) * No Results Displayed if User Did Not Have a SIG Image. [\#2102](https://github.com/Azure/BatchExplorer/issues/2102) * Start time and End time in Job execution information are empty [\#2088](https://github.com/Azure/BatchExplorer/issues/2088) * Batch Explorers Unlimited retention time actually sets value to 7d [\#2083](https://github.com/Azure/BatchExplorer/issues/2083) ### other: * Release/prepare 2.4.0 [\#2108](https://github.com/Azure/BatchExplorer/issues/2108) * Update @angular dependencies [\#2107](https://github.com/Azure/BatchExplorer/issues/2107) * Update Batch API [\#2106](https://github.com/Azure/BatchExplorer/issues/2106) * Update python dependency [\#2104](https://github.com/Azure/BatchExplorer/issues/2104) # 2.3.0 [All items](https://github.com/Azure/BatchExplorer/milestone/37?closed=1) ### feature: ### bug: * Task filter not applied when refresh clicked * Unable to filter for "offline" nodes ### other: # 2.2.0 [All items](https://github.com/Azure/BatchExplorer/milestone/36?closed=1) ### feature: * Add SharedImageGallery support to the custom images blade of pool create. * Users can now specify custom Azure environments ### bug: * Promo VMSize's now display projected pricing correctly. 
### other: # 2.1.2 ### Bug fixes * Cannot connect to a node as an Admin [\#2033](https://github.com/Azure/BatchExplorer/issues/2033) * Sort task by runtime fix for running tasks [\#2034](https://github.com/Azure/BatchExplorer/issues/2034) # 2.1.1 ### Bug fixes * Cannot view any pool when creating a job [\#2029](https://github.com/Azure/BatchExplorer/issues/2029) # 2.1.0 [All items](https://github.com/Azure/BatchExplorer/milestone/35?closed=1) ### Features: * Allow key bindings to be edited by the user [\#2009](https://github.com/Azure/BatchExplorer/issues/2009) * Enable Keyboard shortcuts for pool/job operations [\#1997](https://github.com/Azure/BatchExplorer/issues/1997) * Show cost per pool [\#1993](https://github.com/Azure/BatchExplorer/issues/1993) * Node connect experience merge configure page [\#1991](https://github.com/Azure/BatchExplorer/issues/1991) * Allow updating local batch account properties [\#1951](https://github.com/Azure/BatchExplorer/issues/1951) * New job created in NCJ adds to the bottom of the quick-list [\#1277](https://github.com/Azure/BatchExplorer/issues/1277) ### Bug fixes: * OS Family 6 (Windows Serer 2019) is not displayed correctly [\#2027](https://github.com/Azure/BatchExplorer/issues/2027) * Clone pool not setting app package name [\#2019](https://github.com/Azure/BatchExplorer/issues/2019) * Pressing keys while leaving the window break keyboard shortcuts [\#2014](https://github.com/Azure/BatchExplorer/issues/2014) ### Others: * Page to see key bindings [\#2006](https://github.com/Azure/BatchExplorer/issues/2006) # 2.0.4 ### Fixes * Vulnerabilities with dependencies # 2.0.3 ### Bug fixes * Recent templates from "my library" are a bit too long [\#1969](https://github.com/Azure/BatchExplorer/issues/1969) * Job schedule with autopool fail to display [\#1995](https://github.com/Azure/BatchExplorer/issues/1995) * Pool graphs One Day and One Week option both have value of 1 day [\#1999](https://github.com/Azure/BatchExplorer/issues/1999) * 
Microsoft portfolio settings not persisted [\#2004](https://github.com/Azure/BatchExplorer/issues/2004) # 2.0.2 ### Bug fixes * **Vulnerabilities** with js-yaml. [\#1990](https://github.com/Azure/BatchExplorer/issues/1990) * Node counts graphs don't recover from errors(e.g. Connection died). [\#1989](https://github.com/Azure/BatchExplorer/issues/1989) * Null exception in resource files properties [\#1984](https://github.com/Azure/BatchExplorer/issues/1984) * Only loads permissions for first selected batch account [\#1987](https://github.com/Azure/BatchExplorer/issues/1987) # 2.0.1 ### Bug fixes * Null route when clicking on node or task that isn't loaded yet [\#1983](https://github.com/Azure/BatchExplorer/issues/1983) * Open a local template file null expection [\#1980](https://github.com/Azure/BatchExplorer/issues/1980) * Null pointer exception in job schedule details [\#1978](https://github.com/Azure/BatchExplorer/issues/1978) * Null currentTab in VTab Component [\#1976](https://github.com/Azure/BatchExplorer/issues/1976) # 2.0.0 [All items](https://github.com/Azure/BatchExplorer/milestone/32?closed=1) ### Features: * Sort jobs by end time and keep last sort order [\#1966](https://github.com/Azure/BatchExplorer/issues/1966) * Subscription list sorted alphabetically [\#1963](https://github.com/Azure/BatchExplorer/issues/1963) * Local Template library show full path to file [\#1943](https://github.com/Azure/BatchExplorer/issues/1943) * Create batch account location picker use provider specific locations [\#1902](https://github.com/Azure/BatchExplorer/issues/1902) * Add common environment settings support for job [\#1896](https://github.com/Azure/BatchExplorer/issues/1896) * Local template library only shows .template.json files [\#1894](https://github.com/Azure/BatchExplorer/issues/1894) * Update to new management api version [\#1892](https://github.com/Azure/BatchExplorer/issues/1892) * Support setting data disk for batch pools 
[\#1887](https://github.com/Azure/BatchExplorer/issues/1887) * Ability to abort the resize of a pool [\#1884](https://github.com/Azure/BatchExplorer/issues/1884) * Add back option to run a single template without adding a local library folder [\#1883](https://github.com/Azure/BatchExplorer/issues/1883) * New settings UI [\#1881](https://github.com/Azure/BatchExplorer/issues/1881) * Add column keyboard navigation for tables for accessibility [\#1878](https://github.com/Azure/BatchExplorer/issues/1878) * Windows user account interactive mode [\#1868](https://github.com/Azure/BatchExplorer/issues/1868) * Unify filesystem interfaces [\#1866](https://github.com/Azure/BatchExplorer/issues/1866) * Support the new type of resource files(Storage container) [\#1839](https://github.com/Azure/BatchExplorer/issues/1839) * Update Batch api to latest version 2018-12-01.8.0 [\#1838](https://github.com/Azure/BatchExplorer/issues/1838) * Global app utc vs local time selector [\#1837](https://github.com/Azure/BatchExplorer/issues/1837) * Switch account monitoring to average now that value are correct [\#1835](https://github.com/Azure/BatchExplorer/issues/1835) * Migrate from momentjs to luxon [\#1833](https://github.com/Azure/BatchExplorer/issues/1833) * File group creation disabled if no batch account selected [\#1750](https://github.com/Azure/BatchExplorer/issues/1750) * Display multi instance settings on the sub task [\#1554](https://github.com/Azure/BatchExplorer/issues/1554) * Batch account favourites should be user specific. 
[\#250](https://github.com/Azure/BatchExplorer/issues/250) ### Bug fixes: * CTRL+Click not selecting quick-list items [\#1970](https://github.com/Azure/BatchExplorer/issues/1970) * Dashboard app package table has URL not name [\#1957](https://github.com/Azure/BatchExplorer/issues/1957) * Local template open in default editor should not open a copy [\#1944](https://github.com/Azure/BatchExplorer/issues/1944) * Daily cost graph showing corrupt data [\#1933](https://github.com/Azure/BatchExplorer/issues/1933) * Cant view images from task outputs [\#1928](https://github.com/Azure/BatchExplorer/issues/1928) * Bugs in Application Packages [\#1926](https://github.com/Azure/BatchExplorer/issues/1926) * Insider build unable to download (stable) update [\#1919](https://github.com/Azure/BatchExplorer/issues/1919) * CSS for favorite picker is broken [\#1914](https://github.com/Azure/BatchExplorer/issues/1914) * Issue with switching azure environment(National cloud) [\#1909](https://github.com/Azure/BatchExplorer/issues/1909) * Pool quota out of sync with pool list [\#1906](https://github.com/Azure/BatchExplorer/issues/1906) * Stale pool statistics / graphs [\#1505](https://github.com/Azure/BatchExplorer/issues/1505) ### Others: * Switch from bunyan to winston for logging [\#1865](https://github.com/Azure/BatchExplorer/issues/1865) * New user configuration/settings system [\#1841](https://github.com/Azure/BatchExplorer/issues/1841) # 0.19.2 ### Bug fixes * Issue when using branch with / in github-data.source.branch setting [\#1870](https://github.com/Azure/BatchExplorer/issues/1870) * Issue with listing subscriptions when having more than 50 [\#1872](https://github.com/Azure/BatchExplorer/issues/1872) * Observable from main process are not being cleanup when windows refresh or close [\#1874](https://github.com/Azure/BatchExplorer/issues/1874) * Issues with job statistics when switching sorting [\#1859](https://github.com/Azure/BatchExplorer/issues/1859) * Authentication window 
should handle load errors [\#1862](https://github.com/Azure/BatchExplorer/issues/1862) * Fix issues with Job statistics page [\#1843](https://github.com/Azure/BatchExplorer/issues/1843) * Download button has disappeared for files [\#1861](https://github.com/Azure/BatchExplorer/issues/1861) * Null not handled in file loader properties [\#1857](https://github.com/Azure/BatchExplorer/issues/1857) * Add / edit start task won't load when user account present on pool create [\#1855](https://github.com/Azure/BatchExplorer/issues/1855) * Linux deb package not installable [\#1852](https://github.com/Azure/BatchExplorer/issues/1852) # 0.19.1 ### Bug fixes * Drilldown into tasks no longer works when filtered [\#1843](https://github.com/Azure/BatchExplorer/issues/1843) * Create job schedule from job is broken [\#1844](https://github.com/Azure/BatchExplorer/issues/1844) * Check for updates throws uncaught errors sometimes [\#1847](https://github.com/Azure/BatchExplorer/issues/1847) * Null pointer exception in pool picker when using Local Batch account [\#1850](https://github.com/Azure/BatchExplorer/issues/1850) # 0.19.0 [All items](https://github.com/Azure/BatchExplorer/milestone/27?closed=1) ### Feature: * VNet picker should alllow removing selection [\#1822](https://github.com/Azure/BatchExplorer/issues/1822) * Show Resource Group of Batch Account [\#1818](https://github.com/Azure/BatchExplorer/issues/1818) * Multi delete nodes support [\#1812](https://github.com/Azure/BatchExplorer/issues/1812) * VM size picker alternative if can't load the vm sizes(Local account) [\#1810](https://github.com/Azure/BatchExplorer/issues/1810) * Setup new secure storage [\#1808](https://github.com/Azure/BatchExplorer/issues/1808) * Redesign of the gallery to simplify navigation [\#1791](https://github.com/Azure/BatchExplorer/issues/1791) * Auto add file group output container [\#1790](https://github.com/Azure/BatchExplorer/issues/1790) * Refactor VM size picker with filter 
[\#1783](https://github.com/Azure/BatchExplorer/issues/1783) * Add a link to the pool/node in the task summary [\#1776](https://github.com/Azure/BatchExplorer/issues/1776) * Rescale should wait for resize operation to be completed when disabling autoscale [\#1754](https://github.com/Azure/BatchExplorer/issues/1754) * Provide actual cost of batch account [\#1748](https://github.com/Azure/BatchExplorer/issues/1748) * New pool os picker design [\#1735](https://github.com/Azure/BatchExplorer/issues/1735) * Ability to type the file path in the file explorer [\#1702](https://github.com/Azure/BatchExplorer/issues/1702) * File viewer redesign to be more extensible [\#1700](https://github.com/Azure/BatchExplorer/issues/1700) * File viewer syntax highlighting [\#1699](https://github.com/Azure/BatchExplorer/issues/1699) * Local template library [\#1696](https://github.com/Azure/BatchExplorer/issues/1696) * Add a regular check for updates while the app is open [\#1656](https://github.com/Azure/BatchExplorer/issues/1656) * Add validation on container length(Between 3 and 63 characters) [\#1641](https://github.com/Azure/BatchExplorer/issues/1641) * Need to be able to resize quick-list panel [\#1544](https://github.com/Azure/BatchExplorer/issues/1544) * Manage certificates and app packages for existing pools. 
[\#1334](https://github.com/Azure/BatchExplorer/issues/1334) ### Bug fixes: * Recent template not working [\#1813](https://github.com/Azure/BatchExplorer/issues/1813) * Drag and drop upload is broken [\#1801](https://github.com/Azure/BatchExplorer/issues/1801) * Insider build icon is showing electron icon [\#1793](https://github.com/Azure/BatchExplorer/issues/1793) * Unable to report issue through program [\#1779](https://github.com/Azure/BatchExplorer/issues/1779) * Leaving the node file explorer with the stdout.txt file open appends content over and over [\#1778](https://github.com/Azure/BatchExplorer/issues/1778) * Green on light blue is very hard to read [\#1774](https://github.com/Azure/BatchExplorer/issues/1774) * Favourites are not persisted any more [\#1771](https://github.com/Azure/BatchExplorer/issues/1771) * Navigating to a job for the first time make an extra call with undefined jobId [\#1770](https://github.com/Azure/BatchExplorer/issues/1770) * Viewing a folder with lots of files in it (>10k) errors [\#1766](https://github.com/Azure/BatchExplorer/issues/1766) * Prep task status is always failure even when completed [\#1765](https://github.com/Azure/BatchExplorer/issues/1765) * Allow for multiple failed tasks to be rescheduled [\#1763](https://github.com/Azure/BatchExplorer/issues/1763) * High contrast theme [\#1762](https://github.com/Azure/BatchExplorer/issues/1762) * Clicking item in quick-list doesn't work on the first click. 
[\#1757](https://github.com/Azure/BatchExplorer/issues/1757) * Can't view images from task node file explorer [\#1756](https://github.com/Azure/BatchExplorer/issues/1756) * Cannot view text/log files in file explorer [\#1741](https://github.com/Azure/BatchExplorer/issues/1741) * 2 errors overlay when task node doesnt exist [\#1740](https://github.com/Azure/BatchExplorer/issues/1740) * Add local batch account with IP [\#1685](https://github.com/Azure/BatchExplorer/issues/1685) ### Other: * Remove flags as this is not compliant with Microsoft policy [\#1806](https://github.com/Azure/BatchExplorer/issues/1806) * Migrate out of @angular/http [\#1745](https://github.com/Azure/BatchExplorer/issues/1745) * Sort xliffs translations alphabetically [\#1731](https://github.com/Azure/BatchExplorer/issues/1731) * Virtual scroll switch to custom ngFor directive [\#1710](https://github.com/Azure/BatchExplorer/issues/1710) * Azure DevOps switch to ubuntu pool(Linux preview deprecated) [\#1695](https://github.com/Azure/BatchExplorer/issues/1695) * Release step make resilient to storage error [\#1689](https://github.com/Azure/BatchExplorer/issues/1689) * Setup tree shaking [\#1670](https://github.com/Azure/BatchExplorer/issues/1670) * Replace all new Error to be typed [\#1661](https://github.com/Azure/BatchExplorer/issues/1661) * Redesign how the upload works by having a addFile property on file navigator [\#1292](https://github.com/Azure/BatchExplorer/issues/1292) * Add CI to check the bundled app(.exe, .app) is working [\#580](https://github.com/Azure/BatchExplorer/issues/580) # 0.18.4 ### Bug fixes * Task exit code filter switch between include and exclude error [\#1687](https://github.com/Azure/BatchExplorer/issues/1687) * Error in pool container picker with null registries [\#1690](https://github.com/Azure/BatchExplorer/issues/1690) * Can't view content of prep/release task files [\#1692](https://github.com/Azure/BatchExplorer/issues/1692) * Issue when rescaling a pool and 
toggling autoscale [\#1751](https://github.com/Azure/BatchExplorer/issues/1751) # 0.18.3 ### Bug fixes * Increase pool quota quick fix not working [\#1667](https://github.com/Azure/BatchExplorer/issues/1667) * Can't export task to csv first time opening the statistics blade [\#1673](https://github.com/Azure/BatchExplorer/issues/1673) * Error on toggle filter button [\#1671](https://github.com/Azure/BatchExplorer/issues/1671) * Error when copying property which value is not a string(int) [\#1675](https://github.com/Azure/BatchExplorer/issues/1675) * Can't create a pool with custom images [\#1677](https://github.com/Azure/BatchExplorer/issues/1677) * Upload node logs not tracking upload on national clouds [\#1680](https://github.com/Azure/BatchExplorer/issues/1680) * Quota usages not showing up [\#1683](https://github.com/Azure/BatchExplorer/issues/1683) # 0.18.2 ### Bug fixes * Unique windows show() null pointer exception [\#1644](https://github.com/Azure/BatchExplorer/issues/1644) * Reimage node is calling reboot [\#1646](https://github.com/Azure/BatchExplorer/issues/1646) * Error loading metrics create null pointer exception [\#1648](https://github.com/Azure/BatchExplorer/issues/1648) * View readme of NCJ application not working [\#1652](https://github.com/Azure/BatchExplorer/issues/1652) * Cloning of Batch Pools does not work [\#1650](https://github.com/Azure/BatchExplorer/issues/1650) * Error when button component doesn't return an observable [\#1654](https://github.com/Azure/BatchExplorer/issues/1654) * Selecting checkbox doesn't add a tick to the checkbox [\#1658](https://github.com/Azure/BatchExplorer/issues/1658) # 0.18.1 ### Bug fixes * Crash on pool configuration [\#1638](https://github.com/Azure/BatchExplorer/issues/1638) # 0.18.0 [All items](https://github.com/Azure/BatchExplorer/milestone/24?closed=1) ### Features: * Disable/reenable scheduling on nodes [\#1629](https://github.com/Azure/BatchExplorer/issues/1629) * Show flags of the country where the account 
is located [\#1627](https://github.com/Azure/BatchExplorer/pull/1627) * Add link to privacy statement in application [\#1618](https://github.com/Azure/BatchExplorer/issues/1618) * Integrate telemetry for crash reporting and user actions [\#1610](https://github.com/Azure/BatchExplorer/issues/1610) * Mac application signing [\#1600](https://github.com/Azure/BatchExplorer/issues/1600) * Upgrade to the new Azure Batch api version 2018-08-01.7.0 [\#1581](https://github.com/Azure/BatchExplorer/issues/1581) * Background task manager v2 [\#1371](https://github.com/Azure/BatchExplorer/issues/1371) * Support multi instance tasks [\#1329](https://github.com/Azure/BatchExplorer/issues/1329) * Support sorting tasks [\#1328](https://github.com/Azure/BatchExplorer/issues/1328) * Support additional Gallery repositories [\#955](https://github.com/Azure/BatchExplorer/issues/955) * BatchExplorer should display what environment it is connected to [\#1555](https://github.com/Azure/BatchExplorer/issues/1555) * Need to display license picker for custom image [\#1575](https://github.com/Azure/BatchExplorer/issues/1575) * Support client side sorting for tables and quick list [\#1573](https://github.com/Azure/BatchExplorer/issues/1573) * Pull application license data from BatchPricing.softwares [\#1563](https://github.com/Azure/BatchExplorer/issues/1563) ### Bug fixes: * Certificate list blank [\#1624](https://github.com/Azure/BatchExplorer/issues/1624) * Account favourites not showing up when one is invalid [\#1619](https://github.com/Azure/BatchExplorer/issues/1619) * Cloning tasks having issue with user identity [\#1616](https://github.com/Azure/BatchExplorer/issues/1616) * Using an invalid URL for a Batch Account causes Add Pool sidebar to behave oddly [\#1613](https://github.com/Azure/BatchExplorer/issues/1613) * Issue with any POST request on local batch accounts [\#1607](https://github.com/Azure/BatchExplorer/issues/1607) * Don't let add duplicate local accounts 
[\#1605](https://github.com/Azure/BatchExplorer/issues/1605) * Container settings are required when editing start task but should be optional [\#1603](https://github.com/Azure/BatchExplorer/issues/1603) * Memory leak around pool node counts [\#1592](https://github.com/Azure/BatchExplorer/issues/1592) * Typo on the job action confirmation [\#1587](https://github.com/Azure/BatchExplorer/issues/1587) * File explorer view files with \ in name on linux [\#808](https://github.com/Azure/BatchExplorer/issues/808) * GOVT cloud metrics charts are failing [\#1550](https://github.com/Azure/BatchExplorer/issues/1550) * Account dashboard resources cards too large when no items [\#1541](https://github.com/Azure/BatchExplorer/issues/1541) * NcjTemplateService issues when templates not loaded [\#1390](https://github.com/Azure/BatchExplorer/issues/1390) * quick-list account loading appears after you click on the screen. [\#1560](https://github.com/Azure/BatchExplorer/issues/1560) * Investigate why upload progress is no longer being reported when uploading filegroup data [\#1567](https://github.com/Azure/BatchExplorer/issues/1567) * Node configuration don't call getRemoteLoginSettings every 5 seconds [\#1580](https://github.com/Azure/BatchExplorer/issues/1580) * Form components disappear after selecting a rendering image then another one. 
[\#1576](https://github.com/Azure/BatchExplorer/issues/1576) * In progress forms CSS busted [\#1566](https://github.com/Azure/BatchExplorer/issues/1566) ### Accessibility * Account list favorite not accessible [\#1626](https://github.com/Azure/BatchExplorer/issues/1626) ### Other: * Setup a new swagger validator to validate models [\#1632](https://github.com/Azure/BatchExplorer/issues/1632) * Switch to node 10 [\#1377](https://github.com/Azure/BatchExplorer/issues/1377) # 0.17.3 ### Minor feature: * Display storage account URL in the Credentials and code samples dialog [\#1556](https://github.com/Azure/BatchExplorer/issues/1556) ### Bug fixes: * Cannot upload files to file group in govt cloud [\#1557](https://github.com/Azure/BatchExplorer/issues/1557) * Cannot login to GOVT cloud [\#1548](https://github.com/Azure/BatchExplorer/issues/1548) * Pricing broken, due to api breaking change [\#1562](https://github.com/Azure/BatchExplorer/issues/1562) * Opening BatchExplorer in Govt cloud opens 2 application windows [\#1561](https://github.com/Azure/BatchExplorer/issues/1561) * Not persisting the last login and the last selected cloud [\#1542](https://github.com/Azure/BatchExplorer/issues/1542) * Caching issue on national clouds [\#1559](https://github.com/Azure/BatchExplorer/issues/1559) # 0.17.1 [All items](https://github.com/Azure/BatchExplorer/milestone/25?closed=1) ### Bug fixes: * Task outputs is broken when not using autostorage account [\#1522](https://github.com/Azure/BatchExplorer/issues/1522) * Cannot connect to Windows Cloud Service node [\#1529](https://github.com/Azure/BatchExplorer/issues/1529) * Users should be able to see password used to connect to remote node [\#1532](https://github.com/Azure/BatchExplorer/issues/1532) ### Other * Task properties pool and node should be links [\#1523](https://github.com/Azure/BatchExplorer/issues/1523) # 0.17.0 [All items](https://github.com/Azure/BatchExplorer/milestone/21?closed=1) ### Feature: * Improve experience for 
finding failed task [\#829](https://github.com/Azure/BatchExplorer/issues/829) * Make title bar inline with app [\#162](https://github.com/Azure/BatchExplorer/issues/162) * Add ability to remove pinned items from drop down [\#1379](https://github.com/Azure/BatchExplorer/issues/1379) * Create a job-id advanced type that validates the job-id doesn't exist. [\#1330](https://github.com/Azure/BatchExplorer/issues/1330) * Task Dependencies show task state with icon and color [\#1503](https://github.com/Azure/BatchExplorer/issues/1503) * Pool bar charts for node states [\#1502](https://github.com/Azure/BatchExplorer/issues/1502) * Add task runtime to task grid [\#1501](https://github.com/Azure/BatchExplorer/issues/1501) * Support internationalization [\#1499](https://github.com/Azure/BatchExplorer/issues/1499) * Duration picker v2 [\#1431](https://github.com/Azure/BatchExplorer/issues/1431) * Node Connect redesign to unify windows and linux experience [\#1492](https://github.com/Azure/BatchExplorer/issues/1492) * Task progress not exposing validity of task count api [\#1475](https://github.com/Azure/BatchExplorer/issues/1475) * Ability to override the BatchLabs-data branch that we pull templates from [\#1474](https://github.com/Azure/BatchExplorer/issues/1474) * Use select query for task list to improve performance [\#1468](https://github.com/Azure/BatchExplorer/issues/1468) * Batch Account URI should have https:// prefix [\#1435](https://github.com/Azure/BatchExplorer/issues/1435) * Task table column layout a little funky [\#1422](https://github.com/Azure/BatchExplorer/issues/1422) * BatchLabs: App splited in features that are can be enabled and disabled [\#1449](https://github.com/Azure/BatchExplorer/issues/1449) * BatchLabs one click node connect [\#1452](https://github.com/Azure/BatchExplorer/issues/1452) ### Bug fixes: * Uncaught exception for container pools with no container images and/or registries specified 
[\#1512](https://github.com/Azure/BatchExplorer/issues/1512) * Task timeline doesn't cancel requests when leaving component [\#1472](https://github.com/Azure/BatchExplorer/issues/1472) * Pool from Windows managed image displays as Linux [\#1436](https://github.com/Azure/BatchExplorer/issues/1436) ### Accessibility: * Server error component is not keyboard accessible [\#1426](https://github.com/Azure/BatchExplorer/issues/1426) * Images tags are missing alt attributes [\#1482](https://github.com/Azure/BatchExplorer/issues/1482) * Tags are not accessible via keyboard [\#1420](https://github.com/Azure/BatchExplorer/issues/1420) * Notification not keyboard accessible [\#1424](https://github.com/Azure/BatchExplorer/issues/1424) ### Other: * Rename BatchLabs to BatchExplorer [\#1497](https://github.com/Azure/BatchExplorer/issues/1497) * CSS quicklist error notice in incorrect place for pools, schedules and certs [\#1510](https://github.com/Azure/BatchExplorer/issues/1510) * Add some dependency injection logic in client process [\#1470](https://github.com/Azure/BatchExplorer/issues/1470) * Monaco editor switch to webpack [\#1156](https://github.com/Azure/BatchExplorer/issues/1156) # 0.16.2 [All items](https://github.com/Azure/BatchExplorer/milestone/23?closed=1) ### Bug fixes: * Unable to terminate multiple tasks [\#1457](https://github.com/Azure/BatchExplorer/issues/1457) * Pool with more than 2500 nodes does not show heatmap [\#1484](https://github.com/Azure/BatchExplorer/issues/1484) * Window asking for proxy credentials is showing a blank screen. [\#1489](https://github.com/Azure/BatchExplorer/issues/1489) # 0.16.1 [All items](https://github.com/Azure/BatchExplorer/milestone/22?closed=1) ### Bug fixes: * App insights docs has wrong environment variables names [\#1443](https://github.com/Azure/BatchExplorer/issues/1443) * Deleting 1 folder from a file group deletes ALL that match it! 
[\#1440](https://github.com/Azure/BatchExplorer/issues/1440) * Application package state equals version string [\#1442](https://github.com/Azure/BatchExplorer/issues/1442) * Transitioning a pool from fixed scaling to auto-scaling never enables the "Save" button [\#1441](https://github.com/Azure/BatchExplorer/issues/1441) # 0.16.0 [All items](https://github.com/Azure/BatchExplorer/milestone/19?closed=1) ### Features: * Show in the footer if BatchExplorer is connected to the internet [\#1408](https://github.com/Azure/BatchExplorer/issues/1408) * BatchExplorer not very helpfull when not able to loads tenants(Bad proxy for example) [\#1403](https://github.com/Azure/BatchExplorer/issues/1403) * Refreshing account list should also refresh subscriptions [\#1398](https://github.com/Azure/BatchExplorer/issues/1398) * Decode URL parameters passed to application [\#1364](https://github.com/Azure/BatchExplorer/issues/1364) * File group directory picker should validate directories exists [\#1362](https://github.com/Azure/BatchExplorer/issues/1362) * Batch insight show a line for each node on the pool wide graph [\#1359](https://github.com/Azure/BatchExplorer/issues/1359) * Add disk usage graphs for batch-insights [\#1357](https://github.com/Azure/BatchExplorer/issues/1357) * Python server: Add more detail to error handling [\#1355](https://github.com/Azure/BatchExplorer/issues/1355) * Change waiting for start task default to true [\#1349](https://github.com/Azure/BatchExplorer/issues/1349) * Resizable columns for table [\#1346](https://github.com/Azure/BatchExplorer/issues/1346) * Show that a filter is selected in quicklist [\#1335](https://github.com/Azure/BatchExplorer/issues/1335) * Use new commands design to implement bl-command-buttons [\#1319](https://github.com/Azure/BatchExplorer/issues/1319) * Add and update metadata for entities [\#1318](https://github.com/Azure/BatchExplorer/issues/1318) * Read file support different encoding 
[\#875](https://github.com/Azure/BatchExplorer/issues/875) ### Bug fixes: * Unable to resize pool using Batch Explorer on mac. [\#1413](https://github.com/Azure/BatchExplorer/issues/1413) * Opening 2 batchlabs instance is broken [\#1411](https://github.com/Azure/BatchExplorer/issues/1411) * BatchExplorer doesn't handle ProxyEnable setting in registry [\#1385](https://github.com/Azure/BatchExplorer/issues/1385) * Running task graph seems to be in a weird state [\#1382](https://github.com/Azure/BatchExplorer/issues/1382) * After ~1 hour of intensive use, app grinds to a halt [\#1369](https://github.com/Azure/BatchExplorer/issues/1369) * Storage explorer download folder has failed three times in a row [\#1368](https://github.com/Azure/BatchExplorer/issues/1368) * Tasks quick-list wont scroll [\#1367](https://github.com/Azure/BatchExplorer/issues/1367) * Navigate to Data before batch account loaded throws an error. [\#1361](https://github.com/Azure/BatchExplorer/issues/1361) * Delete many from quick-list doesn't remove items from list [\#1360](https://github.com/Azure/BatchExplorer/issues/1360) * User identity picker doesn't show up current value [\#1353](https://github.com/Azure/BatchExplorer/issues/1353) * Opening ms-batchlabs:// link to a template with file group picker open crash [\#1344](https://github.com/Azure/BatchExplorer/issues/1344) * Figure out why call to ratecard API is failing for pool pricing [\#1333](https://github.com/Azure/BatchExplorer/issues/1333) * Pressing enter on editable table remove the row [\#1327](https://github.com/Azure/BatchExplorer/issues/1327) ### Accessibility First few steps towards an accessible BatchExplorer * Navigation dropdowns not accessible via keyboard [\#1401](https://github.com/Azure/BatchExplorer/issues/1401) * Breadcrumb is not accessible via keyboard [\#1400](https://github.com/Azure/BatchExplorer/issues/1400) * Main navigation keyboard navigation and focus style 
[\#1395](https://github.com/Azure/BatchExplorer/issues/1395) ### Other: * Misleading documentation enable-app-insights-doc [\#1348](https://github.com/Azure/BatchExplorer/issues/1348) * Make it clear for that search field only support startswith [\#1326](https://github.com/Azure/BatchExplorer/issues/1326) # 0.15.2 Fixes: * 3rd subscription in account list is always disabled [\#1374](https://github.com/Azure/BatchExplorer/issues/1374) # 0.15.1 * Enable browsing files of a offline node * Change offline node color * Allow custom image and docker container # 0.15.0 [All items](https://github.com/Azure/BatchExplorer/milestone/17?closed=1) ### feature: * List context menu redesign(multi select support) [\#1300](https://github.com/Azure/BatchExplorer/issues/1300) * Select support disable [\#1295](https://github.com/Azure/BatchExplorer/issues/1295) * Add getting started scripts for aztk and doAzureParallel [\#1281](https://github.com/Azure/BatchExplorer/issues/1281) * Provide a setting to disable auto update when quiting [\#1267](https://github.com/Azure/BatchExplorer/issues/1267) * Login window and account loading indicator. 
[\#1265](https://github.com/Azure/BatchExplorer/issues/1265) * Allow user to provide proxy settings [\#1263](https://github.com/Azure/BatchExplorer/issues/1263) * Add ability to change the priority of a job [\#1260](https://github.com/Azure/BatchExplorer/issues/1260) * Improve the sidebar bookmark dropdown [\#1253](https://github.com/Azure/BatchExplorer/issues/1253) * New flex table layout ignores set width [\#1239](https://github.com/Azure/BatchExplorer/issues/1239) * File explorer ability to create folder [\#1234](https://github.com/Azure/BatchExplorer/issues/1234) * Data save last container type selection(Filegroup vs all) [\#1233](https://github.com/Azure/BatchExplorer/issues/1233) * Show task running time on completed task in task list [\#1231](https://github.com/Azure/BatchExplorer/issues/1231) * New select dropdown [\#1220](https://github.com/Azure/BatchExplorer/issues/1220) * Allow to get the template for gallery application [\#1218](https://github.com/Azure/BatchExplorer/issues/1218) * Opened form dropdown. close with middle click [\#1217](https://github.com/Azure/BatchExplorer/issues/1217) * Resize Pool options for node termination (like Portal) [\#1212](https://github.com/Azure/BatchExplorer/issues/1212) * Add file extension support to file-in-file-group advanced type [\#1209](https://github.com/Azure/BatchExplorer/issues/1209) * Expand on plugin parameters to automatically set up file group sync [\#1204](https://github.com/Azure/BatchExplorer/issues/1204) * Add a certificate reference to a pool [\#1194](https://github.com/Azure/BatchExplorer/issues/1194) * Passing a list of folders and or files from a rendering application plugin to pre-populate the file group creation form from the submit NCJ template page. 
[\#1180](https://github.com/Azure/BatchExplorer/issues/1180) * Don't limit data tab to auto storage account [\#1173](https://github.com/Azure/BatchExplorer/issues/1173) * Support Patching JobSchedules [\#1170](https://github.com/Azure/BatchExplorer/issues/1170) * Batch Account Certificates Experience [\#1165](https://github.com/Azure/BatchExplorer/issues/1165) * Refresh folder in file explorer should remove removed items. [\#874](https://github.com/Azure/BatchExplorer/issues/874) ### bug: * Prod build is borken with the new Commands [\#1311](https://github.com/Azure/BatchExplorer/issues/1311) * Spelling mistake on release website [\#1310](https://github.com/Azure/BatchExplorer/issues/1310) * Job progress doughnut renders funny when target node count is less than running nodes [\#1307](https://github.com/Azure/BatchExplorer/issues/1307) * Missing timestamp after generating credentials to connect to node [\#1304](https://github.com/Azure/BatchExplorer/issues/1304) * View node files for prep tasks file contents is truncated. [\#1302](https://github.com/Azure/BatchExplorer/issues/1302) * Deleting folder is broken with new storageAccountId [\#1290](https://github.com/Azure/BatchExplorer/issues/1290) * Fix pinning file groups to work with the new path [\#1289](https://github.com/Azure/BatchExplorer/issues/1289) * Viewing prep and release tasks for job shows node doesn't exist when it does. [\#1288](https://github.com/Azure/BatchExplorer/issues/1288) * NCJ file group selector shows all containers ... [\#1276](https://github.com/Azure/BatchExplorer/issues/1276) * Quotas not updated when switching Batch accounts [\#1269](https://github.com/Azure/BatchExplorer/issues/1269) * Select dropdown not showing when parent has overflow hidden [\#1261](https://github.com/Azure/BatchExplorer/issues/1261) * bl-select always defaults to focusFirstOption() [\#1258](https://github.com/Azure/BatchExplorer/issues/1258) * Bugs with storage containers. 
[\#1243](https://github.com/Azure/BatchExplorer/issues/1243) * Typo Internal Ip "Adress" should be "Address" [\#1240](https://github.com/Azure/BatchExplorer/issues/1240) * Create new file group name validation not showing details [\#1235](https://github.com/Azure/BatchExplorer/issues/1235) * NCJ load a template without metadata crash [\#1232](https://github.com/Azure/BatchExplorer/issues/1232) * Local Template encoded with UTF-8-BOM fails to parse. [\#1226](https://github.com/Azure/BatchExplorer/issues/1226) * Non Batch API error message passed to ServerError will miss actual error message [\#1224](https://github.com/Azure/BatchExplorer/issues/1224) * Auto pool not working for local template [\#1219](https://github.com/Azure/BatchExplorer/issues/1219) * Allow optional/empty fields in job/pool templates [\#1082](https://github.com/Azure/BatchExplorer/issues/1082) * Occasionally selecting a Batch account doesn't populate the jobs and pool from the selected account. [\#653](https://github.com/Azure/BatchExplorer/issues/653) ### other: * New form field should support hints and error [\#1279](https://github.com/Azure/BatchExplorer/issues/1279) * Update batchlabs website to point to azure storage builds [\#1275](https://github.com/Azure/BatchExplorer/issues/1275) * New input design [\#1273](https://github.com/Azure/BatchExplorer/issues/1273) * Searching always show current item [\#1246](https://github.com/Azure/BatchExplorer/issues/1246) * Gallery breadcrumb is still market [\#1227](https://github.com/Azure/BatchExplorer/issues/1227) * BatchExplorer auto update wait to be downloaded before quit and install [\#1206](https://github.com/Azure/BatchExplorer/issues/1206) # 0.14.1 ### Hot fixes * Selecting an item when the filter is open would not close the filter [\#1207](https://github.com/Azure/BatchExplorer/issues/1207) # 0.14.0 [All items](https://github.com/Azure/BatchExplorer/milestone/16?closed=1) ### Feature: * Add MS and NCS_V3 sizes to vm size picker. 
[\#1191](https://github.com/Azure/BatchExplorer/issues/1191) * Allow to pick custom user accounts when adding a task [\#1188](https://github.com/Azure/BatchExplorer/issues/1188) * Hide persisted files explorer when no container found [\#1185](https://github.com/Azure/BatchExplorer/issues/1185) * Drag and Drop support for local NCJ templates. [\#1179](https://github.com/Azure/BatchExplorer/issues/1179) * Dedicated page for account monitoring metrics [\#1149](https://github.com/Azure/BatchExplorer/issues/1149) * Upload node logs [\#1148](https://github.com/Azure/BatchExplorer/issues/1148) * Show app insights per node [\#1144](https://github.com/Azure/BatchExplorer/issues/1144) * Ability to delete a batch account [\#1133](https://github.com/Azure/BatchExplorer/issues/1133) * Handle multiple folder uploads for a single file group [\#1129](https://github.com/Azure/BatchExplorer/issues/1129) * File explorer right click download only works for containers [\#1120](https://github.com/Azure/BatchExplorer/issues/1120) * Support non-public Azure clouds [\#1116](https://github.com/Azure/BatchExplorer/issues/1116) * Make list and table use virtual scroll [\#1100](https://github.com/Azure/BatchExplorer/issues/1100) * Give an option to request more quota [\#1097](https://github.com/Azure/BatchExplorer/issues/1097) * File explorer keyboard navigation [\#1062](https://github.com/Azure/BatchExplorer/issues/1062) * Show quotas on respective pages [\#1048](https://github.com/Azure/BatchExplorer/issues/1048) * Ability to create a batch account [\#1022](https://github.com/Azure/BatchExplorer/issues/1022) * VNet support [\#1018](https://github.com/Azure/BatchExplorer/issues/1018) * Job schedule support [\#1008](https://github.com/Azure/BatchExplorer/issues/1008) * Create Blender plugin to test BL Custom Protocol [\#953](https://github.com/Azure/BatchExplorer/issues/953) * Support command line parameters to support custom workflow [\#856](https://github.com/Azure/BatchExplorer/issues/856) 
* More finely grained upload progress for file groups. [\#707](https://github.com/Azure/BatchExplorer/issues/707) * Support for adding custom image to pools [\#434](https://github.com/Azure/BatchExplorer/issues/434) ### Bug: * Batch account with no autostorage add pool doesn't show user accounts and start task picker [\#1190](https://github.com/Azure/BatchExplorer/issues/1190) * Getting a 400 error when trying to load files from Node. [\#1181](https://github.com/Azure/BatchExplorer/issues/1181) * NCJ Local templates seem to have stopped working [\#1171](https://github.com/Azure/BatchExplorer/issues/1171) * Data not being disposed correctly when switching accounts [\#1169](https://github.com/Azure/BatchExplorer/issues/1169) * Account details not disposing of the data [\#1167](https://github.com/Azure/BatchExplorer/issues/1167) * Pool heatmap is not updating [\#1162](https://github.com/Azure/BatchExplorer/issues/1162) * Clicking on the account details before done loading redirect to app packages [\#1157](https://github.com/Azure/BatchExplorer/issues/1157) * Clicking on the task filter from job details crash [\#1155](https://github.com/Azure/BatchExplorer/issues/1155) * Application license picker validation bug [\#1153](https://github.com/Azure/BatchExplorer/issues/1153) * Table formatting on account overview incorrect [\#1136](https://github.com/Azure/BatchExplorer/issues/1136) * Issue with list loading and changedetection [\#1131](https://github.com/Azure/BatchExplorer/issues/1131) * Deleting folder from file group deletes every file in file group. 
[\#1126](https://github.com/Azure/BatchExplorer/issues/1126) * Refresh button change detection issue [\#1122](https://github.com/Azure/BatchExplorer/issues/1122) * Data details(slow) change detection not triggering correctly [\#1119](https://github.com/Azure/BatchExplorer/issues/1119) * File too large to preview UI messed up [\#1113](https://github.com/Azure/BatchExplorer/issues/1113) * Update existing file-group not working [\#1111](https://github.com/Azure/BatchExplorer/issues/1111) * Heatmap change detection issue [\#1095](https://github.com/Azure/BatchExplorer/issues/1095) * Error message after updating [\#1083](https://github.com/Azure/BatchExplorer/issues/1083) * Should disable "add a pool button" before vmSize is initialized [\#985](https://github.com/Azure/BatchExplorer/issues/985) * Task advanced filter is not applying [\#1158](https://github.com/Azure/BatchExplorer/issues/1158) * Fix tool tip for account credentials dialog and show ARM resource ID for Account [\#1201](https://github.com/Azure/BatchExplorer/issues/1201) ### Usability: * Do not show the pool os information is using custom image [\#1192](https://github.com/Azure/BatchExplorer/issues/1192) * Pool cpu usage individual cpu is confusing [\#1145](https://github.com/Azure/BatchExplorer/issues/1145) * Account charts are a bit confusing [\#1138](https://github.com/Azure/BatchExplorer/issues/1138) ### Other: * Remove all import from "electron" [\#1140](https://github.com/Azure/BatchExplorer/issues/1140) * Complete making @batch-flask independent [\#1109](https://github.com/Azure/BatchExplorer/issues/1109) * Move logger to @batch-flask package [\#1108](https://github.com/Azure/BatchExplorer/issues/1108) * Create a new @batch-flask folder to simulate a package [\#1106](https://github.com/Azure/BatchExplorer/issues/1106) * Connect to Node - always says creds valid for 24 hours on node [\#1085](https://github.com/Azure/BatchExplorer/issues/1085) * Update EULA, license and thirdpartynotices 
[\#1105](https://github.com/Azure/BatchExplorer/issues/1105) * Redesign table component [\#1101](https://github.com/Azure/BatchExplorer/issues/1101) * Gallery tab still has "Market" breadcrumb & title [\#1076](https://github.com/Azure/BatchExplorer/issues/1076) # 0.13.1 Hot fixes: * Logs being in the wrong folder [#1087](https://github.com/Azure/BatchExplorer/issues/1087) * Proxy settings crashing if not in the expected format * Error popup after updating [#1083](https://github.com/Azure/BatchExplorer/issues/1083) * Fix auto update # 0.13.0 [All items](https://github.com/Azure/BatchExplorer/milestone/15?closed=1) ### feature: * Ctrl+Shift+N for new window [\#1046](https://github.com/Azure/BatchExplorer/issues/1046) * BatchExplorer behind proxy [\#1015](https://github.com/Azure/BatchExplorer/issues/1015) * Metadata property should display as pre [\#1010](https://github.com/Azure/BatchExplorer/issues/1010) ### bug: * Log out button doesnt work [\#1068](https://github.com/Azure/BatchExplorer/issues/1068) * Stale task list [\#1065](https://github.com/Azure/BatchExplorer/issues/1065) * File explorer folder last modified invalid date [\#1061](https://github.com/Azure/BatchExplorer/issues/1061) * BatchExplorer doesn't use nextLink to retrieve all the subscriptions [\#1057](https://github.com/Azure/BatchExplorer/issues/1057) * Pool vm size picker get weird spacing [\#1055](https://github.com/Azure/BatchExplorer/issues/1055) * Error message box should scale to message size [\#1053](https://github.com/Azure/BatchExplorer/issues/1053) * Authentication page hides behind app when you are not logged in [\#1043](https://github.com/Azure/BatchExplorer/issues/1043) * Pool picker pools disappear after switched to a different batch account. 
[\#1038](https://github.com/Azure/BatchExplorer/issues/1038) * App protocol handler with session_id causes weird redraw issue [\#1037](https://github.com/Azure/BatchExplorer/issues/1037) * Splash screen goes behind the app half way through loading [\#1035](https://github.com/Azure/BatchExplorer/issues/1035) * Job Statistics: Graph fails to load "Loading Tasks. This can take a long time" [\#873](https://github.com/Azure/BatchExplorer/issues/873) ### other: # 0.12.4 ### Hot fix: * Error redeem auth code for a token... [\#1044](https://github.com/Azure/BatchExplorer/issues/1044) * Updater appears to be broken [\#1042](https://github.com/Azure/BatchExplorer/issues/1042) # 0.12.3 ### Hot fix: * File-group/container issue with adding more files(Disabled for non file group for now) [\#1033](https://github.com/Azure/BatchExplorer/issues/1033) * Storage Container Search Broken [\#1039](https://github.com/Azure/BatchExplorer/issues/1039) # 0.12.2 ### Hot fix: * Update Electron to fix vulnerability [\#1030](https://github.com/Azure/BatchExplorer/issues/1030) # 0.12.1 ### Hot fix: * Subscriptions not loading if not cached [\#1027](https://github.com/Azure/BatchExplorer/issues/1027) # 0.12.0 [All items](https://github.com/Azure/BatchExplorer/milestone/14?closed=1) ### Feature: * Data view should show all blob container not just file group [\#1006](https://github.com/Azure/BatchExplorer/issues/1006) * Rbac permission support. 
Disable action in batchlabs if user doesn't have write permission [\#1000](https://github.com/Azure/BatchExplorer/issues/1000) * Make app single instance [\#998](https://github.com/Azure/BatchExplorer/issues/998) * Stop prompting aad login window when refresh token are still valid [\#990](https://github.com/Azure/BatchExplorer/issues/990) * Enable AOT compilation to improve loading time [\#986](https://github.com/Azure/BatchExplorer/issues/986) * Cache batch accounts to improve initial loading time [\#982](https://github.com/Azure/BatchExplorer/issues/982) * Provide sample code to get started with shared key credentials entered [\#980](https://github.com/Azure/BatchExplorer/issues/980) * Account credentials access [\#970](https://github.com/Azure/BatchExplorer/issues/970) * Support for inbound endpoints [\#965](https://github.com/Azure/BatchExplorer/issues/965) * Make a open component in a new window [\#74](https://github.com/Azure/BatchExplorer/issues/74) * Update the theming system to use json instead of scss [\#1012](https://github.com/Azure/BatchExplorer/issues/1012) ### Other: * Implement a new promise base communication from renderer to main process [\#1004](https://github.com/Azure/BatchExplorer/issues/1004) * Add code coverage [\#987](https://github.com/Azure/BatchExplorer/issues/987) * Extract AAD logic to be outside of the angular service into the node environment [\#963](https://github.com/Azure/BatchExplorer/issues/963) # 0.11.0 [All items](https://github.com/Azure/BatchExplorer/milestone/12?closed=1) ### feature: * Register batchlabs default protocol to open from the browser [\#934](https://github.com/Azure/BatchExplorer/issues/934) * Batch Explorer should show a clear error when it cannot connect to its python web service [\#923](https://github.com/Azure/BatchExplorer/issues/923) * Implement a footer for the app and move some of the dropdown from the header [\#901](https://github.com/Azure/BatchExplorer/issues/901) * Show current quota usage on the 
account page [\#799](https://github.com/Azure/BatchExplorer/issues/799) * File explorer download a folder with right click [\#657](https://github.com/Azure/BatchExplorer/issues/657) * Goto directly to an entity doesn't show the entity in the quicklist [\#199](https://github.com/Azure/BatchExplorer/issues/199) * Export entities to template to allow cloning after deleted [\#19](https://github.com/Azure/BatchExplorer/issues/19) * NCJ advanced type for generating a container SAS [\#757](https://github.com/Azure/BatchExplorer/issues/757) ### bug: * Shortcut "cmd+H" is not supported on macOS [\#948](https://github.com/Azure/BatchExplorer/issues/948) * Pricing is broken [\#857](https://github.com/Azure/BatchExplorer/issues/857) * Pool estimated cost take rendering license into account [\#684](https://github.com/Azure/BatchExplorer/issues/684) ### other: * Application package icons need updating [\#939](https://github.com/Azure/BatchExplorer/issues/939) * Tweak quick search ui [\#924](https://github.com/Azure/BatchExplorer/issues/924) * List multi select should change color when losing focus [\#31](https://github.com/Azure/BatchExplorer/issues/31) # 0.10.2 ### Bug: * VM Size selector broken [\#940](https://github.com/Azure/BatchExplorer/issues/940) # 0.10.1 [All items](https://github.com/Azure/BatchExplorer/milestone/13?closed=1) ### Bug: * Nodes with start task failed state don't show the files [\#929](https://github.com/Azure/BatchExplorer/issues/929) * OS Family Not Reported on Pool Correctly [\#927](https://github.com/Azure/BatchExplorer/issues/927) * Error reading job prep-task [\#926](https://github.com/Azure/BatchExplorer/issues/926) # 0.10.0 [All items](https://github.com/Azure/BatchExplorer/milestone/11?closed=1) ### Feature: * Move breadcrumb in the header [\#906](https://github.com/Azure/BatchExplorer/issues/906) * Create Pool/Job/Task monaco json editor intellisense [\#888](https://github.com/Azure/BatchExplorer/issues/888) * Log viewer should switch to monaco 
editor [\#882](https://github.com/Azure/BatchExplorer/issues/882) * Pause notification dismiss timeout when hovering the notification [\#879](https://github.com/Azure/BatchExplorer/issues/879) * Allow to pick expiry time for user when connecting to a node [\#878](https://github.com/Azure/BatchExplorer/issues/878) * Node files display message when node is not available [\#876](https://github.com/Azure/BatchExplorer/issues/876) * Move from Codemirror to Monaco editor [\#870](https://github.com/Azure/BatchExplorer/issues/870) * Make notification stay longer on the screen [\#848](https://github.com/Azure/BatchExplorer/issues/848) * Ability to write json payload in the create forms and submit instead of UI [\#844](https://github.com/Azure/BatchExplorer/issues/844) * Allow users to create empty file groups [\#826](https://github.com/Azure/BatchExplorer/issues/826) ### Bug: * Data upload in file group is not working [\#912](https://github.com/Azure/BatchExplorer/issues/912) * Create empty file-group doesn't validate container name [\#905](https://github.com/Azure/BatchExplorer/issues/905) * CSS for "forms in progress" needs updating and fonts made readable and consistent [\#904](https://github.com/Azure/BatchExplorer/issues/904) * Switching fast between pools crash UI [\#898](https://github.com/Azure/BatchExplorer/issues/898) * CSS bug when too many files in task outputs file explorer [\#893](https://github.com/Azure/BatchExplorer/issues/893) * Account quota not updating when refreshing [\#885](https://github.com/Azure/BatchExplorer/issues/885) * Missing SKU details about Linux N series VM [\#872](https://github.com/Azure/BatchExplorer/issues/872) ### Other: * Prepare release 0.10.0 [\#915](https://github.com/Azure/BatchExplorer/issues/915) * Useragent should include OS [\#895](https://github.com/Azure/BatchExplorer/issues/895) * Should we integrate Application Insights into Batch Explorer? 
[\#824](https://github.com/Azure/BatchExplorer/issues/824) * Refactor rx-list-proxy to a new system [\#814](https://github.com/Azure/BatchExplorer/issues/814) * Suggest using iconography instead of a label for the breadcrumb bar [\#696](https://github.com/Azure/BatchExplorer/issues/696) * Ability to pin Jobs, Tasks, or Pools. [\#456](https://github.com/Azure/BatchExplorer/issues/456) * Add typing to RxProxy options [\#204](https://github.com/Azure/BatchExplorer/issues/204) * Perf counter support [\#112](https://github.com/Azure/BatchExplorer/issues/112) # 0.9.0 [All items](https://github.com/Azure/BatchExplorer/milestone/10?closed=1) ### Feature: * Make start task command line textbox wrap [\#847](https://github.com/Azure/BatchExplorer/issues/847) * Command line properties is often too long to be displayed in properties. [\#837](https://github.com/Azure/BatchExplorer/issues/837) * Show a link to the logs folder to help debug [\#836](https://github.com/Azure/BatchExplorer/issues/836) * Auto delete package versions when deleting an application package [\#831](https://github.com/Azure/BatchExplorer/issues/831) * Display the application version [\#820](https://github.com/Azure/BatchExplorer/issues/820) * Add evaluate autoscale formula [\#817](https://github.com/Azure/BatchExplorer/issues/817) * Add compute node errors banner [\#816](https://github.com/Azure/BatchExplorer/issues/816) * Job create experience more details [\#794](https://github.com/Azure/BatchExplorer/issues/794) * Upgrade to the new azure-batch sdk that work in the browser env [\#792](https://github.com/Azure/BatchExplorer/issues/792) * Add context menu to app packages quick-list [\#776](https://github.com/Azure/BatchExplorer/issues/776) * Allow file group creation from NCJ job submission page [\#761](https://github.com/Azure/BatchExplorer/issues/761) ### Bug: * Can't add a task when job is disabled [\#864](https://github.com/Azure/BatchExplorer/issues/864) * Can't preview image on Windows or Linux nodes 
[\#853](https://github.com/Azure/BatchExplorer/issues/853) * Disable "reimage node" option for nodes in IaaS pool [\#852](https://github.com/Azure/BatchExplorer/issues/852) * User Identity not showing up in start task [\#849](https://github.com/Azure/BatchExplorer/issues/849) * Adding a new task seems to produce blank list in the table [\#841](https://github.com/Azure/BatchExplorer/issues/841) * When deleting job from the details card the css overlay is not removed. [\#828](https://github.com/Azure/BatchExplorer/issues/828) * Failed to upload file groups for classic storage accounts [\#819](https://github.com/Azure/BatchExplorer/issues/819) ### Other: * Getting ready for version 0.9.0 [\#866](https://github.com/Azure/BatchExplorer/issues/866) * Set user agent to BatchExplorer for all request [\#861](https://github.com/Azure/BatchExplorer/issues/861) * Add suport for maxWallClockTime in the create job experience [\#839](https://github.com/Azure/BatchExplorer/issues/839) * Refactor rx-entity-proxy to a new system [\#795](https://github.com/Azure/BatchExplorer/issues/795) * Make an about page [\#279](https://github.com/Azure/BatchExplorer/issues/279) # 0.8.0 [All items](https://github.com/Azure/BatchExplorer/milestone/9?closed=1) ### Feature: * Ncj app gallery [\#786](https://github.com/Azure/BatchExplorer/issues/786) * Task output messages confusing for customers [\#769](https://github.com/Azure/BatchExplorer/issues/769) * Allow specifying resize timeout on pool create [\#764](https://github.com/Azure/BatchExplorer/issues/764) * Notification actions [\#750](https://github.com/Azure/BatchExplorer/issues/750) * Enable edit start task from the node and reboot [\#749](https://github.com/Azure/BatchExplorer/issues/749) * Allow delete folder/file from the file group context menu. 
[\#733](https://github.com/Azure/BatchExplorer/issues/733) * Ability to resize the tree view in the file explorer(Movable splitter) [\#724](https://github.com/Azure/BatchExplorer/issues/724) * Find a way to surface prep and release task failures [\#708](https://github.com/Azure/BatchExplorer/issues/708) ### Bug: * Form picker(Start task) reset to empty from when clicking cancel [\#801](https://github.com/Azure/BatchExplorer/issues/801) * Typo in pool configuration [\#798](https://github.com/Azure/BatchExplorer/issues/798) * Creating a pool without changing the resizeTimeout gives an error [\#796](https://github.com/Azure/BatchExplorer/issues/796) * Pool start task failed quickfix not doing anything [\#788](https://github.com/Azure/BatchExplorer/issues/788) * AAD refresh token expired/revoke doesn't refresh the app. [\#783](https://github.com/Azure/BatchExplorer/issues/783) * File group download only downloading files at the root [\#780](https://github.com/Azure/BatchExplorer/issues/780) * After deleting application, overlay is not removed [\#777](https://github.com/Azure/BatchExplorer/issues/777) * File groups not listing all the files [\#751](https://github.com/Azure/BatchExplorer/issues/751) * Memory leak in app [\#745](https://github.com/Azure/BatchExplorer/issues/745) * Fix spelling of completition [\#742](https://github.com/Azure/BatchExplorer/issues/742) * Copy and paste doesn't work on osx prod build [\#727](https://github.com/Azure/BatchExplorer/issues/727) * Misleading message "The files for the specified task have been cleaned from the node." 
[\#689](https://github.com/Azure/BatchExplorer/issues/689) ### Other: * Organize summary card for all entities [\#754](https://github.com/Azure/BatchExplorer/issues/754) * Disable tab animations [\#747](https://github.com/Azure/BatchExplorer/issues/747) * show subscription name in the account details subtitle [\#740](https://github.com/Azure/BatchExplorer/issues/740) * Make quick list more compact [\#735](https://github.com/Azure/BatchExplorer/issues/735) * Make the details take the full height and scrolling happens in tabs content [\#730](https://github.com/Azure/BatchExplorer/issues/730) * Refactor server error to work better with all different inputs [\#694](https://github.com/Azure/BatchExplorer/issues/694) * Remove storage node proxy [\#685](https://github.com/Azure/BatchExplorer/issues/685) # 0.7.0 [All items](https://github.com/Azure/BatchExplorer/milestone/8?closed=1) ### Features: * Ability to view third party notice from UI [\#690](https://github.com/Azure/BatchExplorer/issues/690) * Command line input for task improvement [\#670](https://github.com/Azure/BatchExplorer/issues/670) * Add files to a file group with drag and drop [\#651](https://github.com/Azure/BatchExplorer/issues/651) * Add refresh shortcut to work in prod build [\#647](https://github.com/Azure/BatchExplorer/issues/647) * User identity for task [\#639](https://github.com/Azure/BatchExplorer/issues/639) * Clean up excessive console errors when task logs are not available on node [\#631](https://github.com/Azure/BatchExplorer/issues/631) * Add 3ds max to the application license picker [\#627](https://github.com/Azure/BatchExplorer/issues/627) * Job tasks running time graph sorting/grouping of x axis [\#624](https://github.com/Azure/BatchExplorer/issues/624) * Add charts on the job home page(when no jobs selected) [\#621](https://github.com/Azure/BatchExplorer/issues/621) * Feature: File explorer [\#614](https://github.com/Azure/BatchExplorer/issues/614) * Make an install command to help 
people getting started(windows) [\#610](https://github.com/Azure/BatchExplorer/issues/610) * Add more charts for a job [\#473](https://github.com/Azure/BatchExplorer/issues/473) * Settings page [\#472](https://github.com/Azure/BatchExplorer/issues/472) * Tree view for files [\#466](https://github.com/Azure/BatchExplorer/issues/466) * Provide built app for download [\#405](https://github.com/Azure/BatchExplorer/issues/405) * Smart card support for windows [\#271](https://github.com/Azure/BatchExplorer/issues/271) ### Bugs: * Heatmap display bug when resizing window or pool resize [\#715](https://github.com/Azure/BatchExplorer/issues/715) * Exit code is not showing in the task table list [\#712](https://github.com/Azure/BatchExplorer/issues/712) * Job preparation and release task having styling issues [\#709](https://github.com/Azure/BatchExplorer/issues/709) * Progress getting lost if file group name is too large [\#704](https://github.com/Azure/BatchExplorer/issues/704) * File explorer not reading files from storage account. [\#702](https://github.com/Azure/BatchExplorer/issues/702) * Job graph is overflowing vertically when in running prod [\#697](https://github.com/Azure/BatchExplorer/issues/697) * File explorer long file/folder name wrapping bug [\#668](https://github.com/Azure/BatchExplorer/issues/668) * Autoscale formula not updating [\#665](https://github.com/Azure/BatchExplorer/issues/665) * Profile settings throws an error for user settings [\#661](https://github.com/Azure/BatchExplorer/issues/661) * Profile menu item forces navigation to dashboard and reload when closed. 
[\#660](https://github.com/Azure/BatchExplorer/issues/660) * File explorer improve errors on task outputs [\#654](https://github.com/Azure/BatchExplorer/issues/654) * UI gets into a bad state if you navigate to a start task which has an environment variable with no value [\#646](https://github.com/Azure/BatchExplorer/issues/646) * Task id needs to be truncated in the table [\#645](https://github.com/Azure/BatchExplorer/issues/645) * run elevated not set when running tasks with autoUser in admin mode [\#638](https://github.com/Azure/BatchExplorer/issues/638) * BatchExplorer ghost process after closing prod app [\#633](https://github.com/Azure/BatchExplorer/issues/633) * Detailed information should be shown if an error occurs during allocation [\#618](https://github.com/Azure/BatchExplorer/issues/618) * Splash screen not showing in packaged distributable [\#616](https://github.com/Azure/BatchExplorer/issues/616) * Graph hover text [\#608](https://github.com/Azure/BatchExplorer/issues/608) * Grammar in task running time graph [\#607](https://github.com/Azure/BatchExplorer/issues/607) * Handle forbidden 403 errors [\#577](https://github.com/Azure/BatchExplorer/issues/577) * Cannot read a blob from a file group with a full path. 
[\#561](https://github.com/Azure/BatchExplorer/issues/561) ### Other: * Update readme to prepare for the release [\#692](https://github.com/Azure/BatchExplorer/issues/692) * ThirdPartyNotice generator [\#682](https://github.com/Azure/BatchExplorer/issues/682) * Log python stdout and stderr to file [\#678](https://github.com/Azure/BatchExplorer/issues/678) * Find an open port for the python server to connect to [\#676](https://github.com/Azure/BatchExplorer/issues/676) * Switch to es6 [\#641](https://github.com/Azure/BatchExplorer/issues/641) * Table selection/activation improvement [\#626](https://github.com/Azure/BatchExplorer/issues/626) * Upload file group as a background task [\#615](https://github.com/Azure/BatchExplorer/issues/615) # Version 0.6.0(Beta) [All items](https://github.com/Azure/BatchExplorer/milestone/6?closed=1) ### Features * Show pool estimated pricing [\#595](https://github.com/Azure/BatchExplorer/issues/595) * Added graphs for the job [\#591](https://github.com/Azure/BatchExplorer/issues/591) * Download a file group(NCJ) [\#589](https://github.com/Azure/BatchExplorer/issues/589) * File picker inside a file group(NCJ) [\#571](https://github.com/Azure/BatchExplorer/issues/571) * File group picker(NCJ) [\#569](https://github.com/Azure/BatchExplorer/issues/569) * File group UI(NJC) [\#530](https://github.com/Azure/BatchExplorer/issues/530) * Delete a node [\#554](https://github.com/Azure/BatchExplorer/issues/554) * Propose to delete the job with the same id as the pool you are trying to delete [\#543](https://github.com/Azure/BatchExplorer/issues/543) * Preview of files(node or storage uploaded) is more efficient with caching [\#519](https://github.com/Azure/BatchExplorer/issues/519) * Make metadata editable [\#513](https://github.com/Azure/BatchExplorer/issues/513) * Application license picker(Maya, 3ds Max) [\#498](https://github.com/Azure/BatchExplorer/issues/498) * Right click functionatlities on the heatmap 
[\#487](https://github.com/Azure/BatchExplorer/issues/487) [Many bug fixes](https://github.com/Azure/BatchExplorer/issues?q=is%3Aissue+milestone%3A0.6.0+is%3Aclosed+label%3Abug) # Version 0.5.0(Beta) [All items](https://github.com/Azure/BatchExplorer/milestone/5?closed=1) ### Features * Link Storage account in Batch Explorer [\#385](https://github.com/Azure/BatchExplorer/issues/385) * New actions buttons [\#408](https://github.com/Azure/BatchExplorer/issues/408) * Low priority VMs [\#414](https://github.com/Azure/BatchExplorer/issues/414) * Details now refresh automatically every 10 seconds [\#428](https://github.com/Azure/BatchExplorer/issues/428) * Show batch account quotas [\#413](https://github.com/Azure/BatchExplorer/issues/413) * Job show manager task details [\#447](https://github.com/Azure/BatchExplorer/issues/447) * Preview images(and gif) and code files in labs [\#417](https://github.com/Azure/BatchExplorer/issues/417) * Setup python support for ncj [\#439](https://github.com/Azure/BatchExplorer/issues/439) * Task output quick add otherfiles for debug [\#184](https://github.com/Azure/BatchExplorer/issues/184) * Job prep/release task status read experience [\#429](https://github.com/Azure/BatchExplorer/issues/429) * Start task failed show error banner on node details [\#476](https://github.com/Azure/BatchExplorer/issues/476) # Version 0.4.0(Beta) [All items](https://github.com/Azure/BatchExplorer/milestone/3?closed=1) ### Features * Added a new multi picker control [\#358](https://github.com/Azure/BatchExplorer/issues/358) * Added user accounts support at pool creation using the multi picker [\#359](https://github.com/Azure/BatchExplorer/issues/359) * Update enabled/disabled properties icon to be less confusing [\#354](https://github.com/Azure/BatchExplorer/issues/354) * Pool start task can now use the useridentity selecition. 
[\#356](https://github.com/Azure/BatchExplorer/issues/354) * Move tasks tab to be first in the tab list [\#375](https://github.com/Azure/BatchExplorer/issues/375) * Made a new editable table control and update resource files to use it [\#376](https://github.com/Azure/BatchExplorer/issues/376) * New environment settings picker for tasks and start task [\#355](https://github.com/Azure/BatchExplorer/issues/355) * Improve account home page with a quick access to pools, jobs and applications [\#310](https://github.com/Azure/BatchExplorer/issues/310) * Account list now allow to filter by multiple subscription(Last selection is saved) [\#352](https://github.com/Azure/BatchExplorer/issues/352) * Use chached value to display entity(Job, Pool, etc.) immediately when selected in the list [\#382](https://github.com/Azure/BatchExplorer/issues/382) * Added a few more missing fields to the pool creation [\#357](https://github.com/Azure/BatchExplorer/issues/357) * Added loading icon for account list on first load [\#340](https://github.com/Azure/BatchExplorer/issues/340) * Added a packaging flow to be able to make an exe [\#364](https://github.com/Azure/BatchExplorer/issues/364) * Improve dates and timespan field in configuration [\#396](https://github.com/Azure/BatchExplorer/issues/396) * Listen to electron error events to show a recovery window [\#337](https://github.com/Azure/BatchExplorer/issues/337) ### Fixes * Edit start task cannot cancel [\#367](https://github.com/Azure/BatchExplorer/issues/367) * Fix bug where graphs keeps history when switching between pools [\#353](https://github.com/Azure/BatchExplorer/issues/353) * Fix unwanted form submit when pressing enter [\#393](https://github.com/Azure/BatchExplorer/issues/393) * Fix configuration tabs having a nested scrollbar [\#397](https://github.com/Azure/BatchExplorer/issues/397) * Fix list not having focus after click [\#400](https://github.com/Azure/BatchExplorer/issues/400) # Version 0.3.1(Beta) [All 
items](https://github.com/Azure/BatchExplorer/milestone/4?closed=1) ### Fixes * Fix error when cloning a pool not using autoscale forumla [\#342](https://github.com/Azure/BatchExplorer/issues/342) * UI bug in the pool nodes preview(Font size is off) [\#332](https://github.com/Azure/BatchExplorer/issues/332) * Application edit form missed in the new form refactor [\#334](https://github.com/Azure/BatchExplorer/issues/334) # Version 0.3.0(Beta) [All items](https://github.com/Azure/BatchExplorer/milestone/2?closed=1) ### Features * Autoscale forumla support with option to save forumla [\#321](https://github.com/Azure/BatchExplorer/issues/321) * Big work on the form UI(Also added pool start task picker) - Section and picker [\#321](https://github.com/Azure/BatchExplorer/issues/4) - Form error sticky at the bottom not to miss it [\#317](https://github.com/Azure/BatchExplorer/issues/317) * Read/Save files to azure storage UX [\#110](https://github.com/Azure/BatchExplorer/issues/110) * New VM size picker as a sortable table [\#292](https://github.com/Azure/BatchExplorer/issues/292) * New pool picker for the job create experience [\#284](https://github.com/Azure/BatchExplorer/issues/284) * New OS picker for the pool create experience [\#278](https://github.com/Azure/BatchExplorer/issues/278) * Added refresh account button [\#289](https://github.com/Azure/BatchExplorer/issues/289) ### Fixes * Bug with max results [\#295](https://github.com/Azure/BatchExplorer/issues/295) [\#297](https://github.com/Azure/BatchExplorer/issues/297) [\#299](https://github.com/Azure/BatchExplorer/issues/299) # Version 0.2.0(Beta) [All items](https://github.com/Azure/BatchExplorer/milestone/1?closed=1) ### Features * Production build [PR 173](https://github.com/Azure/BatchExplorer/pull/173) * Improve the VM size experience to show info about each vm size [PR 277](https://github.com/Azure/BatchExplorer/pull/277) * Load all the VM sizes [PR 275](https://github.com/Azure/BatchExplorer/pull/275) * 
Load all accounts on start: improve account selection experience by removing the need to click on the subscription first [PR 273](https://github.com/Azure/BatchExplorer/pull/273) * Creating a new entity will add it to the query cache(i.e. Adding a pool then switching to jobs list then back to pool should still show the added pool in the list) [PR 272](https://github.com/Azure/BatchExplorer/pull/272) * Splash screen show progress [PR 270](https://github.com/Azure/BatchExplorer/pull/270) * Updated application icon [PR 266](https://github.com/Azure/BatchExplorer/pull/266) * Clone entities should keep attributes not in form [PR 262](https://github.com/Azure/BatchExplorer/pull/262) * Added yarn [PR 260](https://github.com/Azure/BatchExplorer/pull/260) ### Fixes * Fix node files `Load more` always showing [PR 268](https://github.com/Azure/BatchExplorer/pull/268) # Version 0.1.0(Beta) Initial version ### Features * Login with azure active directory(Giving access to user subscriptions and applications) * Browse pools, node, jobs, tasks, applications * Basic creation of pools, jobs and tasks * Upload new applications and packages * Graphs for status of pools(heatmap, nodes available, running tasks) * Many error banner helper with quick fixes options(e.g. Task timeout) * Much more [All closed issues](https://github.com/Azure/BatchExplorer/issues?q=is%3Aissue+is%3Aclosed)
{ "pile_set_name": "Github" }
/** * BSD-style license; for more info see http://pmd.sourceforge.net/license.html */ package net.sourceforge.pmd; /** * A convenience exception wrapper. Contains the original exception, if any. * Also, contains a severity number (int). Zero implies no severity. The higher * the number the greater the severity. * * @author Donald A. Leckie * @version $Revision$, $Date$ * @since August 30, 2002 */ public class PMDException extends Exception { private static final long serialVersionUID = 6938647389367956874L; private int severity; /** * Creates a new PMD exception with the specified message. * * @param message * the message */ public PMDException(String message) { super(message); } /** * Creates a new PMD exception with the specified message and the given * reason as root cause. * * @param message * the message * @param reason * the root cause */ public PMDException(String message, Exception reason) { super(message, reason); } public void setSeverity(int severity) { this.severity = severity; } public int getSeverity() { return severity; } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>displayName</key> <string>Slider</string> <key>ordering</key> <integer>13000</integer> <key>description</key> <string>Allows the user to visually select a value</string> <key>inheritsFrom</key> <string>CCControl</string> <key>canHaveChildren</key> <false/> <key>className</key> <string>CCSlider</string> <key>editorClassName</key> <string>CCBPSlider</string> <key>propertiesOverridden</key> <array> <dict> <key>readOnly</key> <true/> <key>type</key> <string>Size</string> <key>name</key> <string>contentSize</string> <key>displayName</key> <string>Content size</string> </dict> <dict> <key>default</key> <array> <integer>200</integer> <integer>10</integer> <integer>0</integer> <integer>0</integer> </array> <key>affectsProperties</key> <array> <string>maxSize</string> <string>contentSize</string> </array> <key>readOnly</key> <false/> <key>type</key> <string>Size</string> <key>name</key> <string>preferredSize</string> <key>displayName</key> <string>Preferred size</string> </dict> <dict> <key>affectsProperties</key> <array> <string>preferredSize</string> <string>contentSize</string> </array> <key>readOnly</key> <true/> <key>type</key> <string>Size</string> <key>name</key> <string>maxSize</string> <key>displayName</key> <string>Max size</string> </dict> </array> <key>properties</key> <array> <dict> <key>displayName</key> <string>CCSlider</string> <key>name</key> <string>CCSlider</string> <key>type</key> <string>Separator</string> <key>dontSetInEditor</key> <true/> </dict> <dict> <key>default</key> <array> <string></string> <string>Resources/SliderBgNormal.png</string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Background</string> <key>type</key> <string>SpriteFrame</string> <key>name</key> 
<string>backgroundSpriteFrame|Normal</string> </dict> <dict> <key>default</key> <array> <string></string> <string>Resources/SliderHandle.png</string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Handle</string> <key>type</key> <string>SpriteFrame</string> <key>name</key> <string>handleSpriteFrame|Normal</string> </dict> <dict> <key>displayName</key> <string>Highlighted state</string> <key>name</key> <string>Highlighted state</string> <key>type</key> <string>SeparatorSub</string> <key>dontSetInEditor</key> <true/> </dict> <dict> <key>default</key> <array> <string></string> <string>Resources/SliderBgHighlighted.png</string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Background</string> <key>type</key> <string>SpriteFrame</string> <key>name</key> <string>backgroundSpriteFrame|Highlighted</string> </dict> <dict> <key>default</key> <array> <string></string> <string></string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Handle</string> <key>type</key> <string>SpriteFrame</string> <key>name</key> <string>handleSpriteFrame|Highlighted</string> </dict> <dict> <key>displayName</key> <string>Disabled state</string> <key>name</key> <string>Disabled state</string> <key>type</key> <string>SeparatorSub</string> <key>dontSetInEditor</key> <true/> </dict> <dict> <key>default</key> <array> <string></string> <string></string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Background</string> <key>type</key> <string>SpriteFrame</string> <key>name</key> <string>backgroundSpriteFrame|Disabled</string> </dict> <dict> <key>default</key> <array> <string></string> <string></string> </array> <key>affectsProperties</key> <array> <string>contentSize</string> </array> <key>displayName</key> <string>Handle</string> 
<key>type</key> <string>SpriteFrame</string> <key>name</key> <string>handleSpriteFrame|Disabled</string> </dict> </array> </dict> </plist>
{ "pile_set_name": "Github" }
/** * Tag model module. * @file 标签数据模型 * @module model/tag * @author biaochenxuying <https://github.com/biaochenxuying> */ const { mongoose } = require('../core/mongodb.js'); const autoIncrement = require('mongoose-auto-increment'); // 标签模型 const tagSchema = new mongoose.Schema({ // 标签名称 name: { type: String, required: true, validate: /\S+/ }, // 标签描述 desc: String, // 图标 icon: String, // 发布日期 create_time: { type: Date, default: Date.now }, // 最后修改日期 update_time: { type: Date, default: Date.now }, }); // 自增ID插件配置 tagSchema.plugin(autoIncrement.plugin, { model: 'Tag', field: 'id', startAt: 1, incrementBy: 1, }); // 标签模型 module.exports = mongoose.model('Tag', tagSchema);
{ "pile_set_name": "Github" }
/* replaceTextBetween - Replaces a section of text in the middle of a file.. */

/* Copyright (C) 2012 The Regents of the University of California
 * See README in this or parent directory for licensing information. */

#include "common.h"
#include "linefile.h"
#include "hash.h"
#include "options.h"
#include "obscure.h"


/* Command line variables. */
boolean withEnds;	/* If set, the before/after marker strings are replaced too. */

void usage()
/* Explain usage and exit. */
{
errAbort(
  "replaceTextBetween - Replaces a section of text in the middle of a file.\n"
  "usage:\n"
  "   replaceTextBetween before after mainFile insertFile\n"
  "where before and after are strings that bracket the text to replace in mainFile.\n"
  "options:\n"
  "   -withEnds - If set include the before and after strings in the replaced portion\n"
  );
}

static struct optionSpec options[] = {
   {"withEnds", OPTION_BOOLEAN},
   {NULL, 0},
};

void replaceTextBetween(char *start, char *end, char *outerFile, char *middleFile)
/* replaceTextBetween - Replaces a section of text in the middle of a file..
 * Writes the result to stdout: the part of outerFile before `start`, then the
 * whole of middleFile, then the part of outerFile after `end`. */
{
/* Read outer file into memory. */
char *outer;
size_t outerSize;
readInGulp(outerFile, &outer, &outerSize);

/* Figure out the boundaries of the region we want to replace.
 * After this block, s points at the first byte of the replaced region and
 * e points one past its last byte. */
char *s = stringIn(start, outer);
if (s == NULL)
    errAbort("Can't find '%s' in %s", start, outerFile);
char *e = stringIn(end, s);
if (e == NULL)
    errAbort("Can't find '%s' in %s", end, outerFile);
if (withEnds)
    {
    /* Keep s at the start of `start` and advance e past `end`, so both
     * marker strings are replaced along with the text between them. */
    e += strlen(end);
    }
else
    {
    /* Advance s past `start` and leave e at the beginning of `end`, so both
     * marker strings survive in the output. */
    s += strlen(start);
    }

/* Read middle file into memory. */
char *middle;
size_t middleSize;
readInGulp(middleFile, &middle, &middleSize);

/* Write out file in three parts. */
int startSize = s - outer;
mustWrite(stdout, outer, startSize);
mustWrite(stdout, middle, middleSize);
int endSize = outer + outerSize - e;
mustWrite(stdout, e, endSize);
}

int main(int argc, char *argv[])
/* Process command line. */
{
optionInit(&argc, argv, options);
withEnds = optionExists("withEnds");
if (argc != 5)
    usage();
replaceTextBetween(argv[1], argv[2], argv[3], argv[4]);
return 0;
}
{ "pile_set_name": "Github" }
<?php

namespace Bigcommerce\Api\Resources;

use Bigcommerce\Api\Resource;
use Bigcommerce\Api\Client;

/**
 * Google Product Search mapping for a product.
 *
 * NOTE(review): the original docblock read "A product review.", which looks
 * like a copy/paste from another resource class; this class models the
 * product's Google Product Search resource. Marker class only — all behavior
 * is inherited from Resource.
 */
class ProductGoogleProductSearch extends Resource
{
}
{ "pile_set_name": "Github" }
package io.quarkus.deployment.pkg.steps;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Substrate prints incomprehensible and useless 'potential call paths' that look like stack traces
 * <p>
 * This class intercepts them and prints meaningful output instead, so users don't waste hours going on wild goose
 * chases
 */
public final class ErrorReplacingProcessReader implements Runnable {

    /** Prefix of the Substrate output line that opens a 'potential call path' dump. */
    private static final String LINE_START = "Call path from entry point to ";
    private final InputStream inputStream;
    private final File reportdir;
    private final CountDownLatch doneLatch;

    /** Lazily created on the first error; parses the call_tree report. */
    private ReportAnalyzer reportAnalyzer;

    /**
     * @param inputStream the process' stderr/stdout stream to filter
     * @param reportdir directory expected to contain a "call_tree*" report file
     * @param doneLatch counted down when the stream is exhausted, even on failure
     */
    public ErrorReplacingProcessReader(InputStream inputStream, File reportdir, CountDownLatch doneLatch) {
        this.inputStream = inputStream;
        this.reportdir = reportdir;
        this.doneLatch = doneLatch;
    }

    @Override
    public void run() {
        try {
            Deque<String> fullBuffer = new ArrayDeque<>();
            boolean buffering = false;
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
                // Pass lines straight through until the first call-path header appears;
                // from that point on, buffer everything for post-processing.
                // Note: once buffering starts it never stops for the rest of the stream.
                for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                    if (line.startsWith(LINE_START)) {
                        buffering = true;
                    }
                    if (buffering) {
                        fullBuffer.add(line);
                    } else {
                        System.err.println(line);
                    }
                }
                // Locate the call_tree report produced by the native image build, if any.
                File reportFile = null;
                if (reportdir.exists()) {
                    File[] files = reportdir.listFiles();
                    if (files != null) {
                        for (File j : files) {
                            if (j.getName().startsWith("call_tree")) {
                                reportFile = j;
                                break;
                            }
                        }
                    }
                }
                if (reportFile == null) {
                    // No report to analyze: replay the buffered output verbatim.
                    for (String j : fullBuffer) {
                        System.err.println(j);
                    }
                } else {
                    // Replay the buffer, replacing each call-path section with our analysis.
                    while (!fullBuffer.isEmpty()) {
                        String line = fullBuffer.pop();
                        if (line.startsWith(LINE_START)) {
                            handleErrorState(reportFile, line, fullBuffer);
                        } else {
                            System.err.println(line);
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } finally {
            doneLatch.countDown();
        }
    }

    /**
     * Consumes one 'Call path from entry point to X' section from {@code queue},
     * echoes its pseudo stack trace, then prints a more meaningful call-flow
     * analysis for X derived from the call_tree report.
     */
    private void handleErrorState(File report, String firstLine, Deque<String> queue) {
        System.err.println(firstLine);
        String remainder = firstLine.substring(LINE_START.length());
        // Capture the fully qualified member name up to the first '(' of the signature.
        Matcher m = Pattern.compile("([^(]*).*").matcher(remainder);
        if (!m.find()) {
            return;
        }
        String line = "";
        // Echo the indented "at ..." pseudo-frames; the first non-"at" line ends
        // the section and is re-printed after the analysis below.
        while (!queue.isEmpty()) {
            line = queue.pop();
            if (line.trim().startsWith("at")) {
                System.err.println(line);
            } else {
                break;
            }
        }
        System.err.println("--------------------------------------------------------------------------------------------");
        System.err.println("-- WARNING: The above stack trace is not a real stack trace, it is a theoretical call tree---");
        System.err.println("-- If an interface has multiple implementations SVM will just display one potential call ---");
        System.err.println("-- path to the interface. This is often meaningless, and what you actually need to know is---");
        System.err.println("-- the path to the constructor of the object that implements this interface. ---");
        System.err.println("-- Quarkus has attempted to generate a more meaningful call flow analysis below ---");
        System.err.println("---------------------------------------------------------------------------------------------\n");
        try {
            // Split "com.example.Clazz.method" into class and method at the last dot.
            String fullName = m.group(1);
            int index = fullName.lastIndexOf('.');
            String clazz = fullName.substring(0, index);
            String method = fullName.substring(index + 1);
            if (reportAnalyzer == null) {
                reportAnalyzer = new ReportAnalyzer(report.getAbsolutePath());
            }
            System.err.println(reportAnalyzer.analyse(clazz, method));
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.err.println(line);
    }
}
{ "pile_set_name": "Github" }
[ { "cards": { "Epic": [ { "fieldIdentifier": "System.Id" }, { "fieldIdentifier": "System.Title" }, { "fieldIdentifier": "System.AssignedTo", "displayFormat": "AvatarAndFullName" }, { "fieldIdentifier": "System.Tags" }, { "fieldIdentifier": "System.State" } ] }, "BoardName": "Epics" }, { "cards": { "Feature": [ { "fieldIdentifier": "System.Id" }, { "fieldIdentifier": "System.Title" }, { "fieldIdentifier": "System.AssignedTo", "displayFormat": "AvatarAndFullName" }, { "fieldIdentifier": "System.Tags" }, { "fieldIdentifier": "System.State" } ] }, "BoardName": "Features" }, { "cards": { "User Story": [ { "fieldIdentifier": "System.Id" }, { "fieldIdentifier": "System.Title" }, { "fieldIdentifier": "System.AssignedTo", "displayFormat": "AvatarAndFullName" }, { "fieldIdentifier": "System.Tags" }, { "fieldIdentifier": "System.State" }, { "fieldIdentifier": "Microsoft.VSTS.Scheduling.StoryPoints" } ] }, "BoardName": "Stories" } ]
{ "pile_set_name": "Github" }
#!/bin/sh
# Copyright (c) 2004, Intel Corporation. All rights reserved.
# Created by:  crystal.xiong REMOVE-THIS AT intel DOT com
# This file is licensed under the GPL license.  For the full content
# of this license, see the COPYING file at the top level of this
# source tree.

# Test pthread_attr_getschedpolicy returns 0 on success.

# This is tested implicitly via assertion 2.

# No direct test here; exit status 0 presumably marks PASS in the
# conformance-test harness — verify against the suite's result codes.
echo "Tested implicitly via assertion 2.  See output for status"
exit 0
{ "pile_set_name": "Github" }
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""API handler that serves a single timeseries as rows of requested columns.

Timeseries2Handler parses request parameters into a TimeseriesQuery, which
fans out ndb tasklet fetches (tests, rows, alerts, histograms, diagnostics)
and assembles one datum dict per revision.
"""

from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

from google.appengine.ext import ndb

from dashboard import alerts
from dashboard.api import api_request_handler
from dashboard.api import utils as api_utils
from dashboard.common import descriptor
from dashboard.common import timing
from dashboard.common import utils
from dashboard.models import anomaly
from dashboard.models import graph_data
from dashboard.models import histogram

# These limits should prevent DeadlineExceededErrors.
# TODO(benjhayden): Find a better strategy for staying under the deadline.
DIAGNOSTICS_QUERY_LIMIT = 10000
HISTOGRAMS_QUERY_LIMIT = 1000
ROWS_QUERY_LIMIT = 20000

# Columns that can only be produced by fetching full Row entities.
COLUMNS_REQUIRING_ROWS = {'timestamp', 'revisions',
                          'annotations'}.union(descriptor.STATISTICS)


class Timeseries2Handler(api_request_handler.ApiRequestHandler):

  def _CheckUser(self):
    # Authorization is enforced downstream: internal-only entities raise
    # AssertionError when fetched by unauthorized users (see Post()).
    pass

  def Post(self):
    """Parses request params, runs a TimeseriesQuery, returns its dict."""
    desc = descriptor.Descriptor(
        test_suite=self.request.get('test_suite'),
        measurement=self.request.get('measurement'),
        bot=self.request.get('bot'),
        test_case=self.request.get('test_case'),
        statistic=self.request.get('statistic', None),
        build_type=self.request.get('build_type'))
    min_revision = self.request.get('min_revision')
    min_revision = int(min_revision) if min_revision else None
    max_revision = self.request.get('max_revision')
    max_revision = int(max_revision) if max_revision else None
    query = TimeseriesQuery(
        desc, self.request.get('columns').split(','),
        min_revision, max_revision,
        api_utils.ParseISO8601(self.request.get('min_timestamp', None)),
        api_utils.ParseISO8601(self.request.get('max_timestamp', None)))
    try:
      result = query.FetchSync()
    except AssertionError:
      # The caller has requested internal-only data but is not authorized.
      raise api_request_handler.NotFoundError
    return result


class TimeseriesQuery(object):
  """Fetches one timeseries' data, filtered by revision/timestamp range."""

  def __init__(self, desc, columns, min_revision=None, max_revision=None,
               min_timestamp=None, max_timestamp=None):
    self._descriptor = desc
    self._columns = columns
    self._min_revision = min_revision
    self._max_revision = max_revision
    self._min_timestamp = min_timestamp
    self._max_timestamp = max_timestamp
    self._statistic_columns = []
    self._unsuffixed_test_metadata_keys = []
    self._test_keys = []
    self._test_metadata_keys = []
    self._units = None
    self._improvement_direction = None
    # Maps revision -> datum dict; keys of each datum are column names.
    self._data = {}
    self._private = False

  @property
  def private(self):
    # True if any fetched entity was internal_only.
    return self._private

  @timing.TimeWall('fetch')
  @timing.TimeCpu('fetch')
  def FetchSync(self):
    """Blocking wrapper around FetchAsync()."""
    return self.FetchAsync().get_result()

  @ndb.tasklet
  def FetchAsync(self):
    """Returns {units, improvement_direction, data}.

    Raises:
      api_request_handler.NotFoundError when the timeseries cannot be found.
      AssertionError when an external user requests internal-only data.
    """
    # Always need to query TestMetadata even if the caller doesn't need units
    # and improvement_direction because Row doesn't have internal_only.
    # Use tasklets so that they can process the data as it arrives.
    self._CreateTestKeys()
    futures = [self._FetchTests()]
    if COLUMNS_REQUIRING_ROWS.intersection(self._columns):
      futures.append(self._FetchRows())
    elif 'histogram' in self._columns:
      futures.append(self._FetchHistograms())
    if 'alert' in self._columns:
      self._ResolveTimestamps()
      futures.append(self._FetchAlerts())
    if 'diagnostics' in self._columns:
      self._ResolveTimestamps()
      futures.append(self._FetchDiagnostics())
    yield futures
    raise ndb.Return({
        'units': self._units,
        'improvement_direction': self._improvement_direction,
        # One list per revision, ordered by revision, columns in request order.
        'data': [[datum.get(col) for col in self._columns]
                 for _, datum in sorted(self._data.items())],
    })

  def _ResolveTimestamps(self):
    # Converts timestamp bounds to revision bounds (only when the caller did
    # not also supply the corresponding revision bound).
    if self._min_timestamp and self._min_revision is None:
      self._min_revision = self._ResolveTimestamp(self._min_timestamp)
    if self._max_timestamp and self._max_revision is None:
      self._max_revision = self._ResolveTimestamp(self._max_timestamp)

  def _ResolveTimestamp(self, timestamp):
    # Returns the revision of the newest Row at or before `timestamp`,
    # or None if there is none. Synchronous fetch of a single key.
    query = graph_data.Row.query(
        graph_data.Row.parent_test.IN(self._test_keys),
        graph_data.Row.timestamp <= timestamp)
    query = query.order(-graph_data.Row.timestamp)
    row_keys = query.fetch(1, keys_only=True)
    if not row_keys:
      return None
    return row_keys[0].integer_id()

  def _CreateTestKeys(self):
    """Builds test keys for the descriptor plus each requested statistic."""
    desc = self._descriptor.Clone()
    self._statistic_columns = [
        col for col in self._columns if col in descriptor.STATISTICS
    ]
    if desc.statistic and desc.statistic not in self._statistic_columns:
      self._statistic_columns.append(desc.statistic)
    desc.statistic = None
    unsuffixed_test_paths = desc.ToTestPathsSync()
    self._unsuffixed_test_metadata_keys = [
        utils.TestMetadataKey(path) for path in unsuffixed_test_paths
    ]
    test_paths = []
    for statistic in self._statistic_columns:
      desc.statistic = statistic
      test_paths.extend(desc.ToTestPathsSync())
    self._test_metadata_keys = [
        utils.TestMetadataKey(path) for path in test_paths
    ]
    self._test_metadata_keys.extend(self._unsuffixed_test_metadata_keys)
    test_paths.extend(unsuffixed_test_paths)
    # Both old-style and TestMetadata keys are used as Row parents.
    test_old_keys = [utils.OldStyleTestKey(path) for path in test_paths]
    self._test_keys = test_old_keys + self._test_metadata_keys

  @ndb.tasklet
  def _FetchTests(self):
    """Fetches TestMetadata to resolve units/improvement_direction/privacy."""
    # Don't fetch OldStyleTestKeys. The Test model has been removed. Only fetch
    # TestMetadata entities.
    with timing.WallTimeLogger('fetch_tests'):
      tests = yield [key.get_async() for key in self._test_metadata_keys]
    tests = [test for test in tests if test]
    if not tests:
      raise api_request_handler.NotFoundError
    improvement_direction = None
    for test in tests:
      if test.internal_only:
        self._private = True
      test_desc = yield descriptor.Descriptor.FromTestPathAsync(
          utils.TestPath(test.key))
      # The unit for 'count' statistics is trivially always 'count'. Callers
      # certainly want the units of the measurement, which is the same as the
      # units of the 'avg' and 'std' statistics.
      if self._units is None or test_desc.statistic != 'count':
        self._units = test.units
        improvement_direction = test.improvement_direction
    if improvement_direction == anomaly.DOWN:
      self._improvement_direction = 'down'
    elif improvement_direction == anomaly.UP:
      self._improvement_direction = 'up'
    else:
      self._improvement_direction = None

  def _Datum(self, revision):
    # Returns (creating if necessary) the datum dict for `revision`.
    return self._data.setdefault(revision, {'revision': revision})

  @ndb.tasklet
  def _FetchRows(self):
    yield [self._FetchRowsForTest(test_key) for test_key in self._test_keys]

  @staticmethod
  def Round(x):
    # Rounds values to 6 decimal places to keep response payloads small.
    return round(x, 6)

  def _RowQueryProjection(self, statistic):
    """Returns (projection, limit) for the Row query, or (None, limit)."""
    limit = ROWS_QUERY_LIMIT
    projection = None
    # revisions and annotations are not in any index, so a projection query
    # can't get them.
    if 'revisions' in self._columns or 'annotations' in self._columns:
      return projection, limit
    # Disable projection queries for timestamp for now. There's just an index
    # for ascending revision, not descending revision with timestamp.
    if 'timestamp' in self._columns:
      return projection, limit
    # There is no index like (parent_test, -timestamp, revision, value):
    self._ResolveTimestamps()
    # If statistic is not None, then project value. _CreateTestKeys will
    # generate test keys for the other statistics in columns if there are any.
    # If statistic is None and the only statistic is avg, then project value.
    # If statistic is None and there are multiple statistics, then fetch full
    # Row entities because we might need their 'error' aka 'std' or
    # 'd_'-prefixed statistics.
    if ((statistic is not None) or
        (','.join(self._statistic_columns) == 'avg')):
      projection = ['revision', 'value']
      if 'timestamp' in self._columns:
        projection.append('timestamp')
      limit = None
    return projection, limit

  @ndb.tasklet
  def _FetchRowsForTest(self, test_key):
    """Fetches Rows for one test key and folds them into self._data."""
    test_desc = yield descriptor.Descriptor.FromTestPathAsync(
        utils.TestPath(test_key))
    projection, limit = self._RowQueryProjection(test_desc.statistic)
    query = graph_data.Row.query(projection=projection)
    query = query.filter(graph_data.Row.parent_test == test_key)
    query = self._FilterRowQuery(query)
    with timing.WallTimeLogger('fetch_test'):
      rows = yield query.fetch_async(limit)
    with timing.CpuTimeLogger('rows'):
      for row in rows:
        # Sometimes the dev environment just ignores some filters.
        if self._min_revision and row.revision < self._min_revision:
          continue
        if self._min_timestamp and row.timestamp < self._min_timestamp:
          continue
        if self._max_revision and row.revision > self._max_revision:
          continue
        if self._max_timestamp and row.timestamp > self._max_timestamp:
          continue
        datum = self._Datum(row.revision)
        if test_desc.statistic is None:
          # Unsuffixed series: Row.value is the mean, Row.error the stddev.
          datum['avg'] = self.Round(row.value)
          if hasattr(row, 'error') and row.error:
            datum['std'] = self.Round(row.error)
        else:
          datum[test_desc.statistic] = self.Round(row.value)
        # Legacy 'd_'-prefixed per-row statistics.
        for stat in self._statistic_columns:
          if hasattr(row, 'd_' + stat):
            datum[stat] = self.Round(getattr(row, 'd_' + stat))
        if 'timestamp' in self._columns:
          datum['timestamp'] = row.timestamp.isoformat()
        if 'revisions' in self._columns:
          datum['revisions'] = {
              attr: value
              for attr, value in row.to_dict().items()
              if attr.startswith('r_')
          }
        if 'annotations' in self._columns:
          datum['annotations'] = {
              attr: value
              for attr, value in row.to_dict().items()
              if attr.startswith('a_')
          }
    # NOTE(review): `== None` would be more idiomatic as `is None`.
    if 'histogram' in self._columns and test_desc.statistic == None:
      with timing.WallTimeLogger('fetch_histograms'):
        yield [self._FetchHistogram(test_key, row.revision) for row in rows]

  def _FilterRowQuery(self, query):
    # Applies the revision range when given, else the timestamp range, else
    # just orders by descending revision. Only one ordering is possible per
    # query, so revision bounds take precedence over timestamp bounds.
    if self._min_revision or self._max_revision:
      if self._min_revision:
        query = query.filter(graph_data.Row.revision >= self._min_revision)
      if self._max_revision:
        query = query.filter(graph_data.Row.revision <= self._max_revision)
      query = query.order(-graph_data.Row.revision)
    elif self._min_timestamp or self._max_timestamp:
      if self._min_timestamp:
        query = query.filter(graph_data.Row.timestamp >= self._min_timestamp)
      if self._max_timestamp:
        query = query.filter(graph_data.Row.timestamp <= self._max_timestamp)
      query = query.order(-graph_data.Row.timestamp)
    else:
      query = query.order(-graph_data.Row.revision)
    return query

  @ndb.tasklet
  def _FetchAlerts(self):
    anomalies, _, _ = yield anomaly.Anomaly.QueryAsync(
        test_keys=self._test_keys,
        max_start_revision=self._max_revision,
        min_end_revision=self._min_revision)
    for alert in anomalies:
      if alert.internal_only:
        self._private = True
      datum = self._Datum(alert.end_revision)
      # TODO(benjhayden) bisect_status
      datum['alert'] = alerts.AnomalyDicts([alert], v2=True)[0]

  @ndb.tasklet
  def _FetchHistograms(self):
    yield [self._FetchHistogramsForTest(test) for test in self._test_keys]

  @ndb.tasklet
  def _FetchHistogramsForTest(self, test):
    # Finds row revisions in range, then fetches the histogram at each.
    query = graph_data.Row.query(graph_data.Row.parent_test == test)
    query = self._FilterRowQuery(query)
    with timing.WallTimeLogger('fetch_histograms'):
      row_keys = yield query.fetch_async(HISTOGRAMS_QUERY_LIMIT,
                                         keys_only=True)
      yield [
          self._FetchHistogram(test, row_key.integer_id())
          for row_key in row_keys
      ]

  @ndb.tasklet
  def _FetchHistogram(self, test, revision):
    query = histogram.Histogram.query(
        histogram.Histogram.test == utils.TestMetadataKey(test),
        histogram.Histogram.revision == revision)
    hist = yield query.get_async()
    if hist is None:
      return
    if hist.internal_only:
      self._private = True
    self._Datum(hist.revision)['histogram'] = hist.data

  @ndb.tasklet
  def _FetchDiagnostics(self):
    # NOTE(review): logger name 'fetch_diagnosticss' has a double 's';
    # left as-is since dashboards may key off the existing name.
    with timing.WallTimeLogger('fetch_diagnosticss'):
      yield [
          self._FetchDiagnosticsForTest(test)
          for test in self._unsuffixed_test_metadata_keys
      ]

  @ndb.tasklet
  def _FetchDiagnosticsForTest(self, test):
    query = histogram.SparseDiagnostic.query(
        histogram.SparseDiagnostic.test == test)
    if self._min_revision:
      query = query.filter(
          histogram.SparseDiagnostic.start_revision >= self._min_revision)
    if self._max_revision:
      query = query.filter(
          histogram.SparseDiagnostic.start_revision <= self._max_revision)
    query = query.order(-histogram.SparseDiagnostic.start_revision)
    diagnostics = yield query.fetch_async(DIAGNOSTICS_QUERY_LIMIT)
    for diag in diagnostics:
      if diag.internal_only:
        self._private = True
      datum = self._Datum(diag.start_revision)
      datum_diags = datum.setdefault('diagnostics', {})
      datum_diags[diag.name] = diag.data
{ "pile_set_name": "Github" }
#pragma once #include <signal.h> #include <string> #include <string> #include <fstream> #include <sstream> #include <vector> #inlcude <mutex> #include <glog/logging.h> #include "avProcError.h" ////////////////////////////////////////////////////////////////////////////////////////// namespace ff_dynamic { using ::std::mutex; using ::std::string; using ::std::vector; using ::std::fstream; using ::std::stringstream; class DAVReloader { public: static DAVReloader & getDAVReloaderInstance(){ static DAVReloader s_instance; return s_instance; } int initReloader(const string & configPath) { if (m_bInit == true) return 0; if (configPath.empty) return 0; m_configLoadPath = configPath; // register sighup struct sigaction hup; memset(&hup, 0, sizeof(avReloader)); //sigaddset(&avReloader.sa_mask, SIGHUP); //sigprocmask(SIG_BLOCK, &avReloader.sa_mask, NULL); hup.sa_flags = 0; hup.sa_handler = [](int signo){ DAVReloader & reloader = DAVReloader::getDAVReloaderInstance(); if (!reloader.isInitialized()) { LOG(ERROR) << "[DAVReloader] " << "Reloader not initialized, but load required."; return; } reloader->loadConfigContentAct(); }; sigaction(SIGHUP, &hup, 0); m_bInit = true; return 0; } string readLoadedContent() { std::lock_guard<mutex> lock(m_reloadLock); if (m_bLoadNeeded == false) return string(); m_bNewLoad = false; return m_configContent.str(); } inline bool isInitialized() { std::lock_guard<mutex> lock(m_reloadLock); return m_bInit; } inline bool hasNewLoad(){ std::lock_guard<mutex> lock(m_reloadLock); return m_bNewLoad; } private: int loadConfigContentAct() { std::lock_guard<mutex> lock(m_reloadLock); m_configContent.clear(); std::ifstream file(m_configLoadpath); if (file) { file.seekg(0, std::ios::end); const std::streampos length = file.tellg(); file.seekg(0, std::ios::beg); std::vector<char> buffer(length); file.read(&buffer[0], length); m_configContent.swap(stringstream()); m_configContent.rdbuf()->pubsetbuf(&buffer[0],length); } else { LOG(ERROR) << "[DAVReloader] " 
<< m_configLoadPath << " not found"; return AVERROR(ENOENT); } file.close(); m_bNewLoad = true; return 0; } private: DAVReloader(const DAVReloader &) = delete; void operator=(const DAVReloader &) = delete; bool m_bInit = false; mutex m_reloadLock; string m_configLoadPath; string m_configContent; }; } /// namespace
{ "pile_set_name": "Github" }
/*
 Copyright 2016 Goldman Sachs.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 either express or implied. See the License for the specific
 language governing permissions and limitations under the License.
 */

package com.gs.fw.common.mithra.attribute.calculator.arithmeticCalculator;

import com.gs.fw.common.mithra.attribute.Attribute;
import com.gs.fw.common.mithra.attribute.NumericAttribute;
import com.gs.fw.common.mithra.attribute.calculator.AbstractSingleAttributeCalculator;
import com.gs.fw.common.mithra.attribute.calculator.WrappedProcedureAndContext;
import com.gs.fw.common.mithra.attribute.calculator.procedure.NullHandlingProcedure;
import com.gs.fw.common.mithra.attribute.calculator.procedure.BigDecimalProcedure;
import com.gs.fw.common.mithra.finder.SqlQuery;
import com.gs.fw.common.mithra.finder.ToStringContext;
import com.gs.fw.common.mithra.util.HashUtil;
import com.gs.fw.common.mithra.util.BigDecimalUtil;
import com.gs.fw.common.mithra.extractor.BigDecimalExtractor;

import java.math.BigDecimal;

/**
 * Calculator for {@code attribute * constant} where the constant is a BigDecimal.
 * Result scale/precision follow BigDecimal product rules (via BigDecimalUtil).
 * The {@code execute(...)} overloads adapt each primitive input type to BigDecimal
 * and forward the product to the wrapped {@link BigDecimalProcedure}.
 */
public class ConstMultiplicationCalculatorBigDecimal extends AbstractSingleAttributeCalculator
{
    /** The constant factor applied to every attribute value. */
    private BigDecimal multiplicand;
    private int scale;
    private int precision;

    public ConstMultiplicationCalculatorBigDecimal(NumericAttribute attribute, BigDecimal multiplicand)
    {
        super(attribute);
        this.multiplicand = multiplicand;
        // Derive result scale/precision from the operands' scale/precision.
        this.scale = BigDecimalUtil.calculateProductScale(attribute.getScale(), multiplicand.scale());
        this.precision = BigDecimalUtil.calculateProductPrecision(attribute.getPrecision(), multiplicand.precision());
    }

    public int getScale()
    {
        return this.scale;
    }

    public int getPrecision()
    {
        return this.precision;
    }

    // Narrowing accessors: all delegate to the exact BigDecimal product.
    public int intValueOf(Object o)
    {
        return this.bigDecimalValueOf(o).intValue();
    }

    public float floatValueOf(Object o)
    {
        return this.bigDecimalValueOf(o).floatValue();
    }

    public long longValueOf(Object o)
    {
        return this.bigDecimalValueOf(o).longValue();
    }

    public double doubleValueOf(Object o)
    {
        return this.bigDecimalValueOf(o).doubleValue();
    }

    public BigDecimal bigDecimalValueOf(Object o)
    {
        return ((BigDecimalExtractor)this.attribute).bigDecimalValueOf(o).multiply(multiplicand);
    }

    /** SQL fragment: {@code <column> * <constant>}. */
    public String getFullyQualifiedCalculatedExpression(SqlQuery query)
    {
        return this.attribute.getFullyQualifiedLeftHandExpression(query) + " * " + multiplicand;
    }

    public void appendToString(ToStringContext toStringContext)
    {
        toStringContext.append("(");
        ((Attribute)this.attribute).zAppendToString(toStringContext);
        toStringContext.append("*");
        toStringContext.append(multiplicand.toString());
        toStringContext.append(")");
    }

    public boolean equals(Object obj)
    {
        if (this == obj) return true;
        if (obj instanceof ConstMultiplicationCalculatorBigDecimal)
        {
            ConstMultiplicationCalculatorBigDecimal other = (ConstMultiplicationCalculatorBigDecimal) obj;
            return this.attribute.equals(other.attribute) && this.multiplicand.equals(other.multiplicand);
        }
        return false;
    }

    public int hashCode()
    {
        return HashUtil.combineHashes(0x3742A274 ^ this.attribute.hashCode(), HashUtil.hash(multiplicand));
    }

    public boolean execute(double object, Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((BigDecimalProcedure)realContext.getWrappedProcedure()).execute((BigDecimal.valueOf(object)).multiply(multiplicand), realContext.getWrappedContext());
    }

    public boolean executeForNull(Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((NullHandlingProcedure)realContext.getWrappedProcedure()).executeForNull(realContext.getWrappedContext());
    }

    public boolean execute(int object, Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((BigDecimalProcedure)realContext.getWrappedProcedure()).execute((BigDecimal.valueOf(object)).multiply(multiplicand), realContext.getWrappedContext());
    }

    public boolean execute(float object, Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((BigDecimalProcedure)realContext.getWrappedProcedure()).execute((BigDecimal.valueOf(object)).multiply(multiplicand), realContext.getWrappedContext());
    }

    public boolean execute(long object, Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((BigDecimalProcedure)realContext.getWrappedProcedure()).execute((BigDecimal.valueOf(object)).multiply(multiplicand), realContext.getWrappedContext());
    }

    public boolean execute(BigDecimal object, Object context)
    {
        WrappedProcedureAndContext realContext = (WrappedProcedureAndContext) context;
        return ((BigDecimalProcedure)realContext.getWrappedProcedure()).execute(object.multiply(multiplicand), realContext.getWrappedContext());
    }
}
{ "pile_set_name": "Github" }
/***********************************************************************
 * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.tools.utils

import java.util.regex.Pattern

import com.beust.jcommander.{IStringConverter, IStringConverterFactory}

/**
 * JCommander converter factory for GeoMesa CLI option types.
 * Looks up a converter class by the option's declared type; returns null
 * (JCommander's "no converter" signal) for unregistered types.
 */
class GeoMesaIStringConverterFactory extends IStringConverterFactory {

  import GeoMesaIStringConverterFactory.ConverterMap

  override def getConverter[T](forType: Class[T]): Class[_ <: IStringConverter[T]] =
    ConverterMap.getOrElse(forType, null).asInstanceOf[Class[IStringConverter[T]]]
}

object GeoMesaIStringConverterFactory {
  // Registry of supported option types -> converter implementations.
  val ConverterMap: Map[Class[_], Class[_ <: IStringConverter[_]]] =
    Map[Class[_], Class[_ <: IStringConverter[_]]](
      classOf[Pattern] -> classOf[JPatternConverter]
    )
}

/** Converts a CLI string argument into a compiled java.util.regex.Pattern. */
class JPatternConverter extends IStringConverter[Pattern] {
  override def convert(value: String): Pattern = Pattern.compile(value)
}
{ "pile_set_name": "Github" }
/* * [The "BSD licence"] * Copyright (c) 2010 Ben Gruver (JesusFreke) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package org.jf.dexlib.Code.Analysis;

import org.jf.dexlib.TypeIdItem;

import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;

import static org.jf.dexlib.Code.Analysis.ClassPath.ClassDef;

/**
 * An immutable, interned value describing the verification-time type of a dalvik register:
 * a {@link Category} plus, for the reference-like categories (Reference, UninitRef,
 * UninitThis), the specific {@link ClassDef} being referenced.
 *
 * Instances are obtained via the static factory methods and interned through
 * {@link #getRegisterType}, with the deliberate exception of
 * {@link #getUnitializedReference}, which returns a fresh instance every time (see its
 * comment for why).
 */
public class RegisterType {
    // Intern pool keyed by (category, type). getRegisterType() funnels all lookups through
    // this map so each distinct register type has one canonical instance.
    // NOTE(review): plain HashMap with unsynchronized get/put - presumably the analysis
    // runs single-threaded; confirm before using from multiple threads.
    private final static HashMap<RegisterType, RegisterType> internedRegisterTypes =
            new HashMap<RegisterType, RegisterType>();

    // The broad category of this register type (see Category below)
    public final Category category;
    // The referenced class; non-null exactly when category is Reference, UninitRef or
    // UninitThis (enforced by the constructor assert)
    public final ClassDef type;

    /**
     * Private: all construction goes through the static factories so instances can be
     * interned (except for uninitialized references, which must stay distinct).
     * The assert enforces the invariant that 'type' is present for reference-like
     * categories and absent for everything else.
     */
    private RegisterType(Category category, ClassDef type) {
        assert ((category == Category.Reference || category == Category.UninitRef ||
                 category == Category.UninitThis) && type != null) ||
               ((category != Category.Reference && category != Category.UninitRef &&
                 category != Category.UninitThis) && type == null);

        this.category = category;
        this.type = type;
    }

    @Override
    public String toString() {
        // e.g. "(Reference,Ljava/lang/String;)" or "(Integer)" when there is no class
        return "(" + category.name() + (type==null?"":("," + type.getClassType())) + ")";
    }

    /**
     * Writes the same representation as {@link #toString()} directly to a writer,
     * avoiding intermediate string concatenation.
     *
     * @param writer destination for the textual form
     * @throws IOException if the underlying writer fails
     */
    public void writeTo(Writer writer) throws IOException {
        writer.write('(');
        writer.write(category.name());
        if (type != null) {
            writer.write(',');
            writer.write(type.getClassType());
        }
        writer.write(')');
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        RegisterType that = (RegisterType) o;

        if (category != that.category) return false;
        // null-safe comparison of the referenced class
        if (type != null ? !type.equals(that.type) : that.type != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        // Combines category and type consistently with equals(); required because
        // instances are used as HashMap keys in the intern pool above.
        int result = category.hashCode();
        result = 31 * result + (type != null ? type.hashCode() : 0);
        return result;
    }

    public static enum Category {
        //the Unknown category denotes a register type that hasn't been determined yet
        Unknown,
        Uninit,
        Null,
        One,
        Boolean,
        Byte,
        PosByte,
        Short,
        PosShort,
        Char,
        Integer,
        Float,
        LongLo,
        LongHi,
        DoubleLo,
        DoubleHi,
        //the UninitRef category is used after a new-instance operation, and before the corresponding <init> is called
        UninitRef,
        //the UninitThis category is used for the "this" register inside an <init> method, before the superclass'
        //<init> method is called
        UninitThis,
        Reference,
        //This is used when there are multiple incoming execution paths that have incompatible register types. For
        //example if the register's type is an Integer on one incoming code path, but is a Reference type on another
        //incoming code path. There is no register type that can hold either an Integer or a Reference.
        Conflicted;

        //this table is used when merging register types. For example, if a particular register can be either a Byte
        //or a Char, then the "merged" type of that register would be Integer, because it is the "smallest" type that
        //could hold either type of value.
        //Indexed as mergeTable[a.ordinal()][b.ordinal()]; rows/columns follow declaration order above, so any
        //change to the constant order must be mirrored here and in assigmentTable below.
        protected static Category[][] mergeTable  =
        {
                /*             Unknown      Uninit      Null        One,        Boolean     Byte        PosByte     Short       PosShort    Char        Integer,    Float,      LongLo      LongHi      DoubleLo    DoubleHi    UninitRef   UninitThis  Reference   Conflicted*/
                /*Unknown*/    {Unknown,    Uninit,     Null,       One,        Boolean,    Byte,       PosByte,    Short,      PosShort,   Char,       Integer,    Float,      LongLo,     LongHi,     DoubleLo,   DoubleHi,   UninitRef,  UninitThis, Reference,  Conflicted},
                /*Uninit*/     {Uninit,     Uninit,     Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Null*/       {Null,       Conflicted, Null,       Boolean,    Boolean,    Byte,       PosByte,    Short,      PosShort,   Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Reference,  Conflicted},
                /*One*/        {One,        Conflicted, Boolean,    One,        Boolean,    Byte,       PosByte,    Short,      PosShort,   Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Boolean*/    {Boolean,    Conflicted, Boolean,    Boolean,    Boolean,    Byte,       PosByte,    Short,      PosShort,   Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Byte*/       {Byte,       Conflicted, Byte,       Byte,       Byte,       Byte,       Byte,       Short,      Short,      Integer,    Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*PosByte*/    {PosByte,    Conflicted, PosByte,    PosByte,    PosByte,    Byte,       PosByte,    Short,      PosShort,   Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Short*/      {Short,      Conflicted, Short,      Short,      Short,      Short,      Short,      Short,      Short,      Integer,    Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*PosShort*/   {PosShort,   Conflicted, PosShort,   PosShort,   PosShort,   Short,      PosShort,   Short,      PosShort,   Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Char*/       {Char,       Conflicted, Char,       Char,       Char,       Integer,    Char,       Integer,    Char,       Char,       Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Integer*/    {Integer,    Conflicted, Integer,    Integer,    Integer,    Integer,    Integer,    Integer,    Integer,    Integer,    Integer,    Integer,    Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*Float*/      {Float,      Conflicted, Float,      Float,      Float,      Float,      Float,      Float,      Float,      Float,      Integer,    Float,      Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*LongLo*/     {LongLo,     Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, LongLo,     Conflicted, LongLo,     Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*LongHi*/     {LongHi,     Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, LongHi,     Conflicted, LongHi,     Conflicted, Conflicted, Conflicted, Conflicted},
                /*DoubleLo*/   {DoubleLo,   Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, LongLo,     Conflicted, DoubleLo,   Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*DoubleHi*/   {DoubleHi,   Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, LongHi,     Conflicted, DoubleHi,   Conflicted, Conflicted, Conflicted, Conflicted},
                /*UninitRef*/  {UninitRef,  Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted},
                /*UninitThis*/ {UninitThis, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, UninitThis, Conflicted, Conflicted},
                /*Reference*/  {Reference,  Conflicted, Reference,  Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Reference,  Conflicted},
                /*Conflicted*/ {Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted, Conflicted}
        };

        //this table is used to denote whether a given value type can be assigned to a "slot" of a certain type. For
        //example, to determine if you can assign a Boolean value to a particular array "slot", where the array is an
        //array of Integers, you would look up assignmentTable[Boolean.ordinal()][Integer.ordinal()]
        //Note that not all slot types in the table are expected to be used. For example, it doesn't make sense to
        //check if a value can be assigned to an uninitialized reference slot - because there is no such thing.
        protected static boolean[][] assigmentTable =
        {
                /*             Unknown Uninit  Null    One,    Boolean Byte    PosByte Short   PosShort Char   Integer Float   LongLo  LongHi  DoubleLo DoubleHi UninitRef UninitThis Reference Conflicted |slot type*/
                /*Unknown*/    {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false},
                /*Uninit*/     {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false},
                /*Null*/       {false,  false,  true,   false,  true,   true,   true,   true,   true,   true,   true,   true,   false,  false,  false,  false,  false,  false,  true,   false},
                /*One*/        {false,  false,  false,  true,   true,   true,   true,   true,   true,   true,   true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Boolean*/    {false,  false,  false,  false,  true,   true,   true,   true,   true,   true,   true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Byte*/       {false,  false,  false,  false,  false,  true,   false,  true,   true,   false,  true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*PosByte*/    {false,  false,  false,  false,  false,  true,   true,   true,   true,   true,   true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Short*/      {false,  false,  false,  false,  false,  false,  false,  true,   false,  false,  true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*PosShort*/   {false,  false,  false,  false,  false,  false,  false,  true,   true,   true,   true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Char*/       {false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Integer*/    {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*Float*/      {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   true,   false,  false,  false,  false,  false,  false,  false,  false},
                /*LongLo*/     {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   false,  true,   false,  false,  false,  false,  false},
                /*LongHi*/     {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   false,  true,   false,  false,  false,  false},
                /*DoubleLo*/   {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   false,  true,   false,  false,  false,  false,  false},
                /*DoubleHi*/   {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   false,  true,   false,  false,  false,  false},
                /*UninitRef*/  {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false},
                /*UninitThis*/ {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false},
                /*Reference*/  {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  true,   false},
                /*Conflicted*/ {false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false,  false}
                /*----------*/
                /*value type*/
        };
    }

    /**
     * Gets the interned register type corresponding to a dex type descriptor
     * (e.g. "I", "J", "Ljava/lang/String;", "[I").
     *
     * Note that wide primitives ('J' and 'D') map to the *Lo category only; callers that
     * need both halves should use {@link #getWideRegisterTypeForTypeIdItem}.
     *
     * @param type a non-empty dex type descriptor
     * @return the interned RegisterType for the descriptor
     * @throws ValidationException if the descriptor is the void type 'V'
     * @throws RuntimeException if the descriptor's first character is not a valid dex type
     */
    public static RegisterType getRegisterTypeForType(String type) {
        switch (type.charAt(0)) {
            case 'V':
                throw new ValidationException("The V type can only be used as a method return type");
            case 'Z':
                return getRegisterType(Category.Boolean, null);
            case 'B':
                return getRegisterType(Category.Byte, null);
            case 'S':
                return getRegisterType(Category.Short, null);
            case 'C':
                return getRegisterType(Category.Char, null);
            case 'I':
                return getRegisterType(Category.Integer, null);
            case 'F':
                return getRegisterType(Category.Float, null);
            case 'J':
                return getRegisterType(Category.LongLo, null);
            case 'D':
                return getRegisterType(Category.DoubleLo, null);
            case 'L':
            case '[':
                // class and array types both resolve through the ClassPath
                return getRegisterType(Category.Reference, ClassPath.getClassDef(type));
            default:
                throw new RuntimeException("Invalid type: " + type);
        }
    }

    /**
     * Convenience overload of {@link #getRegisterTypeForType(String)} for a TypeIdItem.
     */
    public static RegisterType getRegisterTypeForTypeIdItem(TypeIdItem typeIdItem) {
        return getRegisterTypeForType(typeIdItem.getTypeDescriptor());
    }

    /**
     * Gets the register type of one half of a wide (2-register) value.
     *
     * @param typeIdItem a type that occupies two registers ('J' or 'D')
     * @param firstRegister true for the low half (LongLo/DoubleLo), false for the
     *                      high half (LongHi/DoubleHi)
     * @return the interned RegisterType for the requested half
     * @throws RuntimeException if the type is not a wide type
     */
    public static RegisterType getWideRegisterTypeForTypeIdItem(TypeIdItem typeIdItem,
                                                                boolean firstRegister) {
        if (typeIdItem.getRegisterCount() == 1) {
            throw new RuntimeException("Cannot use this method for non-wide register type: " +
                    typeIdItem.getTypeDescriptor());
        }

        switch (typeIdItem.getTypeDescriptor().charAt(0)) {
            case 'J':
                if (firstRegister) {
                    return getRegisterType(Category.LongLo, null);
                } else {
                    return getRegisterType(Category.LongHi, null);
                }
            case 'D':
                if (firstRegister) {
                    return getRegisterType(Category.DoubleLo, null);
                } else {
                    return getRegisterType(Category.DoubleHi, null);
                }
            default:
                throw new RuntimeException("Invalid type: " + typeIdItem.getTypeDescriptor());
        }
    }

    /**
     * Gets the narrowest register type that can represent a 32-bit literal value.
     *
     * The checks are ordered from most negative upward, so each range is implicitly
     * bounded below by the previous check: e.g. reaching "literalValue &lt; -128" implies
     * the value is &gt;= -32768, making it a Short. 0 and 1 get the special Null/One
     * categories because they are assignment-compatible with both booleans and references
     * (0 only) in dalvik.
     *
     * @param literalValue the literal from a const-style instruction
     * @return the interned RegisterType for the literal
     */
    public static RegisterType getRegisterTypeForLiteral(long literalValue) {
        if (literalValue < -32768) {
            return getRegisterType(Category.Integer, null);
        }
        if (literalValue < -128) {
            return getRegisterType(Category.Short, null);
        }
        if (literalValue < 0) {
            return getRegisterType(Category.Byte, null);
        }
        if (literalValue == 0) {
            return getRegisterType(Category.Null, null);
        }
        if (literalValue == 1) {
            return getRegisterType(Category.One, null);
        }
        if (literalValue < 128) {
            return getRegisterType(Category.PosByte, null);
        }
        if (literalValue < 32768) {
            return getRegisterType(Category.PosShort, null);
        }
        if (literalValue < 65536) {
            return getRegisterType(Category.Char, null);
        }
        return getRegisterType(Category.Integer, null);
    }

    /**
     * Merges this register type with another, per Category.mergeTable - used where two
     * execution paths join and a register may hold either type.
     *
     * For two Reference types, the merged class is their common superclass (or the
     * unresolved-object placeholder if either side is unresolved). An UninitRef/UninitThis
     * merge result only arises when one side is Unknown; in that case the known side is
     * returned as-is so the specific uninitialized instance is preserved (these are
     * identity-tracked, see getUnitializedReference).
     *
     * @param type the other register type; may be null
     * @return the merged (interned) register type; this instance if type is null or equal
     */
    public RegisterType merge(RegisterType type) {
        if (type == null || type == this) {
            return this;
        }

        Category mergedCategory = Category.mergeTable[this.category.ordinal()]
                                                     [type.category.ordinal()];

        ClassDef mergedType = null;
        if (mergedCategory == Category.Reference) {
            if (this.type instanceof ClassPath.UnresolvedClassDef ||
                type.type instanceof ClassPath.UnresolvedClassDef) {
                mergedType = ClassPath.getUnresolvedObjectClassDef();
            } else {
                mergedType = ClassPath.getCommonSuperclass(this.type, type.type);
            }
        } else if (mergedCategory == Category.UninitRef || mergedCategory == Category.UninitThis) {
            if (this.category == Category.Unknown) {
                return type;
            }
            assert type.category == Category.Unknown;
            return this;
        }
        return RegisterType.getRegisterType(mergedCategory, mergedType);
    }

    /**
     * Checks whether a value of this register type may be stored into a slot (register,
     * field, array element) of the given type, per Category.assigmentTable, with an
     * additional class-hierarchy check for reference-to-reference assignments.
     *
     * @param slotType the declared type of the destination slot
     * @return true if the assignment is allowed
     */
    public boolean canBeAssignedTo(RegisterType slotType) {
        if (Category.assigmentTable[this.category.ordinal()][slotType.category.ordinal()]) {
            if (this.category == Category.Reference && slotType.category == Category.Reference) {
                if (!slotType.type.isInterface()) {
                    return this.type.extendsClass(slotType.type);
                }
                //for verification, we assume all objects implement all interfaces, so we don't verify the type if
                //slotType is an interface
            }
            return true;
        }
        return false;
    }

    /**
     * Creates a new (deliberately non-interned) UninitRef register type for the given
     * class. Note the long-standing spelling of this method name ("Unitialized") is part
     * of the public interface and is kept as-is.
     */
    public static RegisterType getUnitializedReference(ClassDef classType) {
        //We always create a new RegisterType instance for an uninit ref. Each unique uninit RegisterType instance
        //is used to track a specific uninitialized reference, so that if multiple registers contain the same
        //uninitialized reference, then they can all be upgraded to an initialized reference when the appropriate
        //<init> is invoked
        return new RegisterType(Category.UninitRef, classType);
    }

    /**
     * Returns the canonical interned instance for the given (category, classType) pair,
     * creating and caching it on first use.
     *
     * @param category the register type category
     * @param classType the referenced class, or null for non-reference categories
     * @return the interned RegisterType
     */
    public static RegisterType getRegisterType(Category category, ClassDef classType) {
        RegisterType newRegisterType = new RegisterType(category, classType);
        RegisterType internedRegisterType = internedRegisterTypes.get(newRegisterType);
        if (internedRegisterType == null) {
            internedRegisterTypes.put(newRegisterType, newRegisterType);
            return newRegisterType;
        }
        return internedRegisterType;
    }
}
{ "pile_set_name": "Github" }
<testcase> <info> <keywords> HTTP HTTP GET globbing {} list </keywords> </info> # Server-side <reply> <data1> HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 15 the number one </data1> <data2> HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 16 two is nice too </data2> </reply> # Client-side <client> <server> http </server> <name> multiple requests using {}{} in the URL </name> <command> "%HOSTIP:%HTTPPORT/{1235,1235}{0001,0002}" </command> </client> # Verify data after the test has been "shot" <verify> <strip> ^User-Agent:.* </strip> <protocol> GET /12350001 HTTP/1.1 User-Agent: curl/7.8.1-pre3 (sparc-sun-solaris2.7) libcurl 7.8.1-pre3 (OpenSSL 0.9.6a) (krb4 enabled) Host: %HOSTIP:%HTTPPORT Accept: */* GET /12350002 HTTP/1.1 User-Agent: curl/7.8.1-pre3 (sparc-sun-solaris2.7) libcurl 7.8.1-pre3 (OpenSSL 0.9.6a) (krb4 enabled) Host: %HOSTIP:%HTTPPORT Accept: */* GET /12350001 HTTP/1.1 User-Agent: curl/7.8.1-pre3 (sparc-sun-solaris2.7) libcurl 7.8.1-pre3 (OpenSSL 0.9.6a) (krb4 enabled) Host: %HOSTIP:%HTTPPORT Accept: */* GET /12350002 HTTP/1.1 User-Agent: curl/7.8.1-pre3 (sparc-sun-solaris2.7) libcurl 7.8.1-pre3 (OpenSSL 0.9.6a) (krb4 enabled) Host: %HOSTIP:%HTTPPORT Accept: */* </protocol> <stdout> --_curl_--%HOSTIP:%HTTPPORT/12350001 HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 15 the number one --_curl_--%HOSTIP:%HTTPPORT/12350002 HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 16 two is nice too --_curl_--%HOSTIP:%HTTPPORT/12350001 HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 15 the number one --_curl_--%HOSTIP:%HTTPPORT/12350002 HTTP/1.1 200 OK Funny-head: yesyes Content-Length: 16 two is nice too </stdout> </verify> </testcase>
{ "pile_set_name": "Github" }
1,1,0,0,1,1,0,0,1,0,0,0
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8" ?> <Configuration shutdownHook="disable"> <Appenders> <Console name="console" target="SYSTEM_OUT"> <PatternLayout pattern="%highlight{%d %p [%c] - &lt;%m&gt;%n}" /> </Console> <SplunkAppender name="casSplunk"> <AppenderRef ref="console" /> </SplunkAppender> </Appenders> <Loggers> <Logger name="org.apereo.cas" level="info" additivity="false"> <AppenderRef ref="casSplunk"/> </Logger> <Root level="error"> <AppenderRef ref="casSplunk"/> </Root> </Loggers> </Configuration>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <selector xmlns:android="http://schemas.android.com/apk/res/android"> <item android:state_pressed="true"> <shape> <!-- <solid android:color="@color/camerasdk_action_bar_press"/> --> <solid android:color="@color/camerasdk_action_bar"/> </shape> </item> <!-- <item> <shape> <solid android:color="@color/camerasdk_action_bar"/> </shape> </item> --> </selector>
{ "pile_set_name": "Github" }
/* * * Copyright 2016 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package stats import ( "context" "net" ) // ConnTagInfo defines the relevant information needed by connection context tagger. type ConnTagInfo struct { // RemoteAddr is the remote address of the corresponding connection. RemoteAddr net.Addr // LocalAddr is the local address of the corresponding connection. LocalAddr net.Addr } // RPCTagInfo defines the relevant information needed by RPC context tagger. type RPCTagInfo struct { // FullMethodName is the RPC method in the format of /package.service/method. FullMethodName string // FailFast indicates if this RPC is failfast. // This field is only valid on client side, it's always false on server side. FailFast bool } // Handler defines the interface for the related stats handling (e.g., RPCs, connections). type Handler interface { // TagRPC can attach some information to the given context. // The context used for the rest lifetime of the RPC will be derived from // the returned context. TagRPC(context.Context, *RPCTagInfo) context.Context // HandleRPC processes the RPC stats. HandleRPC(context.Context, RPCStats) // TagConn can attach some information to the given context. // The returned context will be used for stats handling. // For conn stats handling, the context used in HandleConn for this // connection will be derived from the context returned. 
// For RPC stats handling, // - On server side, the context used in HandleRPC for all RPCs on this // connection will be derived from the context returned. // - On client side, the context is not derived from the context returned. TagConn(context.Context, *ConnTagInfo) context.Context // HandleConn processes the Conn stats. HandleConn(context.Context, ConnStats) }
{ "pile_set_name": "Github" }
/* Copyright 1994 Digital Equipment Corporation. This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. The author may be reached as [email protected] or Digital Equipment Corporation, 550 King Street, Littleton MA 01460. ========================================================================= */ /* ** DC21040 CSR<1..15> Register Address Map */ #define DE4X5_BMR iobase+(0x000 << lp->bus) /* Bus Mode Register */ #define DE4X5_TPD iobase+(0x008 << lp->bus) /* Transmit Poll Demand Reg */ #define DE4X5_RPD iobase+(0x010 << lp->bus) /* Receive Poll Demand Reg */ #define DE4X5_RRBA iobase+(0x018 << lp->bus) /* RX Ring Base Address Reg */ #define DE4X5_TRBA iobase+(0x020 << lp->bus) /* TX Ring Base Address Reg */ #define DE4X5_STS iobase+(0x028 << lp->bus) /* Status Register */ #define DE4X5_OMR iobase+(0x030 << lp->bus) /* Operation Mode Register */ #define DE4X5_IMR iobase+(0x038 << lp->bus) /* Interrupt Mask Register */ #define DE4X5_MFC iobase+(0x040 << lp->bus) /* Missed Frame Counter */ #define DE4X5_APROM iobase+(0x048 << lp->bus) /* Ethernet Address PROM */ #define DE4X5_BROM iobase+(0x048 << lp->bus) /* Boot ROM Register */ #define DE4X5_SROM iobase+(0x048 << lp->bus) /* Serial ROM Register */ #define DE4X5_MII iobase+(0x048 << lp->bus) /* MII Interface Register */ #define DE4X5_DDR iobase+(0x050 << lp->bus) /* Data Diagnostic Register */ #define DE4X5_FDR iobase+(0x058 << lp->bus) /* Full Duplex Register */ #define DE4X5_GPT iobase+(0x058 << lp->bus) /* General Purpose Timer Reg.*/ #define DE4X5_GEP iobase+(0x060 << lp->bus) /* General Purpose Register */ #define DE4X5_SISR iobase+(0x060 << lp->bus) /* SIA Status Register */ #define DE4X5_SICR iobase+(0x068 << lp->bus) /* SIA Connectivity Register */ #define DE4X5_STRR iobase+(0x070 << lp->bus) /* SIA TX/RX Register */ #define DE4X5_SIGR iobase+(0x078 << lp->bus) /* SIA General Register */ /* ** EISA Register Address Map */ #define 
EISA_ID iobase+0x0c80 /* EISA ID Registers */ #define EISA_ID0 iobase+0x0c80 /* EISA ID Register 0 */ #define EISA_ID1 iobase+0x0c81 /* EISA ID Register 1 */ #define EISA_ID2 iobase+0x0c82 /* EISA ID Register 2 */ #define EISA_ID3 iobase+0x0c83 /* EISA ID Register 3 */ #define EISA_CR iobase+0x0c84 /* EISA Control Register */ #define EISA_REG0 iobase+0x0c88 /* EISA Configuration Register 0 */ #define EISA_REG1 iobase+0x0c89 /* EISA Configuration Register 1 */ #define EISA_REG2 iobase+0x0c8a /* EISA Configuration Register 2 */ #define EISA_REG3 iobase+0x0c8f /* EISA Configuration Register 3 */ #define EISA_APROM iobase+0x0c90 /* Ethernet Address PROM */ /* ** PCI/EISA Configuration Registers Address Map */ #define PCI_CFID iobase+0x0008 /* PCI Configuration ID Register */ #define PCI_CFCS iobase+0x000c /* PCI Command/Status Register */ #define PCI_CFRV iobase+0x0018 /* PCI Revision Register */ #define PCI_CFLT iobase+0x001c /* PCI Latency Timer Register */ #define PCI_CBIO iobase+0x0028 /* PCI Base I/O Register */ #define PCI_CBMA iobase+0x002c /* PCI Base Memory Address Register */ #define PCI_CBER iobase+0x0030 /* PCI Expansion ROM Base Address Reg. 
*/ #define PCI_CFIT iobase+0x003c /* PCI Configuration Interrupt Register */ #define PCI_CFDA iobase+0x0040 /* PCI Driver Area Register */ #define PCI_CFDD iobase+0x0041 /* PCI Driver Dependent Area Register */ #define PCI_CFPM iobase+0x0043 /* PCI Power Management Area Register */ /* ** EISA Configuration Register 0 bit definitions */ #define ER0_BSW 0x80 /* EISA Bus Slave Width, 1: 32 bits */ #define ER0_BMW 0x40 /* EISA Bus Master Width, 1: 32 bits */ #define ER0_EPT 0x20 /* EISA PREEMPT Time, 0: 23 BCLKs */ #define ER0_ISTS 0x10 /* Interrupt Status (X) */ #define ER0_LI 0x08 /* Latch Interrupts */ #define ER0_INTL 0x06 /* INTerrupt Level */ #define ER0_INTT 0x01 /* INTerrupt Type, 0: Level, 1: Edge */ /* ** EISA Configuration Register 1 bit definitions */ #define ER1_IAM 0xe0 /* ISA Address Mode */ #define ER1_IAE 0x10 /* ISA Addressing Enable */ #define ER1_UPIN 0x0f /* User Pins */ /* ** EISA Configuration Register 2 bit definitions */ #define ER2_BRS 0xc0 /* Boot ROM Size */ #define ER2_BRA 0x3c /* Boot ROM Address <16:13> */ /* ** EISA Configuration Register 3 bit definitions */ #define ER3_BWE 0x40 /* Burst Write Enable */ #define ER3_BRE 0x04 /* Burst Read Enable */ #define ER3_LSR 0x02 /* Local Software Reset */ /* ** PCI Configuration ID Register (PCI_CFID). The Device IDs are left ** shifted 8 bits to allow detection of DC21142 and DC21143 variants with ** the configuration revision register step number. 
*/ #define CFID_DID 0xff00 /* Device ID */ #define CFID_VID 0x00ff /* Vendor ID */ #define DC21040_DID 0x0200 /* Unique Device ID # */ #define DC21040_VID 0x1011 /* DC21040 Manufacturer */ #define DC21041_DID 0x1400 /* Unique Device ID # */ #define DC21041_VID 0x1011 /* DC21041 Manufacturer */ #define DC21140_DID 0x0900 /* Unique Device ID # */ #define DC21140_VID 0x1011 /* DC21140 Manufacturer */ #define DC2114x_DID 0x1900 /* Unique Device ID # */ #define DC2114x_VID 0x1011 /* DC2114[23] Manufacturer */ /* ** Chipset defines */ #define DC21040 DC21040_DID #define DC21041 DC21041_DID #define DC21140 DC21140_DID #define DC2114x DC2114x_DID #define DC21142 (DC2114x_DID | 0x0010) #define DC21143 (DC2114x_DID | 0x0030) #define DC2114x_BRK 0x0020 /* CFRV break between DC21142 & DC21143 */ #define is_DC21040 ((vendor == DC21040_VID) && (device == DC21040_DID)) #define is_DC21041 ((vendor == DC21041_VID) && (device == DC21041_DID)) #define is_DC21140 ((vendor == DC21140_VID) && (device == DC21140_DID)) #define is_DC2114x ((vendor == DC2114x_VID) && (device == DC2114x_DID)) #define is_DC21142 ((vendor == DC2114x_VID) && (device == DC21142)) #define is_DC21143 ((vendor == DC2114x_VID) && (device == DC21143)) /* ** PCI Configuration Command/Status Register (PCI_CFCS) */ #define CFCS_DPE 0x80000000 /* Detected Parity Error (S) */ #define CFCS_SSE 0x40000000 /* Signal System Error (S) */ #define CFCS_RMA 0x20000000 /* Receive Master Abort (S) */ #define CFCS_RTA 0x10000000 /* Receive Target Abort (S) */ #define CFCS_DST 0x06000000 /* DEVSEL Timing (S) */ #define CFCS_DPR 0x01000000 /* Data Parity Report (S) */ #define CFCS_FBB 0x00800000 /* Fast Back-To-Back (S) */ #define CFCS_SEE 0x00000100 /* System Error Enable (C) */ #define CFCS_PER 0x00000040 /* Parity Error Response (C) */ #define CFCS_MO 0x00000004 /* Master Operation (C) */ #define CFCS_MSA 0x00000002 /* Memory Space Access (C) */ #define CFCS_IOSA 0x00000001 /* I/O Space Access (C) */ /* ** PCI Configuration 
Revision Register (PCI_CFRV) */ #define CFRV_BC 0xff000000 /* Base Class */ #define CFRV_SC 0x00ff0000 /* Subclass */ #define CFRV_RN 0x000000f0 /* Revision Number */ #define CFRV_SN 0x0000000f /* Step Number */ #define BASE_CLASS 0x02000000 /* Indicates Network Controller */ #define SUB_CLASS 0x00000000 /* Indicates Ethernet Controller */ #define STEP_NUMBER 0x00000020 /* Increments for future chips */ #define REV_NUMBER 0x00000003 /* 0x00, 0x01, 0x02, 0x03: Rev in Step */ #define CFRV_MASK 0xffff0000 /* Register mask */ /* ** PCI Configuration Latency Timer Register (PCI_CFLT) */ #define CFLT_BC 0x0000ff00 /* Latency Timer bits */ /* ** PCI Configuration Base I/O Address Register (PCI_CBIO) */ #define CBIO_MASK -128 /* Base I/O Address Mask */ #define CBIO_IOSI 0x00000001 /* I/O Space Indicator (RO, value is 1) */ /* ** PCI Configuration Card Information Structure Register (PCI_CCIS) */ #define CCIS_ROMI 0xf0000000 /* ROM Image */ #define CCIS_ASO 0x0ffffff8 /* Address Space Offset */ #define CCIS_ASI 0x00000007 /* Address Space Indicator */ /* ** PCI Configuration Subsystem ID Register (PCI_SSID) */ #define SSID_SSID 0xffff0000 /* Subsystem ID */ #define SSID_SVID 0x0000ffff /* Subsystem Vendor ID */ /* ** PCI Configuration Expansion ROM Base Address Register (PCI_CBER) */ #define CBER_MASK 0xfffffc00 /* Expansion ROM Base Address Mask */ #define CBER_ROME 0x00000001 /* ROM Enable */ /* ** PCI Configuration Interrupt Register (PCI_CFIT) */ #define CFIT_MXLT 0xff000000 /* MAX_LAT Value (0.25us periods) */ #define CFIT_MNGT 0x00ff0000 /* MIN_GNT Value (0.25us periods) */ #define CFIT_IRQP 0x0000ff00 /* Interrupt Pin */ #define CFIT_IRQL 0x000000ff /* Interrupt Line */ /* ** PCI Configuration Power Management Area Register (PCI_CFPM) */ #define SLEEP 0x80 /* Power Saving Sleep Mode */ #define SNOOZE 0x40 /* Power Saving Snooze Mode */ #define WAKEUP 0x00 /* Power Saving Wakeup */ #define PCI_CFDA_DSU 0x41 /* 8 bit Configuration Space Address */ #define PCI_CFDA_PSM 
0x43 /* 8 bit Configuration Space Address */ /* ** DC21040 Bus Mode Register (DE4X5_BMR) */ #define BMR_RML 0x00200000 /* [Memory] Read Multiple */ #define BMR_DBO 0x00100000 /* Descriptor Byte Ordering (Endian) */ #define BMR_TAP 0x000e0000 /* Transmit Automatic Polling */ #define BMR_DAS 0x00010000 /* Diagnostic Address Space */ #define BMR_CAL 0x0000c000 /* Cache Alignment */ #define BMR_PBL 0x00003f00 /* Programmable Burst Length */ #define BMR_BLE 0x00000080 /* Big/Little Endian */ #define BMR_DSL 0x0000007c /* Descriptor Skip Length */ #define BMR_BAR 0x00000002 /* Bus ARbitration */ #define BMR_SWR 0x00000001 /* Software Reset */ /* Timings here are for 10BASE-T/AUI only*/ #define TAP_NOPOLL 0x00000000 /* No automatic polling */ #define TAP_200US 0x00020000 /* TX automatic polling every 200us */ #define TAP_800US 0x00040000 /* TX automatic polling every 800us */ #define TAP_1_6MS 0x00060000 /* TX automatic polling every 1.6ms */ #define TAP_12_8US 0x00080000 /* TX automatic polling every 12.8us */ #define TAP_25_6US 0x000a0000 /* TX automatic polling every 25.6us */ #define TAP_51_2US 0x000c0000 /* TX automatic polling every 51.2us */ #define TAP_102_4US 0x000e0000 /* TX automatic polling every 102.4us */ #define CAL_NOUSE 0x00000000 /* Not used */ #define CAL_8LONG 0x00004000 /* 8-longword alignment */ #define CAL_16LONG 0x00008000 /* 16-longword alignment */ #define CAL_32LONG 0x0000c000 /* 32-longword alignment */ #define PBL_0 0x00000000 /* DMA burst length = amount in RX FIFO */ #define PBL_1 0x00000100 /* 1 longword DMA burst length */ #define PBL_2 0x00000200 /* 2 longwords DMA burst length */ #define PBL_4 0x00000400 /* 4 longwords DMA burst length */ #define PBL_8 0x00000800 /* 8 longwords DMA burst length */ #define PBL_16 0x00001000 /* 16 longwords DMA burst length */ #define PBL_32 0x00002000 /* 32 longwords DMA burst length */ #define DSL_0 0x00000000 /* 0 longword / descriptor */ #define DSL_1 0x00000004 /* 1 longword / descriptor */ #define 
DSL_2 0x00000008 /* 2 longwords / descriptor */ #define DSL_4 0x00000010 /* 4 longwords / descriptor */ #define DSL_8 0x00000020 /* 8 longwords / descriptor */ #define DSL_16 0x00000040 /* 16 longwords / descriptor */ #define DSL_32 0x00000080 /* 32 longwords / descriptor */ /* ** DC21040 Transmit Poll Demand Register (DE4X5_TPD) */ #define TPD 0x00000001 /* Transmit Poll Demand */ /* ** DC21040 Receive Poll Demand Register (DE4X5_RPD) */ #define RPD 0x00000001 /* Receive Poll Demand */ /* ** DC21040 Receive Ring Base Address Register (DE4X5_RRBA) */ #define RRBA 0xfffffffc /* RX Descriptor List Start Address */ /* ** DC21040 Transmit Ring Base Address Register (DE4X5_TRBA) */ #define TRBA 0xfffffffc /* TX Descriptor List Start Address */ /* ** Status Register (DE4X5_STS) */ #define STS_GPI 0x04000000 /* General Purpose Port Interrupt */ #define STS_BE 0x03800000 /* Bus Error Bits */ #define STS_TS 0x00700000 /* Transmit Process State */ #define STS_RS 0x000e0000 /* Receive Process State */ #define STS_NIS 0x00010000 /* Normal Interrupt Summary */ #define STS_AIS 0x00008000 /* Abnormal Interrupt Summary */ #define STS_ER 0x00004000 /* Early Receive */ #define STS_FBE 0x00002000 /* Fatal Bus Error */ #define STS_SE 0x00002000 /* System Error */ #define STS_LNF 0x00001000 /* Link Fail */ #define STS_FD 0x00000800 /* Full-Duplex Short Frame Received */ #define STS_TM 0x00000800 /* Timer Expired (DC21041) */ #define STS_ETI 0x00000400 /* Early Transmit Interrupt */ #define STS_AT 0x00000400 /* AUI/TP Pin */ #define STS_RWT 0x00000200 /* Receive Watchdog Time-Out */ #define STS_RPS 0x00000100 /* Receive Process Stopped */ #define STS_RU 0x00000080 /* Receive Buffer Unavailable */ #define STS_RI 0x00000040 /* Receive Interrupt */ #define STS_UNF 0x00000020 /* Transmit Underflow */ #define STS_LNP 0x00000010 /* Link Pass */ #define STS_ANC 0x00000010 /* Autonegotiation Complete */ #define STS_TJT 0x00000008 /* Transmit Jabber Time-Out */ #define STS_TU 0x00000004 /* 
Transmit Buffer Unavailable */ #define STS_TPS 0x00000002 /* Transmit Process Stopped */ #define STS_TI 0x00000001 /* Transmit Interrupt */ #define EB_PAR 0x00000000 /* Parity Error */ #define EB_MA 0x00800000 /* Master Abort */ #define EB_TA 0x01000000 /* Target Abort */ #define EB_RES0 0x01800000 /* Reserved */ #define EB_RES1 0x02000000 /* Reserved */ #define TS_STOP 0x00000000 /* Stopped */ #define TS_FTD 0x00100000 /* Fetch Transmit Descriptor */ #define TS_WEOT 0x00200000 /* Wait for End Of Transmission */ #define TS_QDAT 0x00300000 /* Queue skb data into TX FIFO */ #define TS_RES 0x00400000 /* Reserved */ #define TS_SPKT 0x00500000 /* Setup Packet */ #define TS_SUSP 0x00600000 /* Suspended */ #define TS_CLTD 0x00700000 /* Close Transmit Descriptor */ #define RS_STOP 0x00000000 /* Stopped */ #define RS_FRD 0x00020000 /* Fetch Receive Descriptor */ #define RS_CEOR 0x00040000 /* Check for End of Receive Packet */ #define RS_WFRP 0x00060000 /* Wait for Receive Packet */ #define RS_SUSP 0x00080000 /* Suspended */ #define RS_CLRD 0x000a0000 /* Close Receive Descriptor */ #define RS_FLUSH 0x000c0000 /* Flush RX FIFO */ #define RS_QRFS 0x000e0000 /* Queue RX FIFO into RX Skb */ #define INT_CANCEL 0x0001ffff /* For zeroing all interrupt sources */ /* ** Operation Mode Register (DE4X5_OMR) */ #define OMR_SC 0x80000000 /* Special Capture Effect Enable */ #define OMR_RA 0x40000000 /* Receive All */ #define OMR_SDP 0x02000000 /* SD Polarity - MUST BE ASSERTED */ #define OMR_SCR 0x01000000 /* Scrambler Mode */ #define OMR_PCS 0x00800000 /* PCS Function */ #define OMR_TTM 0x00400000 /* Transmit Threshold Mode */ #define OMR_SF 0x00200000 /* Store and Forward */ #define OMR_HBD 0x00080000 /* HeartBeat Disable */ #define OMR_PS 0x00040000 /* Port Select */ #define OMR_CA 0x00020000 /* Capture Effect Enable */ #define OMR_BP 0x00010000 /* Back Pressure */ #define OMR_TR 0x0000c000 /* Threshold Control Bits */ #define OMR_ST 0x00002000 /* Start/Stop Transmission Command */ 
#define OMR_FC 0x00001000 /* Force Collision Mode */ #define OMR_OM 0x00000c00 /* Operating Mode */ #define OMR_FDX 0x00000200 /* Full Duplex Mode */ #define OMR_FKD 0x00000100 /* Flaky Oscillator Disable */ #define OMR_PM 0x00000080 /* Pass All Multicast */ #define OMR_PR 0x00000040 /* Promiscuous Mode */ #define OMR_SB 0x00000020 /* Start/Stop Backoff Counter */ #define OMR_IF 0x00000010 /* Inverse Filtering */ #define OMR_PB 0x00000008 /* Pass Bad Frames */ #define OMR_HO 0x00000004 /* Hash Only Filtering Mode */ #define OMR_SR 0x00000002 /* Start/Stop Receive */ #define OMR_HP 0x00000001 /* Hash/Perfect Receive Filtering Mode */ #define TR_72 0x00000000 /* Threshold set to 72 (128) bytes */ #define TR_96 0x00004000 /* Threshold set to 96 (256) bytes */ #define TR_128 0x00008000 /* Threshold set to 128 (512) bytes */ #define TR_160 0x0000c000 /* Threshold set to 160 (1024) bytes */ #define OMR_DEF (OMR_SDP) #define OMR_SIA (OMR_SDP | OMR_TTM) #define OMR_SYM (OMR_SDP | OMR_SCR | OMR_PCS | OMR_HBD | OMR_PS) #define OMR_MII_10 (OMR_SDP | OMR_TTM | OMR_PS) #define OMR_MII_100 (OMR_SDP | OMR_HBD | OMR_PS) /* ** DC21040 Interrupt Mask Register (DE4X5_IMR) */ #define IMR_GPM 0x04000000 /* General Purpose Port Mask */ #define IMR_NIM 0x00010000 /* Normal Interrupt Summary Mask */ #define IMR_AIM 0x00008000 /* Abnormal Interrupt Summary Mask */ #define IMR_ERM 0x00004000 /* Early Receive Mask */ #define IMR_FBM 0x00002000 /* Fatal Bus Error Mask */ #define IMR_SEM 0x00002000 /* System Error Mask */ #define IMR_LFM 0x00001000 /* Link Fail Mask */ #define IMR_FDM 0x00000800 /* Full-Duplex (Short Frame) Mask */ #define IMR_TMM 0x00000800 /* Timer Expired Mask (DC21041) */ #define IMR_ETM 0x00000400 /* Early Transmit Interrupt Mask */ #define IMR_ATM 0x00000400 /* AUI/TP Switch Mask */ #define IMR_RWM 0x00000200 /* Receive Watchdog Time-Out Mask */ #define IMR_RSM 0x00000100 /* Receive Stopped Mask */ #define IMR_RUM 0x00000080 /* Receive Buffer Unavailable Mask */ #define 
IMR_RIM 0x00000040 /* Receive Interrupt Mask */ #define IMR_UNM 0x00000020 /* Underflow Interrupt Mask */ #define IMR_ANM 0x00000010 /* Autonegotiation Complete Mask */ #define IMR_LPM 0x00000010 /* Link Pass */ #define IMR_TJM 0x00000008 /* Transmit Time-Out Jabber Mask */ #define IMR_TUM 0x00000004 /* Transmit Buffer Unavailable Mask */ #define IMR_TSM 0x00000002 /* Transmission Stopped Mask */ #define IMR_TIM 0x00000001 /* Transmit Interrupt Mask */ /* ** Missed Frames and FIFO Overflow Counters (DE4X5_MFC) */ #define MFC_FOCO 0x10000000 /* FIFO Overflow Counter Overflow Bit */ #define MFC_FOC 0x0ffe0000 /* FIFO Overflow Counter Bits */ #define MFC_OVFL 0x00010000 /* Missed Frames Counter Overflow Bit */ #define MFC_CNTR 0x0000ffff /* Missed Frames Counter Bits */ #define MFC_FOCM 0x1ffe0000 /* FIFO Overflow Counter Mask */ /* ** DC21040 Ethernet Address PROM (DE4X5_APROM) */ #define APROM_DN 0x80000000 /* Data Not Valid */ #define APROM_DT 0x000000ff /* Address Byte */ /* ** DC21041 Boot/Ethernet Address ROM (DE4X5_BROM) */ #define BROM_MODE 0x00008000 /* MODE_1: 0, MODE_0: 1 (read only) */ #define BROM_RD 0x00004000 /* Read from Boot ROM */ #define BROM_WR 0x00002000 /* Write to Boot ROM */ #define BROM_BR 0x00001000 /* Select Boot ROM when set */ #define BROM_SR 0x00000800 /* Select Serial ROM when set */ #define BROM_REG 0x00000400 /* External Register Select */ #define BROM_DT 0x000000ff /* Data Byte */ /* ** DC21041 Serial/Ethernet Address ROM (DE4X5_SROM, DE4X5_MII) */ #define MII_MDI 0x00080000 /* MII Management Data In */ #define MII_MDO 0x00060000 /* MII Management Mode/Data Out */ #define MII_MRD 0x00040000 /* MII Management Define Read Mode */ #define MII_MWR 0x00000000 /* MII Management Define Write Mode */ #define MII_MDT 0x00020000 /* MII Management Data Out */ #define MII_MDC 0x00010000 /* MII Management Clock */ #define MII_RD 0x00004000 /* Read from MII */ #define MII_WR 0x00002000 /* Write to MII */ #define MII_SEL 0x00000800 /* Select MII 
when RESET */ #define SROM_MODE 0x00008000 /* MODE_1: 0, MODE_0: 1 (read only) */ #define SROM_RD 0x00004000 /* Read from Boot ROM */ #define SROM_WR 0x00002000 /* Write to Boot ROM */ #define SROM_BR 0x00001000 /* Select Boot ROM when set */ #define SROM_SR 0x00000800 /* Select Serial ROM when set */ #define SROM_REG 0x00000400 /* External Register Select */ #define SROM_DT 0x000000ff /* Data Byte */ #define DT_OUT 0x00000008 /* Serial Data Out */ #define DT_IN 0x00000004 /* Serial Data In */ #define DT_CLK 0x00000002 /* Serial ROM Clock */ #define DT_CS 0x00000001 /* Serial ROM Chip Select */ #define MII_PREAMBLE 0xffffffff /* MII Management Preamble */ #define MII_TEST 0xaaaaaaaa /* MII Test Signal */ #define MII_STRD 0x06 /* Start of Frame+Op Code: use low nibble */ #define MII_STWR 0x0a /* Start of Frame+Op Code: use low nibble */ #define MII_CR 0x00 /* MII Management Control Register */ #define MII_SR 0x01 /* MII Management Status Register */ #define MII_ID0 0x02 /* PHY Identifier Register 0 */ #define MII_ID1 0x03 /* PHY Identifier Register 1 */ #define MII_ANA 0x04 /* Auto Negotiation Advertisement */ #define MII_ANLPA 0x05 /* Auto Negotiation Link Partner Ability */ #define MII_ANE 0x06 /* Auto Negotiation Expansion */ #define MII_ANP 0x07 /* Auto Negotiation Next Page TX */ #define DE4X5_MAX_MII 32 /* Maximum address of MII PHY devices */ /* ** MII Management Control Register */ #define MII_CR_RST 0x8000 /* RESET the PHY chip */ #define MII_CR_LPBK 0x4000 /* Loopback enable */ #define MII_CR_SPD 0x2000 /* 0: 10Mb/s; 1: 100Mb/s */ #define MII_CR_10 0x0000 /* Set 10Mb/s */ #define MII_CR_100 0x2000 /* Set 100Mb/s */ #define MII_CR_ASSE 0x1000 /* Auto Speed Select Enable */ #define MII_CR_PD 0x0800 /* Power Down */ #define MII_CR_ISOL 0x0400 /* Isolate Mode */ #define MII_CR_RAN 0x0200 /* Restart Auto Negotiation */ #define MII_CR_FDM 0x0100 /* Full Duplex Mode */ #define MII_CR_CTE 0x0080 /* Collision Test Enable */ /* ** MII Management Status Register */ 
#define MII_SR_T4C 0x8000 /* 100BASE-T4 capable */ #define MII_SR_TXFD 0x4000 /* 100BASE-TX Full Duplex capable */ #define MII_SR_TXHD 0x2000 /* 100BASE-TX Half Duplex capable */ #define MII_SR_TFD 0x1000 /* 10BASE-T Full Duplex capable */ #define MII_SR_THD 0x0800 /* 10BASE-T Half Duplex capable */ #define MII_SR_ASSC 0x0020 /* Auto Speed Selection Complete*/ #define MII_SR_RFD 0x0010 /* Remote Fault Detected */ #define MII_SR_ANC 0x0008 /* Auto Negotiation capable */ #define MII_SR_LKS 0x0004 /* Link Status */ #define MII_SR_JABD 0x0002 /* Jabber Detect */ #define MII_SR_XC 0x0001 /* Extended Capabilities */ /* ** MII Management Auto Negotiation Advertisement Register */ #define MII_ANA_TAF 0x03e0 /* Technology Ability Field */ #define MII_ANA_T4AM 0x0200 /* T4 Technology Ability Mask */ #define MII_ANA_TXAM 0x0180 /* TX Technology Ability Mask */ #define MII_ANA_FDAM 0x0140 /* Full Duplex Technology Ability Mask */ #define MII_ANA_HDAM 0x02a0 /* Half Duplex Technology Ability Mask */ #define MII_ANA_100M 0x0380 /* 100Mb Technology Ability Mask */ #define MII_ANA_10M 0x0060 /* 10Mb Technology Ability Mask */ #define MII_ANA_CSMA 0x0001 /* CSMA-CD Capable */ /* ** MII Management Auto Negotiation Remote End Register */ #define MII_ANLPA_NP 0x8000 /* Next Page (Enable) */ #define MII_ANLPA_ACK 0x4000 /* Remote Acknowledge */ #define MII_ANLPA_RF 0x2000 /* Remote Fault */ #define MII_ANLPA_TAF 0x03e0 /* Technology Ability Field */ #define MII_ANLPA_T4AM 0x0200 /* T4 Technology Ability Mask */ #define MII_ANLPA_TXAM 0x0180 /* TX Technology Ability Mask */ #define MII_ANLPA_FDAM 0x0140 /* Full Duplex Technology Ability Mask */ #define MII_ANLPA_HDAM 0x02a0 /* Half Duplex Technology Ability Mask */ #define MII_ANLPA_100M 0x0380 /* 100Mb Technology Ability Mask */ #define MII_ANLPA_10M 0x0060 /* 10Mb Technology Ability Mask */ #define MII_ANLPA_CSMA 0x0001 /* CSMA-CD Capable */ /* ** SROM Media Definitions (ABG SROM Section) */ #define MEDIA_NWAY 0x0080 /* Nway (Auto 
Negotiation) on PHY */ #define MEDIA_MII 0x0040 /* MII Present on the adapter */ #define MEDIA_FIBRE 0x0008 /* Fibre Media present */ #define MEDIA_AUI 0x0004 /* AUI Media present */ #define MEDIA_TP 0x0002 /* TP Media present */ #define MEDIA_BNC 0x0001 /* BNC Media present */ /* ** SROM Definitions (Digital Semiconductor Format) */ #define SROM_SSVID 0x0000 /* Sub-system Vendor ID offset */ #define SROM_SSID 0x0002 /* Sub-system ID offset */ #define SROM_CISPL 0x0004 /* CardBus CIS Pointer low offset */ #define SROM_CISPH 0x0006 /* CardBus CIS Pointer high offset */ #define SROM_IDCRC 0x0010 /* ID Block CRC offset*/ #define SROM_RSVD2 0x0011 /* ID Reserved 2 offset */ #define SROM_SFV 0x0012 /* SROM Format Version offset */ #define SROM_CCNT 0x0013 /* Controller Count offset */ #define SROM_HWADD 0x0014 /* Hardware Address offset */ #define SROM_MRSVD 0x007c /* Manufacturer Reserved offset*/ #define SROM_CRC 0x007e /* SROM CRC offset */ /* ** SROM Media Connection Definitions */ #define SROM_10BT 0x0000 /* 10BASE-T half duplex */ #define SROM_10BTN 0x0100 /* 10BASE-T with Nway */ #define SROM_10BTF 0x0204 /* 10BASE-T full duplex */ #define SROM_10BTNLP 0x0400 /* 10BASE-T without Link Pass test */ #define SROM_10B2 0x0001 /* 10BASE-2 (BNC) */ #define SROM_10B5 0x0002 /* 10BASE-5 (AUI) */ #define SROM_100BTH 0x0003 /* 100BASE-T half duplex */ #define SROM_100BTF 0x0205 /* 100BASE-T full duplex */ #define SROM_100BT4 0x0006 /* 100BASE-T4 */ #define SROM_100BFX 0x0007 /* 100BASE-FX half duplex (Fiber) */ #define SROM_M10BT 0x0009 /* MII 10BASE-T half duplex */ #define SROM_M10BTF 0x020a /* MII 10BASE-T full duplex */ #define SROM_M100BT 0x000d /* MII 100BASE-T half duplex */ #define SROM_M100BTF 0x020e /* MII 100BASE-T full duplex */ #define SROM_M100BT4 0x000f /* MII 100BASE-T4 */ #define SROM_M100BF 0x0010 /* MII 100BASE-FX half duplex */ #define SROM_M100BFF 0x0211 /* MII 100BASE-FX full duplex */ #define SROM_PDA 0x0800 /* Powerup & Dynamic Autosense */ #define 
SROM_PAO 0x8800 /* Powerup Autosense Only */ #define SROM_NSMI 0xffff /* No Selected Media Information */ /* ** SROM Media Definitions */ #define SROM_10BASET 0x0000 /* 10BASE-T half duplex */ #define SROM_10BASE2 0x0001 /* 10BASE-2 (BNC) */ #define SROM_10BASE5 0x0002 /* 10BASE-5 (AUI) */ #define SROM_100BASET 0x0003 /* 100BASE-T half duplex */ #define SROM_10BASETF 0x0004 /* 10BASE-T full duplex */ #define SROM_100BASETF 0x0005 /* 100BASE-T full duplex */ #define SROM_100BASET4 0x0006 /* 100BASE-T4 */ #define SROM_100BASEF 0x0007 /* 100BASE-FX half duplex */ #define SROM_100BASEFF 0x0008 /* 100BASE-FX full duplex */ #define BLOCK_LEN 0x7f /* Extended blocks length mask */ #define EXT_FIELD 0x40 /* Extended blocks extension field bit */ #define MEDIA_CODE 0x3f /* Extended blocks media code mask */ /* ** SROM Compact Format Block Masks */ #define COMPACT_FI 0x80 /* Format Indicator */ #define COMPACT_LEN 0x04 /* Length */ #define COMPACT_MC 0x3f /* Media Code */ /* ** SROM Extended Format Block Type 0 Masks */ #define BLOCK0_FI 0x80 /* Format Indicator */ #define BLOCK0_MCS 0x80 /* Media Code byte Sign */ #define BLOCK0_MC 0x3f /* Media Code */ /* ** DC21040 Full Duplex Register (DE4X5_FDR) */ #define FDR_FDACV 0x0000ffff /* Full Duplex Auto Configuration Value */ /* ** DC21041 General Purpose Timer Register (DE4X5_GPT) */ #define GPT_CON 0x00010000 /* One shot: 0, Continuous: 1 */ #define GPT_VAL 0x0000ffff /* Timer Value */ /* ** DC21140 General Purpose Register (DE4X5_GEP) (hardware dependent bits) */ /* Valid ONLY for DE500 hardware */ #define GEP_LNP 0x00000080 /* Link Pass (input) */ #define GEP_SLNK 0x00000040 /* SYM LINK (input) */ #define GEP_SDET 0x00000020 /* Signal Detect (input) */ #define GEP_HRST 0x00000010 /* Hard RESET (to PHY) (output) */ #define GEP_FDXD 0x00000008 /* Full Duplex Disable (output) */ #define GEP_PHYL 0x00000004 /* PHY Loopback (output) */ #define GEP_FLED 0x00000002 /* Force Activity LED on (output) */ #define GEP_MODE 0x00000001 
/* 0: 10Mb/s, 1: 100Mb/s */ #define GEP_INIT 0x0000011f /* Setup inputs (0) and outputs (1) */ #define GEP_CTRL 0x00000100 /* GEP control bit */ /* ** SIA Register Defaults */ #define CSR13 0x00000001 #define CSR14 0x0003ff7f /* Autonegotiation disabled */ #define CSR15 0x00000008 /* ** SIA Status Register (DE4X5_SISR) */ #define SISR_LPC 0xffff0000 /* Link Partner's Code Word */ #define SISR_LPN 0x00008000 /* Link Partner Negotiable */ #define SISR_ANS 0x00007000 /* Auto Negotiation Arbitration State */ #define SISR_NSN 0x00000800 /* Non Stable NLPs Detected (DC21041) */ #define SISR_TRF 0x00000800 /* Transmit Remote Fault */ #define SISR_NSND 0x00000400 /* Non Stable NLPs Detected (DC21142) */ #define SISR_ANR_FDS 0x00000400 /* Auto Negotiate Restart/Full Duplex Sel.*/ #define SISR_TRA 0x00000200 /* 10BASE-T Receive Port Activity */ #define SISR_NRA 0x00000200 /* Non Selected Port Receive Activity */ #define SISR_ARA 0x00000100 /* AUI Receive Port Activity */ #define SISR_SRA 0x00000100 /* Selected Port Receive Activity */ #define SISR_DAO 0x00000080 /* PLL All One */ #define SISR_DAZ 0x00000040 /* PLL All Zero */ #define SISR_DSP 0x00000020 /* PLL Self-Test Pass */ #define SISR_DSD 0x00000010 /* PLL Self-Test Done */ #define SISR_APS 0x00000008 /* Auto Polarity State */ #define SISR_LKF 0x00000004 /* Link Fail Status */ #define SISR_LS10 0x00000004 /* 10Mb/s Link Fail Status */ #define SISR_NCR 0x00000002 /* Network Connection Error */ #define SISR_LS100 0x00000002 /* 100Mb/s Link Fail Status */ #define SISR_PAUI 0x00000001 /* AUI_TP Indication */ #define SISR_MRA 0x00000001 /* MII Receive Port Activity */ #define ANS_NDIS 0x00000000 /* Nway disable */ #define ANS_TDIS 0x00001000 /* Transmit Disable */ #define ANS_ADET 0x00002000 /* Ability Detect */ #define ANS_ACK 0x00003000 /* Acknowledge */ #define ANS_CACK 0x00004000 /* Complete Acknowledge */ #define ANS_NWOK 0x00005000 /* Nway OK - FLP Link Good */ #define ANS_LCHK 0x00006000 /* Link Check */ #define 
SISR_RST 0x00000301 /* CSR12 reset */ #define SISR_ANR 0x00001301 /* Autonegotiation restart */ /* ** SIA Connectivity Register (DE4X5_SICR) */ #define SICR_SDM 0xffff0000 /* SIA Diagnostics Mode */ #define SICR_OE57 0x00008000 /* Output Enable 5 6 7 */ #define SICR_OE24 0x00004000 /* Output Enable 2 4 */ #define SICR_OE13 0x00002000 /* Output Enable 1 3 */ #define SICR_IE 0x00001000 /* Input Enable */ #define SICR_EXT 0x00000000 /* SIA MUX Select External SIA Mode */ #define SICR_D_SIA 0x00000400 /* SIA MUX Select Diagnostics - SIA Sigs */ #define SICR_DPLL 0x00000800 /* SIA MUX Select Diagnostics - DPLL Sigs*/ #define SICR_APLL 0x00000a00 /* SIA MUX Select Diagnostics - DPLL Sigs*/ #define SICR_D_RxM 0x00000c00 /* SIA MUX Select Diagnostics - RxM Sigs */ #define SICR_M_RxM 0x00000d00 /* SIA MUX Select Diagnostics - RxM Sigs */ #define SICR_LNKT 0x00000e00 /* SIA MUX Select Diagnostics - Link Test*/ #define SICR_SEL 0x00000f00 /* SIA MUX Select AUI or TP with LEDs */ #define SICR_ASE 0x00000080 /* APLL Start Enable*/ #define SICR_SIM 0x00000040 /* Serial Interface Input Multiplexer */ #define SICR_ENI 0x00000020 /* Encoder Input Multiplexer */ #define SICR_EDP 0x00000010 /* SIA PLL External Input Enable */ #define SICR_AUI 0x00000008 /* 10Base-T (0) or AUI (1) */ #define SICR_CAC 0x00000004 /* CSR Auto Configuration */ #define SICR_PS 0x00000002 /* Pin AUI/TP Selection */ #define SICR_SRL 0x00000001 /* SIA Reset */ #define SIA_RESET 0x00000000 /* SIA Reset Value */ /* ** SIA Transmit and Receive Register (DE4X5_STRR) */ #define STRR_TAS 0x00008000 /* 10Base-T/AUI Autosensing Enable */ #define STRR_SPP 0x00004000 /* Set Polarity Plus */ #define STRR_APE 0x00002000 /* Auto Polarity Enable */ #define STRR_LTE 0x00001000 /* Link Test Enable */ #define STRR_SQE 0x00000800 /* Signal Quality Enable */ #define STRR_CLD 0x00000400 /* Collision Detect Enable */ #define STRR_CSQ 0x00000200 /* Collision Squelch Enable */ #define STRR_RSQ 0x00000100 /* Receive Squelch Enable 
*/ #define STRR_ANE 0x00000080 /* Auto Negotiate Enable */ #define STRR_HDE 0x00000040 /* Half Duplex Enable */ #define STRR_CPEN 0x00000030 /* Compensation Enable */ #define STRR_LSE 0x00000008 /* Link Pulse Send Enable */ #define STRR_DREN 0x00000004 /* Driver Enable */ #define STRR_LBK 0x00000002 /* Loopback Enable */ #define STRR_ECEN 0x00000001 /* Encoder Enable */ #define STRR_RESET 0xffffffff /* Reset value for STRR */ /* ** SIA General Register (DE4X5_SIGR) */ #define SIGR_RMI 0x40000000 /* Receive Match Interrupt */ #define SIGR_GI1 0x20000000 /* General Port Interrupt 1 */ #define SIGR_GI0 0x10000000 /* General Port Interrupt 0 */ #define SIGR_CWE 0x08000000 /* Control Write Enable */ #define SIGR_RME 0x04000000 /* Receive Match Enable */ #define SIGR_GEI1 0x02000000 /* GEP Interrupt Enable on Port 1 */ #define SIGR_GEI0 0x01000000 /* GEP Interrupt Enable on Port 0 */ #define SIGR_LGS3 0x00800000 /* LED/GEP3 Select */ #define SIGR_LGS2 0x00400000 /* LED/GEP2 Select */ #define SIGR_LGS1 0x00200000 /* LED/GEP1 Select */ #define SIGR_LGS0 0x00100000 /* LED/GEP0 Select */ #define SIGR_MD 0x000f0000 /* General Purpose Mode and Data */ #define SIGR_LV2 0x00008000 /* General Purpose LED2 value */ #define SIGR_LE2 0x00004000 /* General Purpose LED2 enable */ #define SIGR_FRL 0x00002000 /* Force Receiver Low */ #define SIGR_DPST 0x00001000 /* PLL Self Test Start */ #define SIGR_LSD 0x00000800 /* LED Stretch Disable */ #define SIGR_FLF 0x00000400 /* Force Link Fail */ #define SIGR_FUSQ 0x00000200 /* Force Unsquelch */ #define SIGR_TSCK 0x00000100 /* Test Clock */ #define SIGR_LV1 0x00000080 /* General Purpose LED1 value */ #define SIGR_LE1 0x00000040 /* General Purpose LED1 enable */ #define SIGR_RWR 0x00000020 /* Receive Watchdog Release */ #define SIGR_RWD 0x00000010 /* Receive Watchdog Disable */ #define SIGR_ABM 0x00000008 /* BNC: 0, AUI:1 */ #define SIGR_JCK 0x00000004 /* Jabber Clock */ #define SIGR_HUJ 0x00000002 /* Host Unjab */ #define SIGR_JBD 0x00000001 
/* Jabber Disable */ #define SIGR_RESET 0xffff0000 /* Reset value for SIGR */ /* ** Receive Descriptor Bit Summary */ #define R_OWN 0x80000000 /* Own Bit */ #define RD_FF 0x40000000 /* Filtering Fail */ #define RD_FL 0x3fff0000 /* Frame Length */ #define RD_ES 0x00008000 /* Error Summary */ #define RD_LE 0x00004000 /* Length Error */ #define RD_DT 0x00003000 /* Data Type */ #define RD_RF 0x00000800 /* Runt Frame */ #define RD_MF 0x00000400 /* Multicast Frame */ #define RD_FS 0x00000200 /* First Descriptor */ #define RD_LS 0x00000100 /* Last Descriptor */ #define RD_TL 0x00000080 /* Frame Too Long */ #define RD_CS 0x00000040 /* Collision Seen */ #define RD_FT 0x00000020 /* Frame Type */ #define RD_RJ 0x00000010 /* Receive Watchdog */ #define RD_RE 0x00000008 /* Report on MII Error */ #define RD_DB 0x00000004 /* Dribbling Bit */ #define RD_CE 0x00000002 /* CRC Error */ #define RD_OF 0x00000001 /* Overflow */ #define RD_RER 0x02000000 /* Receive End Of Ring */ #define RD_RCH 0x01000000 /* Second Address Chained */ #define RD_RBS2 0x003ff800 /* Buffer 2 Size */ #define RD_RBS1 0x000007ff /* Buffer 1 Size */ /* ** Transmit Descriptor Bit Summary */ #define T_OWN 0x80000000 /* Own Bit */ #define TD_ES 0x00008000 /* Error Summary */ #define TD_TO 0x00004000 /* Transmit Jabber Time-Out */ #define TD_LO 0x00000800 /* Loss Of Carrier */ #define TD_NC 0x00000400 /* No Carrier */ #define TD_LC 0x00000200 /* Late Collision */ #define TD_EC 0x00000100 /* Excessive Collisions */ #define TD_HF 0x00000080 /* Heartbeat Fail */ #define TD_CC 0x00000078 /* Collision Counter */ #define TD_LF 0x00000004 /* Link Fail */ #define TD_UF 0x00000002 /* Underflow Error */ #define TD_DE 0x00000001 /* Deferred */ #define TD_IC 0x80000000 /* Interrupt On Completion */ #define TD_LS 0x40000000 /* Last Segment */ #define TD_FS 0x20000000 /* First Segment */ #define TD_FT1 0x10000000 /* Filtering Type */ #define TD_SET 0x08000000 /* Setup Packet */ #define TD_AC 0x04000000 /* Add CRC Disable */ 
#define TD_TER 0x02000000 /* Transmit End Of Ring */ #define TD_TCH 0x01000000 /* Second Address Chained */ #define TD_DPD 0x00800000 /* Disabled Padding */ #define TD_FT0 0x00400000 /* Filtering Type */ #define TD_TBS2 0x003ff800 /* Buffer 2 Size */ #define TD_TBS1 0x000007ff /* Buffer 1 Size */ #define PERFECT_F 0x00000000 #define HASH_F TD_FT0 #define INVERSE_F TD_FT1 #define HASH_O_F (TD_FT1 | TD_F0) /* ** Media / mode state machine definitions ** User selectable: */ #define TP 0x0040 /* 10Base-T (now equiv to _10Mb) */ #define TP_NW 0x0002 /* 10Base-T with Nway */ #define BNC 0x0004 /* Thinwire */ #define AUI 0x0008 /* Thickwire */ #define BNC_AUI 0x0010 /* BNC/AUI on DC21040 indistinguishable */ #define _10Mb 0x0040 /* 10Mb/s Ethernet */ #define _100Mb 0x0080 /* 100Mb/s Ethernet */ #define AUTO 0x4000 /* Auto sense the media or speed */ /* ** Internal states */ #define NC 0x0000 /* No Connection */ #define ANS 0x0020 /* Intermediate AutoNegotiation State */ #define SPD_DET 0x0100 /* Parallel speed detection */ #define INIT 0x0200 /* Initial state */ #define EXT_SIA 0x0400 /* External SIA for motherboard chip */ #define ANS_SUSPECT 0x0802 /* Suspect the ANS (TP) port is down */ #define TP_SUSPECT 0x0803 /* Suspect the TP port is down */ #define BNC_AUI_SUSPECT 0x0804 /* Suspect the BNC or AUI port is down */ #define EXT_SIA_SUSPECT 0x0805 /* Suspect the EXT SIA port is down */ #define BNC_SUSPECT 0x0806 /* Suspect the BNC port is down */ #define AUI_SUSPECT 0x0807 /* Suspect the AUI port is down */ #define MII 0x1000 /* MII on the 21143 */ #define TIMER_CB 0x80000000 /* Timer callback detection */ /* ** DE4X5 DEBUG Options */ #define DEBUG_NONE 0x0000 /* No DEBUG messages */ #define DEBUG_VERSION 0x0001 /* Print version message */ #define DEBUG_MEDIA 0x0002 /* Print media messages */ #define DEBUG_TX 0x0004 /* Print TX (queue_pkt) messages */ #define DEBUG_RX 0x0008 /* Print RX (de4x5_rx) messages */ #define DEBUG_SROM 0x0010 /* Print SROM messages */ #define 
DEBUG_MII 0x0020 /* Print MII messages */ #define DEBUG_OPEN 0x0040 /* Print de4x5_open() messages */ #define DEBUG_CLOSE 0x0080 /* Print de4x5_close() messages */ #define DEBUG_PCICFG 0x0100 #define DEBUG_ALL 0x01ff /* ** Miscellaneous */ #define PCI 0 #define EISA 1 #define DE4X5_HASH_TABLE_LEN 512 /* Bits */ #define DE4X5_HASH_BITS 0x01ff /* 9 LS bits */ #define SETUP_FRAME_LEN 192 /* Bytes */ #define IMPERF_PA_OFFSET 156 /* Bytes */ #define POLL_DEMAND 1 #define LOST_MEDIA_THRESHOLD 3 #define MASK_INTERRUPTS 1 #define UNMASK_INTERRUPTS 0 #define DE4X5_STRLEN 8 #define DE4X5_INIT 0 /* Initialisation time */ #define DE4X5_RUN 1 /* Run time */ #define DE4X5_SAVE_STATE 0 #define DE4X5_RESTORE_STATE 1 /* ** Address Filtering Modes */ #define PERFECT 0 /* 16 perfect physical addresses */ #define HASH_PERF 1 /* 1 perfect, 512 multicast addresses */ #define PERFECT_REJ 2 /* Reject 16 perfect physical addresses */ #define ALL_HASH 3 /* Hashes all physical & multicast addrs */ #define ALL 0 /* Clear out all the setup frame */ #define PHYS_ADDR_ONLY 1 /* Update the physical address only */ /* ** Adapter state */ #define INITIALISED 0 /* After h/w initialised and mem alloc'd */ #define CLOSED 1 /* Ready for opening */ #define OPEN 2 /* Running */ /* ** Various wait times */ #define PDET_LINK_WAIT 1200 /* msecs to wait for link detect bits */ #define ANS_FINISH_WAIT 1000 /* msecs to wait for link detect bits */ /* ** IEEE OUIs for various PHY vendor/chip combos - Reg 2 values only. Since ** the vendors seem split 50-50 on how to calculate the OUI register values ** anyway, just reading Reg2 seems reasonable for now [see de4x5_get_oui()]. 
*/ #define NATIONAL_TX 0x2000 #define BROADCOM_T4 0x03e0 #define SEEQ_T4 0x0016 #define CYPRESS_T4 0x0014 /* ** Speed Selection stuff */ #define SET_10Mb {\ if ((lp->phy[lp->active].id) && (!lp->useSROM || lp->useMII)) {\ omr = inl(DE4X5_OMR) & ~(OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX);\ if ((lp->tmp != MII_SR_ASSC) || (lp->autosense != AUTO)) {\ mii_wr(MII_CR_10|(lp->fdx?MII_CR_FDM:0), MII_CR, lp->phy[lp->active].addr, DE4X5_MII);\ }\ omr |= ((lp->fdx ? OMR_FDX : 0) | OMR_TTM);\ outl(omr, DE4X5_OMR);\ if (!lp->useSROM) lp->cache.gep = 0;\ } else if (lp->useSROM && !lp->useMII) {\ omr = (inl(DE4X5_OMR) & ~(OMR_PS | OMR_HBD | OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ omr |= (lp->fdx ? OMR_FDX : 0);\ outl(omr | (lp->infoblock_csr6 & ~(OMR_SCR | OMR_HBD)), DE4X5_OMR);\ } else {\ omr = (inl(DE4X5_OMR) & ~(OMR_PS | OMR_HBD | OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ omr |= (lp->fdx ? OMR_FDX : 0);\ outl(omr | OMR_SDP | OMR_TTM, DE4X5_OMR);\ lp->cache.gep = (lp->fdx ? 0 : GEP_FDXD);\ gep_wr(lp->cache.gep, dev);\ }\ } #define SET_100Mb {\ if ((lp->phy[lp->active].id) && (!lp->useSROM || lp->useMII)) {\ int fdx=0;\ if (lp->phy[lp->active].id == NATIONAL_TX) {\ mii_wr(mii_rd(0x18, lp->phy[lp->active].addr, DE4X5_MII) & ~0x2000,\ 0x18, lp->phy[lp->active].addr, DE4X5_MII);\ }\ omr = inl(DE4X5_OMR) & ~(OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX);\ sr = mii_rd(MII_SR, lp->phy[lp->active].addr, DE4X5_MII);\ if (!(sr & MII_ANA_T4AM) && lp->fdx) fdx=1;\ if ((lp->tmp != MII_SR_ASSC) || (lp->autosense != AUTO)) {\ mii_wr(MII_CR_100|(fdx?MII_CR_FDM:0), MII_CR, lp->phy[lp->active].addr, DE4X5_MII);\ }\ if (fdx) omr |= OMR_FDX;\ outl(omr, DE4X5_OMR);\ if (!lp->useSROM) lp->cache.gep = 0;\ } else if (lp->useSROM && !lp->useMII) {\ omr = (inl(DE4X5_OMR) & ~(OMR_PS | OMR_HBD | OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ omr |= (lp->fdx ? 
OMR_FDX : 0);\ outl(omr | lp->infoblock_csr6, DE4X5_OMR);\ } else {\ omr = (inl(DE4X5_OMR) & ~(OMR_PS | OMR_HBD | OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ omr |= (lp->fdx ? OMR_FDX : 0);\ outl(omr | OMR_SDP | OMR_PS | OMR_HBD | OMR_PCS | OMR_SCR, DE4X5_OMR);\ lp->cache.gep = (lp->fdx ? 0 : GEP_FDXD) | GEP_MODE;\ gep_wr(lp->cache.gep, dev);\ }\ } /* FIX ME so I don't jam 10Mb networks */ #define SET_100Mb_PDET {\ if ((lp->phy[lp->active].id) && (!lp->useSROM || lp->useMII)) {\ mii_wr(MII_CR_100|MII_CR_ASSE, MII_CR, lp->phy[lp->active].addr, DE4X5_MII);\ omr = (inl(DE4X5_OMR) & ~(OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ outl(omr, DE4X5_OMR);\ } else if (lp->useSROM && !lp->useMII) {\ omr = (inl(DE4X5_OMR) & ~(OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ outl(omr, DE4X5_OMR);\ } else {\ omr = (inl(DE4X5_OMR) & ~(OMR_PS | OMR_HBD | OMR_TTM | OMR_PCS | OMR_SCR | OMR_FDX));\ outl(omr | OMR_SDP | OMR_PS | OMR_HBD | OMR_PCS, DE4X5_OMR);\ lp->cache.gep = (GEP_FDXD | GEP_MODE);\ gep_wr(lp->cache.gep, dev);\ }\ } /* ** Include the IOCTL stuff */ #include <linux/sockios.h> struct de4x5_ioctl { unsigned short cmd; /* Command to run */ unsigned short len; /* Length of the data buffer */ unsigned char __user *data; /* Pointer to the data buffer */ }; /* ** Recognised commands for the driver */ #define DE4X5_GET_HWADDR 0x01 /* Get the hardware address */ #define DE4X5_SET_HWADDR 0x02 /* Set the hardware address */ /* 0x03 and 0x04 were used before and are obsoleted now. Don't use them. */ #define DE4X5_SAY_BOO 0x05 /* Say "Boo!" 
to the kernel log file */ #define DE4X5_GET_MCA 0x06 /* Get a multicast address */ #define DE4X5_SET_MCA 0x07 /* Set a multicast address */ #define DE4X5_CLR_MCA 0x08 /* Clear a multicast address */ #define DE4X5_MCA_EN 0x09 /* Enable a multicast address group */ #define DE4X5_GET_STATS 0x0a /* Get the driver statistics */ #define DE4X5_CLR_STATS 0x0b /* Zero out the driver statistics */ #define DE4X5_GET_OMR 0x0c /* Get the OMR Register contents */ #define DE4X5_SET_OMR 0x0d /* Set the OMR Register contents */ #define DE4X5_GET_REG 0x0e /* Get the DE4X5 Registers */ #define MOTO_SROM_BUG (lp->active == 8 && (get_unaligned_le32(dev->dev_addr) & 0x00ffffff) == 0x3e0008)
{ "pile_set_name": "Github" }
import {$} from 'protractor';
import {rightClick} from '../../tools.po';

/**
 * Page object for the popover auto-close demo page.
 * Each method is a thin wrapper around a protractor element lookup
 * or interaction; no state is kept on the instance.
 */
export class PopoverAutoClosePage {
  /** Left-clicks the button located outside the popover. */
  async clickOutside() {
    await $('#outside-button').click();
  }

  /** Returns the element that displays the current open/closed status. */
  getOpenStatus() {
    return $('#open-status');
  }

  /** Returns the popover window element. */
  getPopover() {
    return $('ngb-popover-window');
  }

  /** Returns the body element inside the popover window. */
  getPopoverContent() {
    const popoverWindow = this.getPopover();
    return popoverWindow.$('div.popover-body');
  }

  /**
   * Clicks the popover trigger button and asserts that the popover
   * window is present afterwards.
   */
  async openPopover() {
    await $('button[ngbPopover]').click();
    expect(await this.getPopover().isPresent()).toBeTruthy(`Popover should be visible`);
  }

  /** Right-clicks the button located outside the popover. */
  async rightClickOutside() {
    await rightClick($('#outside-button'));
  }

  /**
   * Picks an auto-close behaviour from the dropdown.
   * @param type id suffix used to locate the option element `#autoclose-${type}`
   */
  async selectAutoClose(type: string) {
    await $('#autoclose-dropdown').click();
    await $(`#autoclose-${type}`).click();
  }
}
{ "pile_set_name": "Github" }
$NetBSD: distinfo,v 1.25 2020/05/14 19:21:04 joerg Exp $ SHA1 (uhd-3.15.0.0.tar.gz) = 17286a52a591038af16e870962c1767c5935ec2f RMD160 (uhd-3.15.0.0.tar.gz) = b05282c4b3f25bd201e9b91b3652fba9b150c7ae SHA512 (uhd-3.15.0.0.tar.gz) = ef62639d956adb5b4815b8e57bf4d6e62519bcaf509ce87af4f6091b79b4bb78c2bc079ec6f678777c24a146d5d5e1c8f225ad7ac5dba46bec6f8e1cc20309b0 Size (uhd-3.15.0.0.tar.gz) = 12895934 bytes SHA1 (patch-CMakeLists.txt) = 5691ddd6a98f92e649a184cfc092a4eb721c6fe7 SHA1 (patch-lib_rfnoc_dma__fifo__block__ctrl__impl.cpp) = 0dcc0ddc231f3f1c2a6a3a57ef88df5467170fc6 SHA1 (patch-lib_transport_nirio_lvbitx_process-lvbitx.py) = 350af0f9ed999f64105a42195d2df037faef87c7 SHA1 (patch-lib_usrp__clock_octoclock_octoclock__impl.cpp) = b77c91c7a202e3c93707dba67bedc9704f77ab56 SHA1 (patch-lib_usrp_b100_b100__impl.cpp) = 9a53e41b45fdebaf0b56e3030f1c8f9790185046 SHA1 (patch-lib_usrp_cores_rx__dsp__core__3000.cpp) = d63fd1ab1d48da7e9eb8a7deb33a3803fb6c219d SHA1 (patch-lib_usrp_cores_tx__dsp__core__3000.cpp) = 3812d190f830128046ab39b21ba63b6775f8c764 SHA1 (patch-lib_usrp_dboard_db__cbx.cpp) = 3b8864f060acaeb262c91433f0dc0fd229f8c458 SHA1 (patch-lib_usrp_dboard_db__dbsrx.cpp) = 55de5be387fbe59015c4e1c0861313740b9b3088 SHA1 (patch-lib_usrp_dboard_db__dbsrx2.cpp) = a7b905d154ecfcfa72df8561b6ae98cd3f97fb08 SHA1 (patch-lib_usrp_dboard_db__rfx.cpp) = c01c17ee5ba2ad877c1bdf4c940af529adeab8d6 SHA1 (patch-lib_usrp_dboard_db__sbx__common.cpp) = 35345e22a714b67a35055f303aaa074ff3b637fa SHA1 (patch-lib_usrp_dboard_db__sbx__version3.cpp) = 8e243093fae46256dde8f8c531715dd2d88f2c9d SHA1 (patch-lib_usrp_dboard_db__sbx__version4.cpp) = 8ed4d05a98438a852f352c6db003d20b5886bd49 SHA1 (patch-lib_usrp_dboard_db__tvrx.cpp) = 6b94d0a89558198340b75cd2d9b77c1fe0f04c5d SHA1 (patch-lib_usrp_dboard_db__tvrx2.cpp) = 5d3618a80d5ece89c2a4488b12613ca824ba7a19 SHA1 (patch-lib_usrp_dboard_db__ubx.cpp) = c8bdfbe6b4494fc50f6b8229a5ee796cd9d85af5 SHA1 (patch-lib_usrp_dboard_db__wbx__common.cpp) = 
190619aaf7a93314e4d13cac28497b9af0011bf4 SHA1 (patch-lib_usrp_dboard_db__wbx__simple.cpp) = e889a9fd77f610bf672b0e95ab6ab9178c14d7da SHA1 (patch-lib_usrp_dboard_db__wbx__version2.cpp) = 4285b334b32e246d54c53715da7a07f126ef6533 SHA1 (patch-lib_usrp_dboard_db__wbx__version3.cpp) = e1d69f09f9f134a0c5aa2bbf772d8dd995cda681 SHA1 (patch-lib_usrp_dboard_db__wbx__version4.cpp) = dbbe24a2a755031c06ff74045e2502d2ee646f59 SHA1 (patch-lib_usrp_dboard_db__xcvr2450.cpp) = 90d2e4bfe072e84e20c35c256af036da696425c1 SHA1 (patch-lib_usrp_dboard_eiscat_eiscat__radio__ctrl__impl.cpp) = 54e9e06fd37dd7cc5a9767519148266dd915f4a0 SHA1 (patch-lib_usrp_multi__usrp.cpp) = ff711af0232cbe92dfe822411ab228b35fa63389 SHA1 (patch-lib_usrp_x300_x300__radio__ctrl__impl.cpp) = 2f0c10f2443b6d2f361ee8a9f339b39c9498d2bb
{ "pile_set_name": "Github" }
stderr of test 'portals` in directory 'sql/test/pg_regress` itself: # 17:11:18 > # 17:11:18 > "mserver5" "--debug=10" "--set" "gdk_nr_threads=0" "--set" "mapi_open=true" "--set" "mapi_port=38959" "--set" "mapi_usock=/var/tmp/mtest-1142/.s.monetdb.38959" "--set" "monet_prompt=" "--forcemito" "--set" "mal_listing=2" "--dbpath=/ufs/dinther/INSTALL/var/MonetDB/mTests_sql_test_pg_regress" "--set" "mal_listing=0" # 17:11:18 > # builtin opt gdk_dbpath = /ufs/dinther/INSTALL/var/monetdb5/dbfarm/demo # builtin opt gdk_debug = 0 # builtin opt gdk_vmtrim = no # builtin opt monet_prompt = > # builtin opt monet_daemon = no # builtin opt mapi_port = 50000 # builtin opt mapi_open = false # builtin opt mapi_autosense = false # builtin opt sql_optimizer = default_pipe # builtin opt sql_debug = 0 # cmdline opt gdk_nr_threads = 0 # cmdline opt mapi_open = true # cmdline opt mapi_port = 38959 # cmdline opt mapi_usock = /var/tmp/mtest-1142/.s.monetdb.38959 # cmdline opt monet_prompt = # cmdline opt mal_listing = 2 # cmdline opt gdk_dbpath = /ufs/dinther/INSTALL/var/MonetDB/mTests_sql_test_pg_regress # cmdline opt mal_listing = 0 # cmdline opt gdk_debug = 536870922 # 17:11:19 > # 17:11:19 > "mclient" "-lsql" "-ftest" "-Eutf-8" "-i" "-e" "--host=/var/tmp/mtest-1142" "--port=38959" # 17:11:19 > #-- #-- Cursor regression tests #-- #-- leave some cursors open, to test that auto-close works. #-- #-- NO SCROLL disallows backward fetching #-- #-- #-- Cursors outside transaction blocks #-- #-- #-- ROLLBACK should close holdable cursors #-- #-- should fail #-- #-- Parameterized DECLARE needs to insert param values into the cursor portal #-- # 17:11:19 > # 17:11:19 > "Done." # 17:11:19 >
{ "pile_set_name": "Github" }
<!-- This file is excerpted from "XML Query Use Cases" (http://www.w3.org/TR/xquery-use-cases/) Copyright (c) 2007 W3C (R) (MIT, ERCIM, Keio). All Rights Reserved. W3C liability, trademark and document use rules apply. The Status section of "XML Query Use Cases" states as follows: Status of this Document This section describes the status of this document at the time of its publication. Other documents may supersede this document. A list of current W3C publications and the latest revision of this technical report can be found in the W3C technical reports index at http://www.w3.org/TR/. This is the W3C Working Group Note of "XML Query (XQuery) Use Cases", produced by the W3C XML Query Working Group, part of the XML Activity. This document is being published as a Working Group Note to persistently record the Use Cases that guided the development of XQuery 1.0: An XML Query Language and its associated specifications as W3C Recommendations. Please submit comments about this document using W3C's public Bugzilla system (instructions can be found at http://www.w3.org/XML/2005/04/qt-bugzilla). If access to that system is not feasible, you may send your comments to the W3C XSLT/XPath/XQuery public comments mailing list, [email protected]. It will be very helpful if you include the string [XQRecUseCases] in the subject line of your report, whether made in Bugzilla or in email. Each Bugzilla entry and email message should contain only one comment. Archives of the comments and responses are available at http://lists.w3.org/Archives/Public/public-qt-comments/. Publication as a Working Group Note does not imply endorsement by the W3C Membership. At the time of publication, work on this document was considered complete and no further revisions are anticipated. It is a stable document and may be used as reference material or cited from another document. However, this document may be updated, replaced, or made obsolete by other documents at any time. 
This document was produced by a group operating under the 5 February 2004 W3C Patent Policy. W3C maintains a public list of any patent disclosures made in connection with the deliverables of the group; that page also includes instructions for disclosing a patent. An individual who has actual knowledge of a patent which the individual believes contains Essential Claim(s) must disclose the information in accordance with section 6 of the W3C Patent Policy . --> <prices> <book> <title>Advanced Programming in the Unix environment</title> <source>bstore2.example.com</source> <price>65.95</price> </book> <book> <title>Advanced Programming in the Unix environment</title> <source>bstore1.example.com</source> <price>65.95</price> </book> <book> <title>TCP/IP Illustrated</title> <source>bstore2.example.com</source> <price>65.95</price> </book> <book> <title>TCP/IP Illustrated</title> <source>bstore1.example.com</source> <price>65.95</price> </book> <book> <title>Data on the Web</title> <source>bstore2.example.com</source> <price>34.95</price> </book> <book> <title>Data on the Web</title> <source>bstore1.example.com</source> <price>39.95</price> </book> </prices>
{ "pile_set_name": "Github" }
#=============================================================================
# Copyright 2004-2011 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)

# This file sets the basic flags for the C++ language in CMake.
# It also loads the available platform file for the system-compiler
# if it exists.
# It also loads a system - compiler - processor (or target hardware)
# specific file, which is mainly useful for crosscompiling and embedded systems.

# some compilers use different extensions (e.g. sdcc uses .rel)
# so set the extension here first so it can be overridden by the compiler specific file
IF(UNIX)
  SET(CMAKE_CXX_OUTPUT_EXTENSION .o)
ELSE(UNIX)
  SET(CMAKE_CXX_OUTPUT_EXTENSION .obj)
ENDIF(UNIX)

# Tracks whether one of the platform/compiler-specific files below was found,
# so the later INCLUDEs can fall back to progressively more generic files.
SET(_INCLUDED_FILE 0)

# Load compiler-specific information (e.g. Compiler/GNU-CXX.cmake).
# OPTIONAL: silently skipped when no such file exists for this compiler id.
IF(CMAKE_CXX_COMPILER_ID)
  INCLUDE(Compiler/${CMAKE_CXX_COMPILER_ID}-CXX OPTIONAL)
ENDIF(CMAKE_CXX_COMPILER_ID)

# Base name of the compiler executable without extension (e.g. "c++", "cl").
SET(CMAKE_BASE_NAME)
GET_FILENAME_COMPONENT(CMAKE_BASE_NAME ${CMAKE_CXX_COMPILER} NAME_WE)

# since the gnu compiler has several names force g++
IF(CMAKE_COMPILER_IS_GNUCXX)
  SET(CMAKE_BASE_NAME g++)
ENDIF(CMAKE_COMPILER_IS_GNUCXX)

# load a hardware specific file, mostly useful for embedded compilers.
# Tried first by compiler id, then by compiler base name.
IF(CMAKE_SYSTEM_PROCESSOR)
  IF(CMAKE_CXX_COMPILER_ID)
    INCLUDE(Platform/${CMAKE_SYSTEM_NAME}-${CMAKE_CXX_COMPILER_ID}-CXX-${CMAKE_SYSTEM_PROCESSOR} OPTIONAL RESULT_VARIABLE _INCLUDED_FILE)
  ENDIF(CMAKE_CXX_COMPILER_ID)
  IF (NOT _INCLUDED_FILE)
    INCLUDE(Platform/${CMAKE_SYSTEM_NAME}-${CMAKE_BASE_NAME}-${CMAKE_SYSTEM_PROCESSOR} OPTIONAL)
  ENDIF (NOT _INCLUDED_FILE)
ENDIF(CMAKE_SYSTEM_PROCESSOR)

# load the system- and compiler specific files, again preferring the
# compiler-id variant over the base-name variant.
IF(CMAKE_CXX_COMPILER_ID)
  INCLUDE(Platform/${CMAKE_SYSTEM_NAME}-${CMAKE_CXX_COMPILER_ID}-CXX OPTIONAL RESULT_VARIABLE _INCLUDED_FILE)
ENDIF(CMAKE_CXX_COMPILER_ID)
IF (NOT _INCLUDED_FILE)
  INCLUDE(Platform/${CMAKE_SYSTEM_NAME}-${CMAKE_BASE_NAME} OPTIONAL RESULT_VARIABLE _INCLUDED_FILE)
ENDIF (NOT _INCLUDED_FILE)

# We specify the compiler information in the system file for some
# platforms, but this language may not have been enabled when the file
# was first included.  Include it again to get the language info.
# Remove this when all compiler info is removed from system files.
IF (NOT _INCLUDED_FILE)
  INCLUDE(Platform/${CMAKE_SYSTEM_NAME} OPTIONAL)
ENDIF (NOT _INCLUDED_FILE)

# Pull in the ABI information files detected for this compiler, if any.
IF(CMAKE_CXX_SIZEOF_DATA_PTR)
  FOREACH(f ${CMAKE_CXX_ABI_FILES})
    INCLUDE(${f})
  ENDFOREACH()
  UNSET(CMAKE_CXX_ABI_FILES)
ENDIF()

# This should be included before the _INIT variables are
# used to initialize the cache.  Since the rule variables
# have if blocks on them, users can still define them here.
# But, it should still be after the platform file so changes can
# be made to those values.
# User-supplied override file applied before the cache is initialized.
IF(CMAKE_USER_MAKE_RULES_OVERRIDE)
  # Save the full path of the file so try_compile can use it.
  INCLUDE(${CMAKE_USER_MAKE_RULES_OVERRIDE} RESULT_VARIABLE _override)
  SET(CMAKE_USER_MAKE_RULES_OVERRIDE "${_override}")
ENDIF()

IF(CMAKE_USER_MAKE_RULES_OVERRIDE_CXX)
  # Save the full path of the file so try_compile can use it.
  INCLUDE(${CMAKE_USER_MAKE_RULES_OVERRIDE_CXX} RESULT_VARIABLE _override)
  SET(CMAKE_USER_MAKE_RULES_OVERRIDE_CXX "${_override}")
ENDIF()

# for most systems a module is the same as a shared library
# so unless the variable CMAKE_MODULE_EXISTS is set just
# copy the values from the LIBRARY variables
IF(NOT CMAKE_MODULE_EXISTS)
  SET(CMAKE_SHARED_MODULE_CXX_FLAGS ${CMAKE_SHARED_LIBRARY_CXX_FLAGS})
ENDIF(NOT CMAKE_MODULE_EXISTS)

# Create a set of shared library variable specific to C++
# For 90% of the systems, these are the same flags as the C versions
# so if these are not set just copy the flags from the c version
IF(NOT CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS)
  SET(CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS ${CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS})
ENDIF(NOT CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS)

IF(NOT CMAKE_SHARED_LIBRARY_CXX_FLAGS)
  SET(CMAKE_SHARED_LIBRARY_CXX_FLAGS ${CMAKE_SHARED_LIBRARY_C_FLAGS})
ENDIF(NOT CMAKE_SHARED_LIBRARY_CXX_FLAGS)

# NOTE: "NOT DEFINED" (rather than "NOT") is used where an intentionally
# empty value must be distinguished from an unset one.
IF(NOT DEFINED CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS)
  SET(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS ${CMAKE_SHARED_LIBRARY_LINK_C_FLAGS})
ENDIF(NOT DEFINED CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS)

IF(NOT CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG)
  SET(CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG ${CMAKE_SHARED_LIBRARY_RUNTIME_C_FLAG})
ENDIF(NOT CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG)

IF(NOT CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG_SEP)
  SET(CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG_SEP ${CMAKE_SHARED_LIBRARY_RUNTIME_C_FLAG_SEP})
ENDIF(NOT CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG_SEP)

IF(NOT CMAKE_SHARED_LIBRARY_RPATH_LINK_CXX_FLAG)
  SET(CMAKE_SHARED_LIBRARY_RPATH_LINK_CXX_FLAG ${CMAKE_SHARED_LIBRARY_RPATH_LINK_C_FLAG})
ENDIF(NOT CMAKE_SHARED_LIBRARY_RPATH_LINK_CXX_FLAG)

IF(NOT DEFINED CMAKE_EXE_EXPORTS_CXX_FLAG)
  SET(CMAKE_EXE_EXPORTS_CXX_FLAG ${CMAKE_EXE_EXPORTS_C_FLAG})
ENDIF()

IF(NOT DEFINED CMAKE_SHARED_LIBRARY_SONAME_CXX_FLAG)
  SET(CMAKE_SHARED_LIBRARY_SONAME_CXX_FLAG ${CMAKE_SHARED_LIBRARY_SONAME_C_FLAG})
ENDIF()

IF(NOT CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG)
  SET(CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG ${CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG})
ENDIF(NOT CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG)

IF(NOT CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG_SEP)
  SET(CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG_SEP ${CMAKE_SHARED_LIBRARY_RUNTIME_CXX_FLAG_SEP})
ENDIF(NOT CMAKE_EXECUTABLE_RUNTIME_CXX_FLAG_SEP)

IF(NOT CMAKE_EXECUTABLE_RPATH_LINK_CXX_FLAG)
  SET(CMAKE_EXECUTABLE_RPATH_LINK_CXX_FLAG ${CMAKE_SHARED_LIBRARY_RPATH_LINK_CXX_FLAG})
ENDIF(NOT CMAKE_EXECUTABLE_RPATH_LINK_CXX_FLAG)

IF(NOT DEFINED CMAKE_SHARED_LIBRARY_LINK_CXX_WITH_RUNTIME_PATH)
  SET(CMAKE_SHARED_LIBRARY_LINK_CXX_WITH_RUNTIME_PATH ${CMAKE_SHARED_LIBRARY_LINK_C_WITH_RUNTIME_PATH})
ENDIF(NOT DEFINED CMAKE_SHARED_LIBRARY_LINK_CXX_WITH_RUNTIME_PATH)

IF(NOT CMAKE_INCLUDE_FLAG_CXX)
  SET(CMAKE_INCLUDE_FLAG_CXX ${CMAKE_INCLUDE_FLAG_C})
ENDIF(NOT CMAKE_INCLUDE_FLAG_CXX)

IF(NOT CMAKE_INCLUDE_FLAG_SEP_CXX)
  SET(CMAKE_INCLUDE_FLAG_SEP_CXX ${CMAKE_INCLUDE_FLAG_SEP_C})
ENDIF(NOT CMAKE_INCLUDE_FLAG_SEP_CXX)

# repeat for modules
IF(NOT CMAKE_SHARED_MODULE_CREATE_CXX_FLAGS)
  SET(CMAKE_SHARED_MODULE_CREATE_CXX_FLAGS ${CMAKE_SHARED_MODULE_CREATE_C_FLAGS})
ENDIF(NOT CMAKE_SHARED_MODULE_CREATE_CXX_FLAGS)

IF(NOT CMAKE_SHARED_MODULE_CXX_FLAGS)
  SET(CMAKE_SHARED_MODULE_CXX_FLAGS ${CMAKE_SHARED_MODULE_C_FLAGS})
ENDIF(NOT CMAKE_SHARED_MODULE_CXX_FLAGS)

# Initialize CXX link type selection flags from C versions.
FOREACH(type SHARED_LIBRARY SHARED_MODULE EXE) IF(NOT CMAKE_${type}_LINK_STATIC_CXX_FLAGS) SET(CMAKE_${type}_LINK_STATIC_CXX_FLAGS ${CMAKE_${type}_LINK_STATIC_C_FLAGS}) ENDIF(NOT CMAKE_${type}_LINK_STATIC_CXX_FLAGS) IF(NOT CMAKE_${type}_LINK_DYNAMIC_CXX_FLAGS) SET(CMAKE_${type}_LINK_DYNAMIC_CXX_FLAGS ${CMAKE_${type}_LINK_DYNAMIC_C_FLAGS}) ENDIF(NOT CMAKE_${type}_LINK_DYNAMIC_CXX_FLAGS) ENDFOREACH(type) # add the flags to the cache based # on the initial values computed in the platform/*.cmake files # use _INIT variables so that this only happens the first time # and you can set these flags in the cmake cache SET(CMAKE_CXX_FLAGS_INIT "$ENV{CXXFLAGS} ${CMAKE_CXX_FLAGS_INIT}") # avoid just having a space as the initial value for the cache IF(CMAKE_CXX_FLAGS_INIT STREQUAL " ") SET(CMAKE_CXX_FLAGS_INIT) ENDIF(CMAKE_CXX_FLAGS_INIT STREQUAL " ") SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_INIT}" CACHE STRING "Flags used by the compiler during all build types.") IF(NOT CMAKE_NOT_USING_CONFIG_FLAGS) SET (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG_INIT}" CACHE STRING "Flags used by the compiler during debug builds.") SET (CMAKE_CXX_FLAGS_MINSIZEREL "${CMAKE_CXX_FLAGS_MINSIZEREL_INIT}" CACHE STRING "Flags used by the compiler during release minsize builds.") SET (CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE_INIT}" CACHE STRING "Flags used by the compiler during release builds (/MD /Ob1 /Oi /Ot /Oy /Gs will produce slightly less optimized but smaller files).") SET (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO_INIT}" CACHE STRING "Flags used by the compiler during Release with Debug Info builds.") ENDIF(NOT CMAKE_NOT_USING_CONFIG_FLAGS) IF(CMAKE_CXX_STANDARD_LIBRARIES_INIT) SET(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES_INIT}" CACHE STRING "Libraries linked by defalut with all C++ applications.") MARK_AS_ADVANCED(CMAKE_CXX_STANDARD_LIBRARIES) ENDIF(CMAKE_CXX_STANDARD_LIBRARIES_INIT) INCLUDE(CMakeCommonLanguageInclude) # now define the 
# following rules:
# CMAKE_CXX_CREATE_SHARED_LIBRARY
# CMAKE_CXX_CREATE_SHARED_MODULE
# CMAKE_CXX_COMPILE_OBJECT
# CMAKE_CXX_LINK_EXECUTABLE

# variables supplied by the generator at use time
# <TARGET>
# <TARGET_BASE> the target without the suffix
# <OBJECTS>
# <OBJECT>
# <LINK_LIBRARIES>
# <FLAGS>
# <LINK_FLAGS>

# CXX compiler information
# <CMAKE_CXX_COMPILER>
# <CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS>
# <CMAKE_CXX_SHARED_MODULE_CREATE_FLAGS>
# <CMAKE_CXX_LINK_FLAGS>

# Static library tools
# <CMAKE_AR>
# <CMAKE_RANLIB>

# create a shared C++ library
IF(NOT CMAKE_CXX_CREATE_SHARED_LIBRARY)
  SET(CMAKE_CXX_CREATE_SHARED_LIBRARY
      "<CMAKE_CXX_COMPILER> <CMAKE_SHARED_LIBRARY_CXX_FLAGS> <LANGUAGE_COMPILE_FLAGS> <LINK_FLAGS> <CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS> <CMAKE_SHARED_LIBRARY_SONAME_CXX_FLAG><TARGET_SONAME> -o <TARGET> <OBJECTS> <LINK_LIBRARIES>")
ENDIF(NOT CMAKE_CXX_CREATE_SHARED_LIBRARY)

# create a c++ shared module copy the shared library rule by default
IF(NOT CMAKE_CXX_CREATE_SHARED_MODULE)
  SET(CMAKE_CXX_CREATE_SHARED_MODULE ${CMAKE_CXX_CREATE_SHARED_LIBRARY})
ENDIF(NOT CMAKE_CXX_CREATE_SHARED_MODULE)

# Create a static archive incrementally for large object file counts.
# If CMAKE_CXX_CREATE_STATIC_LIBRARY is set it will override these.
IF(NOT DEFINED CMAKE_CXX_ARCHIVE_CREATE)
  SET(CMAKE_CXX_ARCHIVE_CREATE "<CMAKE_AR> cr <TARGET> <LINK_FLAGS> <OBJECTS>")
ENDIF()
IF(NOT DEFINED CMAKE_CXX_ARCHIVE_APPEND)
  SET(CMAKE_CXX_ARCHIVE_APPEND "<CMAKE_AR> r <TARGET> <LINK_FLAGS> <OBJECTS>")
ENDIF()
IF(NOT DEFINED CMAKE_CXX_ARCHIVE_FINISH)
  SET(CMAKE_CXX_ARCHIVE_FINISH "<CMAKE_RANLIB> <TARGET>")
ENDIF()

# compile a C++ file into an object file
IF(NOT CMAKE_CXX_COMPILE_OBJECT)
  SET(CMAKE_CXX_COMPILE_OBJECT
    "<CMAKE_CXX_COMPILER> <DEFINES> <FLAGS> -o <OBJECT> -c <SOURCE>")
ENDIF(NOT CMAKE_CXX_COMPILE_OBJECT)

# link object files into an executable
IF(NOT CMAKE_CXX_LINK_EXECUTABLE)
  SET(CMAKE_CXX_LINK_EXECUTABLE
    "<CMAKE_CXX_COMPILER> <FLAGS> <CMAKE_CXX_LINK_FLAGS> <LINK_FLAGS> <OBJECTS> -o <TARGET> <LINK_LIBRARIES>")
ENDIF(NOT CMAKE_CXX_LINK_EXECUTABLE)

# Hide the flag variables from the basic cache view.
MARK_AS_ADVANCED(
CMAKE_BUILD_TOOL
CMAKE_VERBOSE_MAKEFILE
CMAKE_CXX_FLAGS
CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_RELWITHDEBINFO
CMAKE_CXX_FLAGS_MINSIZEREL
CMAKE_CXX_FLAGS_DEBUG)

# Guard so other modules can tell this language file has been processed.
SET(CMAKE_CXX_INFORMATION_LOADED 1)
{ "pile_set_name": "Github" }
/*==LICENSE==* CyanWorlds.com Engine - MMOG client, server and tools Copyright (C) 2011 Cyan Worlds, Inc. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Additional permissions under GNU GPL version 3 section 7 If you modify this Program, or any covered work, by linking or combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK, NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK (or a modified version of those libraries), containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA, PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the licensors of this Program grant you additional permission to convey the resulting work. Corresponding Source for a non-source form of such a combination shall include the source code for the parts of OpenSSL and IJG JPEG Library used as well as that of the covered work. You can contact Cyan Worlds, Inc. by email [email protected] or by snail mail at: Cyan Worlds, Inc. 
      14617 N Newport Hwy
      Mead, WA   99021

*==LICENSE==*/

#include "HeadSpin.h"
#include "plLightInfo.h"
#include "plLightKonstants.h"
#include "hsBounds.h"
#include "hsStream.h"
#include "hsResMgr.h"
#include "pnMessage/plNodeRefMsg.h"
#include "plgDispatch.h"
#include "plIntersect/plVolumeIsect.h"
#include "plDrawable/plSpaceTree.h"
#include "plDrawable/plDrawableGenerator.h"
#include "plDrawable/plDrawableSpans.h"
#include "hsGDeviceRef.h"
#include "plPipeline/plRenderTarget.h"
#include "hsFastMath.h"
#include "pnSceneObject/plDrawInterface.h"
#include "plSurface/plLayerInterface.h"
#include "plSurface/plLayer.h"
#include "plSurface/hsGMaterial.h"
#include "plGImage/plMipmap.h"
#include "plMessage/plRenderMsg.h"
#include "plMessage/plRenderRequestMsg.h"
#include "plScene/plRenderRequest.h"
#include "plPipeline.h"
#include "plIntersect/plSoftVolume.h"
#include "plPipeDebugFlags.h"
#include "pnMessage/plPipeResMakeMsg.h"
#include "plScene/plVisRegion.h"
#include "plScene/plVisMgr.h"

// heinous
#include "plNetClient/plNetClientMgr.h"
#include "pnMessage/plEnableMsg.h"

// Clip-range defaults used when building the spotlight projection matrix.
static float kMaxYon = 1000.f;
static float kMinHither = 1.f;

#include "plLightProxy.h"
#include "plDrawable/plDrawableGenerator.h"

// Constructor: zero/identity-initialize all transforms, create the debug
// proxy drawable generator, and mark the light visible in the normal vis set.
plLightInfo::plLightInfo()
:   fSceneNode(nil),
    fDeviceRef(nil),
    fVolFlags(0),
    fProjection(nil),
    fSoftVolume(nil)
{
    fLightToWorld.Reset();
    fWorldToLight.Reset();
    fLocalToWorld.Reset();
    fWorldToLocal.Reset();
    fLightToLocal.Reset();
    fLocalToLight.Reset();
    fWorldToProj.Reset();

    // Not yet linked into any pipeline's intrusive light list.
    fNextDevPtr = nil;
    fPrevDevPtr = nil;

    fProxyGen = new plLightProxy;
    fProxyGen->Init(this);

    fRegisteredForRenderMsg = false;

    fVisSet.SetBit(plVisMgr::kNormal);
}

// Destructor: unlink from the pipeline light list if linked, drop the
// device-ref refcount, unregister from dispatch, and free the proxy.
plLightInfo::~plLightInfo()
{
    if( fNextDevPtr != nil || fPrevDevPtr != nil )
        Unlink();

    hsRefCnt_SafeUnRef( fDeviceRef );

    if( fRegisteredForRenderMsg )
    {
        plgDispatch::Dispatch()->UnRegisterForExactType(plRenderMsg::Index(), GetKey());
        plgDispatch::Dispatch()->UnRegisterForExactType(plPipeRTMakeMsg::Index(), GetKey());
        fRegisteredForRenderMsg = false;
    }

    delete fProxyGen;
}

// Take shared ownership of the pipeline's device-specific ref for this light.
void plLightInfo::SetDeviceRef( hsGDeviceRef *ref )
{
    hsRefCnt_SafeAssign( fDeviceRef, ref );
}

// Recompute cached strength and flag the device ref so the pipeline re-uploads.
void plLightInfo::IRefresh()
{
    ICheckMaxStrength();
    if( fDeviceRef )
        fDeviceRef->SetDirty( true );
}

// Cache the peak absolute diffuse channel as fMaxStrength; lights below
// a small threshold are marked "zero" (effectively off).
void plLightInfo::ICheckMaxStrength()
{
    float r = GetDiffuse().r >= 0 ? GetDiffuse().r : -GetDiffuse().r;
    float g = GetDiffuse().g >= 0 ? GetDiffuse().g : -GetDiffuse().g;
    float b = GetDiffuse().b >= 0 ? GetDiffuse().b : -GetDiffuse().b;
    fMaxStrength = r > g ? ( r > b ? r : b ) : ( g > b ? g : b );

    const float kMinMaxStrength = 1.e-2f;
    SetZero(fMaxStrength < kMinMaxStrength);
}

// Base strength query: zero when idle; otherwise the cached max strength,
// scaled by the soft-volume weight at the bounds' center when one is attached.
void plLightInfo::GetStrengthAndScale(const hsBounds3Ext& bnd, float& strength, float& scale) const
{
    if( IsIdle() )
    {
        strength = scale = 0.f;
        return;
    }
    strength = fMaxStrength;
    scale = 1.f;
    if( fSoftVolume )
    {
        scale = fSoftVolume->GetStrength(bnd.GetCenter());
        strength *= scale;
    }
    return;
}

// Collect all space-tree leaves this light can touch, ignoring idle state
// and include lists ("forced").  No isect volume => everything is affected.
void plLightInfo::GetAffectedForced(const plSpaceTree* space, hsBitVector& list, bool charac)
{
    Refresh();
    if( IGetIsect() )
    {
        space->HarvestLeaves(IGetIsect(), list);
    }
    else
    {
        list.Set(space->GetNumLeaves());
    }
}

// Same as above, but honors idle state and the include-list properties;
// 'charac' presumably means the target is a character/avatar — TODO confirm.
void plLightInfo::GetAffected(const plSpaceTree* space, hsBitVector& list, bool charac)
{
    Refresh();
    if( IsIdle() )
        return;

    if( !GetProperty(kLPHasIncludes) || (GetProperty(kLPIncludesChars) && charac) )
    {
        if( IGetIsect() )
        {
            space->HarvestLeaves(IGetIsect(), list);
        }
        else
        {
            list.Set(space->GetNumLeaves());
        }
    }
}

// Filtering variant: intersect the already-visible leaf list with this
// light's volume.  Returns visList unchanged when there is no isect volume,
// and an empty litList when the light is idle or excluded.
// NOTE(review): the static 'cache' bit vector makes this non-reentrant.
const hsTArray<int16_t>& plLightInfo::GetAffected(plSpaceTree* space, const hsTArray<int16_t>& visList, hsTArray<int16_t>& litList, bool charac)
{
    Refresh();
    if( !IsIdle() )
    {
        if( !GetProperty(kLPHasIncludes) || (GetProperty(kLPIncludesChars) && charac) )
        {
            if( IGetIsect() )
            {
                static hsBitVector cache;
                cache.Clear();
                space->EnableLeaves(visList, cache);

                space->HarvestEnabledLeaves(IGetIsect(), cache, litList);

                return litList;
            }
            else
            {
                return visList;
            }
        }
    }
    litList.SetCount(0);
    return litList;
}

//// Set/GetProperty /////////////////////////////////////////////////////////
//  Sets/gets a property just like the normal Set/GetNativeProperty, but the
//  flag taken in is from plDrawInterface, not our props flags. So we have to
//  translate...
void plLightInfo::SetProperty( int prop, bool on )
{
    plObjInterface::SetProperty(prop, on);
    // Keep the debug proxy in sync with our enabled state.
    if( kDisable == prop )
        fProxyGen->SetDisable(on);
}

//// SetSpecular /////////////////////////////////////////////////////////////
//  A bit more complicated here--make sure we set/clear the kLPHasSpecular
//  flag so we can test more easily for such a condition.
void plLightInfo::SetSpecular( const hsColorRGBA& c )
{
    fSpecular = c;

    if( fSpecular.r == 0.f && fSpecular.g == 0.f && fSpecular.b == 0.f )
        SetProperty( kLPHasSpecular, false );
    else
        SetProperty( kLPHasSpecular, true );

    SetDirty();
}

// Update all cached transforms from a new local-to-world pair, propagate to
// the intersection volume, the device ref, the proxy, and (when projecting)
// the projection layer's world-to-texture transform.
void plLightInfo::SetTransform(const hsMatrix44& l2w, const hsMatrix44& w2l)
{
    fLocalToWorld = l2w;
    fWorldToLocal = w2l;

    // Compose with the fixed light-space offset to get light<->world.
    fLightToWorld = l2w * fLightToLocal;
    fWorldToLight = fLocalToLight * w2l;

    if( IGetIsect() )
        IGetIsect()->SetTransform(fLightToWorld, fWorldToLight);

    if( fDeviceRef != nil )
        fDeviceRef->SetDirty( true );

    fProxyGen->SetTransform(fLightToWorld, fWorldToLight);

    SetDirty(true);

    if( GetProjection() )
    {
        Refresh();
        hsMatrix44 w2proj = IGetWorldToProj();
        plLayer* lay = plLayer::ConvertNoRef(GetProjection()->BottomOfStack());
        if( lay )
        {
            lay->SetTransform(w2proj);
        }
    }
}

// Set the fixed offset between the owning object's local space and the
// light's own space (both directions).
void plLightInfo::SetLocalToLight(const hsMatrix44& l2lt, const hsMatrix44& lt2l)
{
    fLocalToLight = l2lt;
    fLightToLocal = lt2l;
}

// Transform accessors.
const hsMatrix44& plLightInfo::GetLocalToWorld() const
{
    return fLocalToWorld;
}

const hsMatrix44& plLightInfo::GetWorldToLocal() const
{
    return fWorldToLocal;
}

const hsMatrix44& plLightInfo::GetLightToWorld() const
{
    return fLightToWorld;
}

const hsMatrix44& plLightInfo::GetWorldToLight() const
{
    return fWorldToLight;
}

#include "plProfile.h"
plProfile_CreateTimer("Light Info", "RenderSetup", LightInfo);

// Register a vis region with this light; kIsNot regions go into the fVisNot
// set, others into fVisSet.  A ReplaceNormal region removes us from the
// default (kNormal) visibility set.
void plLightInfo::IAddVisRegion(plVisRegion* reg)
{
    if( reg )
    {
        int idx = fVisRegions.Find(reg);
        if( fVisRegions.kMissingIndex == idx )
        {
            fVisRegions.Append(reg);
            if( reg->GetProperty(plVisRegion::kIsNot) )
                fVisNot.SetBit(reg->GetIndex());
            else
            {
                fVisSet.SetBit(reg->GetIndex());
                if( reg->ReplaceNormal() )
                    fVisSet.ClearBit(plVisMgr::kNormal);
            }
        }
    }
}

// Inverse of IAddVisRegion; note it does not restore the kNormal bit if the
// removed region had cleared it — presumably intentional, TODO confirm.
void plLightInfo::IRemoveVisRegion(plVisRegion* reg)
{
    if( reg )
    {
        int idx = fVisRegions.Find(reg);
        if( fVisRegions.kMissingIndex != idx )
        {
            fVisRegions.Remove(idx);
            if( reg->GetProperty(plVisRegion::kIsNot) )
                fVisNot.ClearBit(reg->GetIndex());
            else
                fVisSet.ClearBit(reg->GetIndex());
        }
    }
}

// Message pump: handles render registration (plRenderMsg / plPipeRTMakeMsg),
// keyed reference add/remove for projection layer, soft volume and vis
// regions (plGenRefMsg), and enable/disable (plEnableMsg).
bool plLightInfo::MsgReceive(plMessage* msg)
{
    plRenderMsg* rendMsg = plRenderMsg::ConvertNoRef(msg);
    if( rendMsg )
    {
        plProfile_BeginLap(LightInfo, this->GetKey()->GetUoid().GetObjectName().c_str());

        // Shadow-only lights never register with the pipeline.
        if( !fDeviceRef && !GetProperty(kLPShadowOnly) )
        {
            rendMsg->Pipeline()->RegisterLight( this );
        }

        ICheckMaxStrength();

        plProfile_EndLap(LightInfo, this->GetKey()->GetUoid().GetObjectName().c_str());
        return true;
    }
    plGenRefMsg* refMsg = plGenRefMsg::ConvertNoRef(msg);
    if( refMsg )
    {
        if( refMsg->GetContext() & (plRefMsg::kOnCreate|plRefMsg::kOnRequest|plRefMsg::kOnReplace) )
        {
            switch( refMsg->fType )
            {
            case kProjection:
                fProjection = plLayerInterface::ConvertNoRef(refMsg->GetRef());
                {
                    // Magic-name hook: runtime pattern lights always force
                    // their projection on.  NOTE(review): name-based check
                    // is fragile — confirm against the exporter.
                    if( GetKey() && GetKey()->GetName().starts_with("RTPatternLight") )
                        SetProperty(kLPForceProj, true);
                }
                break;
            case kSoftVolume:
                fSoftVolume = plSoftVolume::ConvertNoRef(refMsg->GetRef());
                break;
            case kVisRegion:
                IAddVisRegion(plVisRegion::ConvertNoRef(refMsg->GetRef()));
                break;
            }
        }
        else if( refMsg->GetContext() & (plRefMsg::kOnRemove | plRefMsg::kOnDestroy) )
        {
            switch( refMsg->fType )
            {
            case kProjection:
                fProjection = nil;
                break;
            case kSoftVolume:
                fSoftVolume = nil;
                break;
            case kVisRegion:
                IRemoveVisRegion(plVisRegion::ConvertNoRef(refMsg->GetRef()));
                break;
            }
        }
        return true;
    }
    plPipeRTMakeMsg* rtMake = plPipeRTMakeMsg::ConvertNoRef(msg);
    if( rtMake )
    {
        // Make sure we're registered with the pipeline
        // If we're only here to cast shadows, just don't tell anyone
        // about us.
        if( !fDeviceRef && !GetProperty(kLPShadowOnly) )
        {
            rtMake->Pipeline()->RegisterLight( this );
        }
        return true;
    }
    plEnableMsg* enaMsg = plEnableMsg::ConvertNoRef(msg);
    if( enaMsg )
    {
        SetProperty(kDisable, enaMsg->Cmd(plEnableMsg::kDisable));
        return true;
    }

    return plObjInterface::MsgReceive(msg);
}

// Deserialize: colors, light-space transforms (local<->world are derived,
// not stored), then keyed refs to projection layer, soft volume, scene node,
// and the vis region list.  Field order must match Write exactly.
void plLightInfo::Read(hsStream* s, hsResMgr* mgr)
{
    hsRefCnt_SafeUnRef( fDeviceRef );
    fDeviceRef = nil;

    plObjInterface::Read(s, mgr);

    fAmbient.Read(s);
    fDiffuse.Read(s);
    fSpecular.Read(s);

    fLightToLocal.Read(s);
    fLocalToLight.Read(s);

    fLightToWorld.Read(s);
    fWorldToLight.Read(s);

    // Derived, not serialized.
    fLocalToWorld = fLightToWorld * fLocalToLight;
    fWorldToLocal = fLightToLocal * fWorldToLight;

    mgr->ReadKeyNotifyMe(s, new plGenRefMsg(GetKey(), plRefMsg::kOnCreate, 0, kProjection), plRefFlags::kActiveRef);

    mgr->ReadKeyNotifyMe(s, new plGenRefMsg(GetKey(), plRefMsg::kOnCreate, 0, kSoftVolume), plRefFlags::kActiveRef);

    // Let our sceneNode know we're here.
    plKey nodeKey = mgr->ReadKey(s);
    ISetSceneNode(nodeKey);

    int n = s->ReadLE32();
    fVisRegions.SetCountAndZero(n);
    int i;
    for( i = 0; i < n; i++ )
        mgr->ReadKeyNotifyMe(s, new plGenRefMsg(GetKey(), plRefMsg::kOnCreate, 0, kVisRegion), plRefFlags::kActiveRef);

    SetDirty(true);
}

// Serialize: mirror image of Read (same field order).
void plLightInfo::Write(hsStream* s, hsResMgr* mgr)
{
    plObjInterface::Write(s, mgr);

    fAmbient.Write(s);
    fDiffuse.Write(s);
    fSpecular.Write(s);

    fLightToLocal.Write(s);
    fLocalToLight.Write(s);

    fLightToWorld.Write(s);
    fWorldToLight.Write(s);

    mgr->WriteKey(s, GetProjection());

    mgr->WriteKey(s, fSoftVolume);

    mgr->WriteKey(s, fSceneNode);

    s->WriteLE32(fVisRegions.GetCount());
    int i;
    for( i = 0; i < fVisRegions.GetCount(); i++ )
        mgr->WriteKey(s, fVisRegions[i]);
}

// These two should only be called by the SceneObject
// Attach/detach this light to a scene node (passive keyed ref) and manage
// our dispatch registrations: registered only while owned by a node.
void plLightInfo::ISetSceneNode(plKey node)
{
    if( node != fSceneNode )
    {
        if( node )
        {
            plNodeRefMsg* refMsg = new plNodeRefMsg(node, plRefMsg::kOnCreate, -1, plNodeRefMsg::kLight);
            hsgResMgr::ResMgr()->AddViaNotify(GetKey(), refMsg, plRefFlags::kPassiveRef);
        }
        if( fSceneNode )
        {
            fSceneNode->Release(GetKey());
        }
    }
    fSceneNode = node;

    if( fSceneNode != nil )
    {
        if( !fRegisteredForRenderMsg )
        {
            plgDispatch::Dispatch()->RegisterForExactType(plRenderMsg::Index(), GetKey());
            plgDispatch::Dispatch()->RegisterForExactType(plPipeRTMakeMsg::Index(), GetKey());
            fRegisteredForRenderMsg = true;
        }
    }
    else if( fRegisteredForRenderMsg )
    {
        plgDispatch::Dispatch()->UnRegisterForExactType(plRenderMsg::Index(), GetKey());
        plgDispatch::Dispatch()->UnRegisterForExactType(plPipeRTMakeMsg::Index(), GetKey());
        fRegisteredForRenderMsg = false;
    }
}

plKey plLightInfo::GetSceneNode() const
{
    return fSceneNode;
}

//// Link & Unlink ///////////////////////////////////////////////////////
// Intrusive doubly-linked list of lights owned by the pipeline;
// fPrevDevPtr points at the previous node's fNextDevPtr (or the list head).

void plLightInfo::Unlink()
{
    hsAssert( fPrevDevPtr, "Light info not in list" );
    if( fNextDevPtr )
        fNextDevPtr->fPrevDevPtr = fPrevDevPtr;
    *fPrevDevPtr = fNextDevPtr;

    fNextDevPtr = nil;
    fPrevDevPtr = nil;
}

void plLightInfo::Link( plLightInfo **back )
{
    hsAssert( fNextDevPtr == nil && fPrevDevPtr == nil, "Trying to link a lightInfo that's already linked" );

    fNextDevPtr = *back;
    if( *back )
        (*back)->fPrevDevPtr = &fNextDevPtr;
    fPrevDevPtr = back;
    *back = this;
}

//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
///// Standard light types
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////

// Directional

plDirectionalLightInfo::plDirectionalLightInfo()
{
}

plDirectionalLightInfo::~plDirectionalLightInfo()
{
}

// Directional lights have no falloff: base-class behavior only.
void plDirectionalLightInfo::GetStrengthAndScale(const hsBounds3Ext& bnd, float& strength, float& scale) const
{
    plLightInfo::GetStrengthAndScale(bnd, strength, scale);
}

void plDirectionalLightInfo::Read(hsStream* s, hsResMgr* mgr)
{
    plLightInfo::Read(s, mgr);
}

void plDirectionalLightInfo::Write(hsStream* s, hsResMgr* mgr)
{
    plLightInfo::Write(s, mgr);
}

// The light shines along its negative "up" axis in world space.
hsVector3 plDirectionalLightInfo::GetWorldDirection() const
{
    return -fLightToWorld.GetAxis( hsMatrix44::kUp );
}

//////////////////////////////////////////////////////////////////////////
// Limited Directional
// A directional light restricted to a width x height x depth box.

plLimitedDirLightInfo::plLimitedDirLightInfo()
:   fParPlanes(nil)
{
}

plLimitedDirLightInfo::~plLimitedDirLightInfo()
{
    delete fParPlanes;
}

// Rebuild the parallel-plane isect volume and, when projecting, the
// orthographic world-to-projection matrix mapping the box to [0,1] UVs.
void plLimitedDirLightInfo::IRefresh()
{
    plLightInfo::IRefresh();

    if( !IGetIsect() )
        IMakeIsect();

    if( GetProjection() )
    {
        hsMatrix44 l2ndc;
        l2ndc.Reset();

        float width = fWidth;
        float height = fHeight;

        l2ndc.fMap[0][0] = 1.f / width;
        l2ndc.fMap[0][3] = 0.5f;

        l2ndc.fMap[1][1] = -1.f / height;
        l2ndc.fMap[1][3] = 0.5f;

        // Map Screen Z to range 0 (at hither) to 1 (at yon)
        // No, map z dead on to 1.f
        l2ndc.fMap[2][2] = 1.f / fDepth;
        l2ndc.fMap[2][3] = 0;

        l2ndc.fMap[3][3] = 1.f;
        l2ndc.NotIdentity();

        fWorldToProj = l2ndc * fWorldToLight;
    }
}

// If we haven't culled the object, return that we're full strength.
void plLimitedDirLightInfo::GetStrengthAndScale(const hsBounds3Ext& bnd, float& strength, float& scale) const
{
    plLightInfo::GetStrengthAndScale(bnd, strength, scale);
}

// Serialization adds the box dimensions to the directional-light fields.
void plLimitedDirLightInfo::Read(hsStream* s, hsResMgr* mgr)
{
    plDirectionalLightInfo::Read(s, mgr);

    fWidth = s->ReadLEScalar();
    fHeight = s->ReadLEScalar();
    fDepth = s->ReadLEScalar();
}

void plLimitedDirLightInfo::Write(hsStream* s, hsResMgr* mgr)
{
    plDirectionalLightInfo::Write(s, mgr);

    s->WriteLEScalar(fWidth);
    s->WriteLEScalar(fHeight);
    s->WriteLEScalar(fDepth);
}

// Build the three parallel slab planes (X: +-width/2, Y: +-height/2,
// Z: 0..-depth) bounding the light's influence box.
void plLimitedDirLightInfo::IMakeIsect()
{
    if( !fParPlanes )
        fParPlanes = new plParallelIsect;

    fParPlanes->SetNumPlanes(3);

    hsPoint3 p0, p1;

    float width = fWidth;
    float height = fHeight;
    p0.Set(-width*0.5f, 0, 0);
    p1.Set(width*0.5f, 0, 0);
    fParPlanes->SetPlane(0, p0, p1);

    p0.Set(0, -height * 0.5f, 0);
    p1.Set(0, height * 0.5f, 0);
    fParPlanes->SetPlane(1, p0, p1);

    p0.Set(0, 0, 0);
    p1.Set(0, 0, -fDepth);
    fParPlanes->SetPlane(2, p0, p1);

    fParPlanes->SetTransform(fLightToWorld, fWorldToLight);
}

//// ICreateProxy //////////////////////////////////////////////////////
//  Creates a new box drawable for showing the light's
//  influence.
// Debug proxy: a box matching the limited-directional light's influence
// volume, appended to (or creating) the given drawable.
plDrawableSpans* plLimitedDirLightInfo::CreateProxy(hsGMaterial* mat, hsTArray<uint32_t>& idx, plDrawableSpans* addTo)
{
    hsPoint3 corner;
    corner.Set(-fWidth*0.5f, -fHeight*0.5f, -fDepth);
    hsVector3 vecs[3];
    vecs[0].Set(fWidth, 0, 0);
    vecs[1].Set(0, fHeight, 0);
    vecs[2].Set(0, 0, fDepth);
    // Generate a rectangular drawable based on a corner and three vectors
    plDrawableSpans* draw = plDrawableGenerator::GenerateBoxDrawable( corner, vecs[0], vecs[1], vecs[2], mat, fLightToWorld, true, nil, &idx, addTo );

    return draw;
}

//////////////////////////////////////////////////////////////////////////
// Omni
// Point light with constant/linear/quadratic attenuation or a hard cutoff.

plOmniLightInfo::plOmniLightInfo()
:   fAttenConst(0),
    fAttenLinear(1.f),
    fAttenQuadratic(0),
    fAttenCutoff(0),
    fSphere(nil)
{
}

plOmniLightInfo::~plOmniLightInfo()
{
    delete fSphere;
}

// Build the sphere isect volume.  Callers guard with !fSphere (see IRefresh);
// calling this with fSphere already set would leak the old sphere.
void plOmniLightInfo::IMakeIsect()
{
    fSphere = new plSphereIsect;
    fSphere->SetTransform(fLightToWorld, fWorldToLight);
}

// Attenuate the base strength by distance from the light to the bounds'
// center, using whichever attenuation mode is active.
void plOmniLightInfo::GetStrengthAndScale(const hsBounds3Ext& bnd, float& strength, float& scale) const
{
    plLightInfo::GetStrengthAndScale(bnd, strength, scale);

    // Volume - Want to base this on the closest point on the bounds, instead of just the center.
    const hsPoint3& pos = bnd.GetCenter();

    hsPoint3 wpos = GetWorldPosition();
    float dist = hsVector3(&pos, &wpos).MagnitudeSquared();
    // Recover the actual distance from its square via approx inverse sqrt.
    dist = 1.f / hsFastMath::InvSqrtAppr(dist);

    if( fAttenQuadratic > 0 )
    {
        strength /= (fAttenConst + fAttenLinear * dist + fAttenQuadratic * dist * dist);
    }
    else if( fAttenLinear > 0 )
    {
        strength /= (fAttenConst + fAttenLinear * dist);
    }
    else if( fAttenConst > 0 )
    {
        strength /= fAttenConst;
    }
    else if( fAttenCutoff > 0 )
    {
        if( dist > fAttenCutoff )
            strength = 0;
    }
}

// Effective radius: where the attenuated intensity falls to the engine's
// "far power" threshold (solved analytically per attenuation mode),
// or the hard cutoff distance.  Zero means "unbounded/none computed".
float plOmniLightInfo::GetRadius() const
{
    float radius = 0;

    if( fAttenQuadratic > 0 )
    {
        float mult = fDiffuse.a >= 0 ? fDiffuse.a : -fDiffuse.a;
        // Quadratic-formula discriminant for aQ*r^2 + aL*r + aC = mult*farPower.
        float det = fAttenLinear*fAttenLinear - 4.f * fAttenQuadratic * fAttenConst * (1.f - mult * plSillyLightKonstants::GetFarPowerKonst());
        if( det > 0 )
        {
            det = sqrt(det);

            radius = -fAttenLinear + det;
            radius /= fAttenQuadratic * 2.f;
            if( radius < 0 )
                radius = 0;
        }
    }
    else if( fAttenLinear > 0 )
    {
        float mult = fDiffuse.a >= 0 ? fDiffuse.a : -fDiffuse.a;
        radius = (mult * plSillyLightKonstants::GetFarPowerKonst() - 1.f ) * fAttenConst / fAttenLinear;
    }
    else if( fAttenCutoff > 0 )
    {
        radius = fAttenCutoff;
    }

    return radius;
}

// Keep the bounding sphere in sync with the current attenuation; drop it
// entirely for unattenuated (infinite-range) omnis.
void plOmniLightInfo::IRefresh()
{
    plLightInfo::IRefresh();

    if( IsAttenuated() )
    {
        if( !fSphere )
            IMakeIsect();
        fSphere->SetRadius(GetRadius());
    }
    else
    {
        delete fSphere;
        fSphere = nil;
    }
}

// Approximately-normalized direction from 'pos' toward the light.
hsVector3 plOmniLightInfo::GetNegativeWorldDirection(const hsPoint3& pos) const
{
    hsPoint3 wpos = GetWorldPosition();
    hsVector3 tmp(&wpos, &pos);
    return hsFastMath::NormalizeAppr(tmp);
}

// Serialization adds the four attenuation terms.  Order must match Write.
void plOmniLightInfo::Read(hsStream* s, hsResMgr* mgr)
{
    plLightInfo::Read(s, mgr);

    fAttenConst = s->ReadLEScalar();
    fAttenLinear = s->ReadLEScalar();
    fAttenQuadratic = s->ReadLEScalar();
    fAttenCutoff = s->ReadLEScalar();
}

void plOmniLightInfo::Write(hsStream* s, hsResMgr* mgr)
{
    plLightInfo::Write(s, mgr);

    s->WriteLEScalar(fAttenConst);
    s->WriteLEScalar(fAttenLinear);
    s->WriteLEScalar(fAttenQuadratic);
    s->WriteLEScalar( fAttenCutoff );
}

//// ICreateProxy //////////////////////////////////////////////////////
//  Creates a new sphere drawable for showing the omnilight's
//  sphere (haha) of influence.
plDrawableSpans* plOmniLightInfo::CreateProxy(hsGMaterial* mat, hsTArray<uint32_t>& idx, plDrawableSpans* addTo)
{
    float rad = GetRadius();
    if( rad == 0 )
        rad = 50;   // arbitrary visualization radius for unbounded lights

    plDrawableSpans* draw = plDrawableGenerator::GenerateSphericalDrawable(hsPoint3(0,0,0), rad, mat, fLightToWorld, true, nil, &idx, addTo);

    return draw;
}

//////////////////////////////////////////////////////////////////////////
// Spot
// Omni light restricted to a cone (inner/outer angles in radians).

plSpotLightInfo::plSpotLightInfo()
:   fFalloff(1.f),
    fSpotInner(M_PI * 0.125f),
    fSpotOuter(M_PI * 0.25f),
    fCone(nil)
{
}

plSpotLightInfo::~plSpotLightInfo()
{
    delete fCone;
}

// Distance attenuation from the omni base, then angular falloff: zero
// outside the outer cone, linear ramp between outer and inner cone.
void plSpotLightInfo::GetStrengthAndScale(const hsBounds3Ext& bnd, float& strength, float& scale) const
{
    plOmniLightInfo::GetStrengthAndScale(bnd, strength, scale);

    // Volume - Want to base this on the closest point on the bounds, instead of just the center.
    const hsPoint3& pos = bnd.GetCenter();

    hsVector3 del;
    hsPoint3 wpos = GetWorldPosition();
    del.Set(&pos, &wpos);
    float invDist = del.MagnitudeSquared();
    invDist = hsFastMath::InvSqrtAppr(invDist);

    // Cosine of angle between light axis and direction to the target.
    float dot = del.InnerProduct(GetWorldDirection());
    dot *= invDist;

    float cosInner, cosOuter, t;
    hsFastMath::SinCosInRangeAppr(fSpotInner, t, cosInner);
    hsFastMath::SinCosInRangeAppr(fSpotOuter, t, cosOuter);
    if( dot < cosOuter )
        strength = 0;
    else if( dot < cosInner )
        strength *= (dot - cosOuter) / (cosInner - cosOuter);
}

void plSpotLightInfo::IMakeIsect()
{
    fCone = new plConeIsect;
    fCone->SetTransform(fLightToWorld, fWorldToLight);
}

// Rebuild the cone isect volume and, when projecting, the perspective
// world-to-projection matrix from the outer cone angle and radius.
// NOTE(review): the matrix below sets no fMap[2][3] translation term for
// the hither plane — confirm this matches the intended depth mapping.
void plSpotLightInfo::IRefresh()
{
    plLightInfo::IRefresh();

    if( !fCone )
        IMakeIsect();

    float effFOV = fSpotOuter;
    fCone->SetAngle(effFOV);


    if( IsAttenuated() )
    {
        fCone->SetLength(GetRadius());
        fCone->SetTransform(fLightToWorld, fWorldToLight);
    }

    if( GetProjection() )
    {
        float yon = GetRadius();
        if( yon < kMinHither )
            yon = kMaxYon;
        float hither = std::min(kMinHither, yon * 0.5f);

        float sinFOV, cosFOV;
        hsFastMath::SinCos(effFOV, sinFOV, cosFOV);

        hsMatrix44 l2ndc;
        l2ndc.Reset();

        l2ndc.fMap[0][0] = cosFOV / sinFOV * 0.5f;
        l2ndc.fMap[0][2] = -0.5f;

        l2ndc.fMap[1][1] = -cosFOV / sinFOV * 0.5f;
        l2ndc.fMap[1][2] = -0.5f;

        l2ndc.fMap[2][2] = -yon / (yon - hither);

        l2ndc.fMap[3][3] = 0;
        l2ndc.fMap[3][2] = -1.f;

        l2ndc.NotIdentity();

        fWorldToProj = l2ndc * fWorldToLight;
    }
}

// Serialization adds falloff and the two cone angles.  Order must match Write.
void plSpotLightInfo::Read(hsStream* s, hsResMgr* mgr)
{
    plOmniLightInfo::Read(s, mgr);

    fFalloff = s->ReadLEScalar();
    fSpotInner = s->ReadLEScalar();
    fSpotOuter = s->ReadLEScalar();
}

void plSpotLightInfo::Write(hsStream* s, hsResMgr* mgr)
{
    plOmniLightInfo::Write(s, mgr);

    s->WriteLEScalar(fFalloff);
    s->WriteLEScalar(fSpotInner);
    s->WriteLEScalar(fSpotOuter);
}

// The light shines along its negative "up" axis in world space.
hsVector3 plSpotLightInfo::GetWorldDirection() const
{
    return -fLightToWorld.GetAxis( hsMatrix44::kUp );
}

//// ICreateProxy //////////////////////////////////////////////////////
//  Generates a new drawable for showing the spotlight's
//  cone of influence.
plDrawableSpans* plSpotLightInfo::CreateProxy(hsGMaterial* mat, hsTArray<uint32_t>& idx, plDrawableSpans* addTo)
{
    float rad = GetRadius();
    float x, y;

    if( rad == 0 )
        rad = 80;   // arbitrary visualization length for unbounded lights

    hsFastMath::SinCosAppr( GetSpotOuter(), x, y );

    plDrawableSpans* draw = plDrawableGenerator::GenerateConicalDrawable(rad * x / y, -rad, mat, fLightToWorld, true, nil, &idx, addTo);

    return draw;
}
{ "pile_set_name": "Github" }
package android.hardware.location; import android.annotation.SystemApi; import android.net.wifi.WifiEnterpriseConfig; import android.os.Parcel; import android.os.Parcelable; import android.telecom.Logging.Session; import java.util.Arrays; @SystemApi public final class NanoAppMessage implements Parcelable { public static final Parcelable.Creator<NanoAppMessage> CREATOR = new Parcelable.Creator<NanoAppMessage>() { /* class android.hardware.location.NanoAppMessage.AnonymousClass1 */ @Override // android.os.Parcelable.Creator public NanoAppMessage createFromParcel(Parcel in) { return new NanoAppMessage(in); } @Override // android.os.Parcelable.Creator public NanoAppMessage[] newArray(int size) { return new NanoAppMessage[size]; } }; private static final int DEBUG_LOG_NUM_BYTES = 16; private boolean mIsBroadcasted; private byte[] mMessageBody; private int mMessageType; private long mNanoAppId; private NanoAppMessage(long nanoAppId, int messageType, byte[] messageBody, boolean broadcasted) { this.mNanoAppId = nanoAppId; this.mMessageType = messageType; this.mMessageBody = messageBody; this.mIsBroadcasted = broadcasted; } public static NanoAppMessage createMessageToNanoApp(long targetNanoAppId, int messageType, byte[] messageBody) { return new NanoAppMessage(targetNanoAppId, messageType, messageBody, false); } public static NanoAppMessage createMessageFromNanoApp(long sourceNanoAppId, int messageType, byte[] messageBody, boolean broadcasted) { return new NanoAppMessage(sourceNanoAppId, messageType, messageBody, broadcasted); } public long getNanoAppId() { return this.mNanoAppId; } public int getMessageType() { return this.mMessageType; } public byte[] getMessageBody() { return this.mMessageBody; } public boolean isBroadcastMessage() { return this.mIsBroadcasted; } private NanoAppMessage(Parcel in) { this.mNanoAppId = in.readLong(); this.mIsBroadcasted = in.readInt() != 1 ? 
false : true; this.mMessageType = in.readInt(); this.mMessageBody = new byte[in.readInt()]; in.readByteArray(this.mMessageBody); } @Override // android.os.Parcelable public int describeContents() { return 0; } @Override // android.os.Parcelable public void writeToParcel(Parcel out, int flags) { out.writeLong(this.mNanoAppId); out.writeInt(this.mIsBroadcasted ? 1 : 0); out.writeInt(this.mMessageType); out.writeInt(this.mMessageBody.length); out.writeByteArray(this.mMessageBody); } public String toString() { String ret; int length = this.mMessageBody.length; StringBuilder sb = new StringBuilder(); sb.append("NanoAppMessage[type = "); sb.append(this.mMessageType); sb.append(", length = "); sb.append(this.mMessageBody.length); sb.append(" bytes, "); sb.append(this.mIsBroadcasted ? "broadcast" : "unicast"); sb.append(", nanoapp = 0x"); sb.append(Long.toHexString(this.mNanoAppId)); sb.append("]("); String ret2 = sb.toString(); if (length > 0) { ret2 = ret2 + "data = 0x"; } for (int i = 0; i < Math.min(length, 16); i++) { ret = ret + Byte.toHexString(this.mMessageBody[i], true); if ((i + 1) % 4 == 0) { ret = ret + WifiEnterpriseConfig.CA_CERT_ALIAS_DELIMITER; } } if (length > 16) { ret = ret + Session.TRUNCATE_STRING; } return ret + ")"; } public boolean equals(Object object) { boolean isEqual = true; if (object == this) { return true; } if (!(object instanceof NanoAppMessage)) { return false; } NanoAppMessage other = (NanoAppMessage) object; if (!(other.getNanoAppId() == this.mNanoAppId && other.getMessageType() == this.mMessageType && other.isBroadcastMessage() == this.mIsBroadcasted && Arrays.equals(other.getMessageBody(), this.mMessageBody))) { isEqual = false; } return isEqual; } }
{ "pile_set_name": "Github" }
{ "auto": { "EMotionFXSDK/MCore/Platform/Windows": [ "../Common/WinAPI/MCore/Source/DiskFile_WinAPI.cpp" ] } }
{ "pile_set_name": "Github" }
tornado>=4.5.3
{ "pile_set_name": "Github" }
package colorable import ( "bytes" "io" ) // NonColorable hold writer but remove escape sequence. type NonColorable struct { out io.Writer } // NewNonColorable return new instance of Writer which remove escape sequence from Writer. func NewNonColorable(w io.Writer) io.Writer { return &NonColorable{out: w} } // Write write data on console func (w *NonColorable) Write(data []byte) (n int, err error) { er := bytes.NewReader(data) var bw [1]byte loop: for { c1, err := er.ReadByte() if err != nil { break loop } if c1 != 0x1b { bw[0] = c1 w.out.Write(bw[:]) continue } c2, err := er.ReadByte() if err != nil { break loop } if c2 != 0x5b { continue } var buf bytes.Buffer for { c, err := er.ReadByte() if err != nil { break loop } if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { break } buf.Write([]byte(string(c))) } } return len(data), nil }
{ "pile_set_name": "Github" }
# # (C) Copyright 2000, 2001, 2002 # Wolfgang Denk, DENX Software Engineering, [email protected]. # # Copyright (C) 2007 Sergey Kubushyn <[email protected]> # # SPDX-License-Identifier: GPL-2.0+ # obj-y += legoev3.o
{ "pile_set_name": "Github" }
// go run mksysnum.go https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master // Code generated by the command above; see README.md. DO NOT EDIT. // +build amd64,openbsd package unix const ( SYS_EXIT = 1 // { void sys_exit(int rval); } SYS_FORK = 2 // { int sys_fork(void); } SYS_READ = 3 // { ssize_t sys_read(int fd, void *buf, size_t nbyte); } SYS_WRITE = 4 // { ssize_t sys_write(int fd, const void *buf, size_t nbyte); } SYS_OPEN = 5 // { int sys_open(const char *path, int flags, ... mode_t mode); } SYS_CLOSE = 6 // { int sys_close(int fd); } SYS_GETENTROPY = 7 // { int sys_getentropy(void *buf, size_t nbyte); } SYS___TFORK = 8 // { int sys___tfork(const struct __tfork *param, size_t psize); } SYS_LINK = 9 // { int sys_link(const char *path, const char *link); } SYS_UNLINK = 10 // { int sys_unlink(const char *path); } SYS_WAIT4 = 11 // { pid_t sys_wait4(pid_t pid, int *status, int options, struct rusage *rusage); } SYS_CHDIR = 12 // { int sys_chdir(const char *path); } SYS_FCHDIR = 13 // { int sys_fchdir(int fd); } SYS_MKNOD = 14 // { int sys_mknod(const char *path, mode_t mode, dev_t dev); } SYS_CHMOD = 15 // { int sys_chmod(const char *path, mode_t mode); } SYS_CHOWN = 16 // { int sys_chown(const char *path, uid_t uid, gid_t gid); } SYS_OBREAK = 17 // { int sys_obreak(char *nsize); } break SYS_GETDTABLECOUNT = 18 // { int sys_getdtablecount(void); } SYS_GETRUSAGE = 19 // { int sys_getrusage(int who, struct rusage *rusage); } SYS_GETPID = 20 // { pid_t sys_getpid(void); } SYS_MOUNT = 21 // { int sys_mount(const char *type, const char *path, int flags, void *data); } SYS_UNMOUNT = 22 // { int sys_unmount(const char *path, int flags); } SYS_SETUID = 23 // { int sys_setuid(uid_t uid); } SYS_GETUID = 24 // { uid_t sys_getuid(void); } SYS_GETEUID = 25 // { uid_t sys_geteuid(void); } SYS_PTRACE = 26 // { int sys_ptrace(int req, pid_t pid, caddr_t addr, int data); } SYS_RECVMSG = 27 // { ssize_t sys_recvmsg(int s, struct msghdr *msg, int 
flags); } SYS_SENDMSG = 28 // { ssize_t sys_sendmsg(int s, const struct msghdr *msg, int flags); } SYS_RECVFROM = 29 // { ssize_t sys_recvfrom(int s, void *buf, size_t len, int flags, struct sockaddr *from, socklen_t *fromlenaddr); } SYS_ACCEPT = 30 // { int sys_accept(int s, struct sockaddr *name, socklen_t *anamelen); } SYS_GETPEERNAME = 31 // { int sys_getpeername(int fdes, struct sockaddr *asa, socklen_t *alen); } SYS_GETSOCKNAME = 32 // { int sys_getsockname(int fdes, struct sockaddr *asa, socklen_t *alen); } SYS_ACCESS = 33 // { int sys_access(const char *path, int amode); } SYS_CHFLAGS = 34 // { int sys_chflags(const char *path, u_int flags); } SYS_FCHFLAGS = 35 // { int sys_fchflags(int fd, u_int flags); } SYS_SYNC = 36 // { void sys_sync(void); } SYS_STAT = 38 // { int sys_stat(const char *path, struct stat *ub); } SYS_GETPPID = 39 // { pid_t sys_getppid(void); } SYS_LSTAT = 40 // { int sys_lstat(const char *path, struct stat *ub); } SYS_DUP = 41 // { int sys_dup(int fd); } SYS_FSTATAT = 42 // { int sys_fstatat(int fd, const char *path, struct stat *buf, int flag); } SYS_GETEGID = 43 // { gid_t sys_getegid(void); } SYS_PROFIL = 44 // { int sys_profil(caddr_t samples, size_t size, u_long offset, u_int scale); } SYS_KTRACE = 45 // { int sys_ktrace(const char *fname, int ops, int facs, pid_t pid); } SYS_SIGACTION = 46 // { int sys_sigaction(int signum, const struct sigaction *nsa, struct sigaction *osa); } SYS_GETGID = 47 // { gid_t sys_getgid(void); } SYS_SIGPROCMASK = 48 // { int sys_sigprocmask(int how, sigset_t mask); } SYS_SETLOGIN = 50 // { int sys_setlogin(const char *namebuf); } SYS_ACCT = 51 // { int sys_acct(const char *path); } SYS_SIGPENDING = 52 // { int sys_sigpending(void); } SYS_FSTAT = 53 // { int sys_fstat(int fd, struct stat *sb); } SYS_IOCTL = 54 // { int sys_ioctl(int fd, u_long com, ... 
void *data); } SYS_REBOOT = 55 // { int sys_reboot(int opt); } SYS_REVOKE = 56 // { int sys_revoke(const char *path); } SYS_SYMLINK = 57 // { int sys_symlink(const char *path, const char *link); } SYS_READLINK = 58 // { ssize_t sys_readlink(const char *path, char *buf, size_t count); } SYS_EXECVE = 59 // { int sys_execve(const char *path, char * const *argp, char * const *envp); } SYS_UMASK = 60 // { mode_t sys_umask(mode_t newmask); } SYS_CHROOT = 61 // { int sys_chroot(const char *path); } SYS_GETFSSTAT = 62 // { int sys_getfsstat(struct statfs *buf, size_t bufsize, int flags); } SYS_STATFS = 63 // { int sys_statfs(const char *path, struct statfs *buf); } SYS_FSTATFS = 64 // { int sys_fstatfs(int fd, struct statfs *buf); } SYS_FHSTATFS = 65 // { int sys_fhstatfs(const fhandle_t *fhp, struct statfs *buf); } SYS_VFORK = 66 // { int sys_vfork(void); } SYS_GETTIMEOFDAY = 67 // { int sys_gettimeofday(struct timeval *tp, struct timezone *tzp); } SYS_SETTIMEOFDAY = 68 // { int sys_settimeofday(const struct timeval *tv, const struct timezone *tzp); } SYS_SETITIMER = 69 // { int sys_setitimer(int which, const struct itimerval *itv, struct itimerval *oitv); } SYS_GETITIMER = 70 // { int sys_getitimer(int which, struct itimerval *itv); } SYS_SELECT = 71 // { int sys_select(int nd, fd_set *in, fd_set *ou, fd_set *ex, struct timeval *tv); } SYS_KEVENT = 72 // { int sys_kevent(int fd, const struct kevent *changelist, int nchanges, struct kevent *eventlist, int nevents, const struct timespec *timeout); } SYS_MUNMAP = 73 // { int sys_munmap(void *addr, size_t len); } SYS_MPROTECT = 74 // { int sys_mprotect(void *addr, size_t len, int prot); } SYS_MADVISE = 75 // { int sys_madvise(void *addr, size_t len, int behav); } SYS_UTIMES = 76 // { int sys_utimes(const char *path, const struct timeval *tptr); } SYS_FUTIMES = 77 // { int sys_futimes(int fd, const struct timeval *tptr); } SYS_MINCORE = 78 // { int sys_mincore(void *addr, size_t len, char *vec); } SYS_GETGROUPS = 79 // { int 
sys_getgroups(int gidsetsize, gid_t *gidset); } SYS_SETGROUPS = 80 // { int sys_setgroups(int gidsetsize, const gid_t *gidset); } SYS_GETPGRP = 81 // { int sys_getpgrp(void); } SYS_SETPGID = 82 // { int sys_setpgid(pid_t pid, pid_t pgid); } SYS_FUTEX = 83 // { int sys_futex(uint32_t *f, int op, int val, const struct timespec *timeout, uint32_t *g); } SYS_UTIMENSAT = 84 // { int sys_utimensat(int fd, const char *path, const struct timespec *times, int flag); } SYS_FUTIMENS = 85 // { int sys_futimens(int fd, const struct timespec *times); } SYS_KBIND = 86 // { int sys_kbind(const struct __kbind *param, size_t psize, int64_t proc_cookie); } SYS_CLOCK_GETTIME = 87 // { int sys_clock_gettime(clockid_t clock_id, struct timespec *tp); } SYS_CLOCK_SETTIME = 88 // { int sys_clock_settime(clockid_t clock_id, const struct timespec *tp); } SYS_CLOCK_GETRES = 89 // { int sys_clock_getres(clockid_t clock_id, struct timespec *tp); } SYS_DUP2 = 90 // { int sys_dup2(int from, int to); } SYS_NANOSLEEP = 91 // { int sys_nanosleep(const struct timespec *rqtp, struct timespec *rmtp); } SYS_FCNTL = 92 // { int sys_fcntl(int fd, int cmd, ... 
void *arg); } SYS_ACCEPT4 = 93 // { int sys_accept4(int s, struct sockaddr *name, socklen_t *anamelen, int flags); } SYS___THRSLEEP = 94 // { int sys___thrsleep(const volatile void *ident, clockid_t clock_id, const struct timespec *tp, void *lock, const int *abort); } SYS_FSYNC = 95 // { int sys_fsync(int fd); } SYS_SETPRIORITY = 96 // { int sys_setpriority(int which, id_t who, int prio); } SYS_SOCKET = 97 // { int sys_socket(int domain, int type, int protocol); } SYS_CONNECT = 98 // { int sys_connect(int s, const struct sockaddr *name, socklen_t namelen); } SYS_GETDENTS = 99 // { int sys_getdents(int fd, void *buf, size_t buflen); } SYS_GETPRIORITY = 100 // { int sys_getpriority(int which, id_t who); } SYS_PIPE2 = 101 // { int sys_pipe2(int *fdp, int flags); } SYS_DUP3 = 102 // { int sys_dup3(int from, int to, int flags); } SYS_SIGRETURN = 103 // { int sys_sigreturn(struct sigcontext *sigcntxp); } SYS_BIND = 104 // { int sys_bind(int s, const struct sockaddr *name, socklen_t namelen); } SYS_SETSOCKOPT = 105 // { int sys_setsockopt(int s, int level, int name, const void *val, socklen_t valsize); } SYS_LISTEN = 106 // { int sys_listen(int s, int backlog); } SYS_CHFLAGSAT = 107 // { int sys_chflagsat(int fd, const char *path, u_int flags, int atflags); } SYS_PLEDGE = 108 // { int sys_pledge(const char *promises, const char *execpromises); } SYS_PPOLL = 109 // { int sys_ppoll(struct pollfd *fds, u_int nfds, const struct timespec *ts, const sigset_t *mask); } SYS_PSELECT = 110 // { int sys_pselect(int nd, fd_set *in, fd_set *ou, fd_set *ex, const struct timespec *ts, const sigset_t *mask); } SYS_SIGSUSPEND = 111 // { int sys_sigsuspend(int mask); } SYS_SENDSYSLOG = 112 // { int sys_sendsyslog(const char *buf, size_t nbyte, int flags); } SYS_UNVEIL = 114 // { int sys_unveil(const char *path, const char *permissions); } SYS_GETSOCKOPT = 118 // { int sys_getsockopt(int s, int level, int name, void *val, socklen_t *avalsize); } SYS_THRKILL = 119 // { int sys_thrkill(pid_t 
tid, int signum, void *tcb); } SYS_READV = 120 // { ssize_t sys_readv(int fd, const struct iovec *iovp, int iovcnt); } SYS_WRITEV = 121 // { ssize_t sys_writev(int fd, const struct iovec *iovp, int iovcnt); } SYS_KILL = 122 // { int sys_kill(int pid, int signum); } SYS_FCHOWN = 123 // { int sys_fchown(int fd, uid_t uid, gid_t gid); } SYS_FCHMOD = 124 // { int sys_fchmod(int fd, mode_t mode); } SYS_SETREUID = 126 // { int sys_setreuid(uid_t ruid, uid_t euid); } SYS_SETREGID = 127 // { int sys_setregid(gid_t rgid, gid_t egid); } SYS_RENAME = 128 // { int sys_rename(const char *from, const char *to); } SYS_FLOCK = 131 // { int sys_flock(int fd, int how); } SYS_MKFIFO = 132 // { int sys_mkfifo(const char *path, mode_t mode); } SYS_SENDTO = 133 // { ssize_t sys_sendto(int s, const void *buf, size_t len, int flags, const struct sockaddr *to, socklen_t tolen); } SYS_SHUTDOWN = 134 // { int sys_shutdown(int s, int how); } SYS_SOCKETPAIR = 135 // { int sys_socketpair(int domain, int type, int protocol, int *rsv); } SYS_MKDIR = 136 // { int sys_mkdir(const char *path, mode_t mode); } SYS_RMDIR = 137 // { int sys_rmdir(const char *path); } SYS_ADJTIME = 140 // { int sys_adjtime(const struct timeval *delta, struct timeval *olddelta); } SYS_GETLOGIN_R = 141 // { int sys_getlogin_r(char *namebuf, u_int namelen); } SYS_SETSID = 147 // { int sys_setsid(void); } SYS_QUOTACTL = 148 // { int sys_quotactl(const char *path, int cmd, int uid, char *arg); } SYS_NFSSVC = 155 // { int sys_nfssvc(int flag, void *argp); } SYS_GETFH = 161 // { int sys_getfh(const char *fname, fhandle_t *fhp); } SYS_SYSARCH = 165 // { int sys_sysarch(int op, void *parms); } SYS_PREAD = 173 // { ssize_t sys_pread(int fd, void *buf, size_t nbyte, int pad, off_t offset); } SYS_PWRITE = 174 // { ssize_t sys_pwrite(int fd, const void *buf, size_t nbyte, int pad, off_t offset); } SYS_SETGID = 181 // { int sys_setgid(gid_t gid); } SYS_SETEGID = 182 // { int sys_setegid(gid_t egid); } SYS_SETEUID = 183 // { int 
sys_seteuid(uid_t euid); } SYS_PATHCONF = 191 // { long sys_pathconf(const char *path, int name); } SYS_FPATHCONF = 192 // { long sys_fpathconf(int fd, int name); } SYS_SWAPCTL = 193 // { int sys_swapctl(int cmd, const void *arg, int misc); } SYS_GETRLIMIT = 194 // { int sys_getrlimit(int which, struct rlimit *rlp); } SYS_SETRLIMIT = 195 // { int sys_setrlimit(int which, const struct rlimit *rlp); } SYS_MMAP = 197 // { void *sys_mmap(void *addr, size_t len, int prot, int flags, int fd, long pad, off_t pos); } SYS_LSEEK = 199 // { off_t sys_lseek(int fd, int pad, off_t offset, int whence); } SYS_TRUNCATE = 200 // { int sys_truncate(const char *path, int pad, off_t length); } SYS_FTRUNCATE = 201 // { int sys_ftruncate(int fd, int pad, off_t length); } SYS_SYSCTL = 202 // { int sys_sysctl(const int *name, u_int namelen, void *old, size_t *oldlenp, void *new, size_t newlen); } SYS_MLOCK = 203 // { int sys_mlock(const void *addr, size_t len); } SYS_MUNLOCK = 204 // { int sys_munlock(const void *addr, size_t len); } SYS_GETPGID = 207 // { pid_t sys_getpgid(pid_t pid); } SYS_UTRACE = 209 // { int sys_utrace(const char *label, const void *addr, size_t len); } SYS_SEMGET = 221 // { int sys_semget(key_t key, int nsems, int semflg); } SYS_MSGGET = 225 // { int sys_msgget(key_t key, int msgflg); } SYS_MSGSND = 226 // { int sys_msgsnd(int msqid, const void *msgp, size_t msgsz, int msgflg); } SYS_MSGRCV = 227 // { int sys_msgrcv(int msqid, void *msgp, size_t msgsz, long msgtyp, int msgflg); } SYS_SHMAT = 228 // { void *sys_shmat(int shmid, const void *shmaddr, int shmflg); } SYS_SHMDT = 230 // { int sys_shmdt(const void *shmaddr); } SYS_MINHERIT = 250 // { int sys_minherit(void *addr, size_t len, int inherit); } SYS_POLL = 252 // { int sys_poll(struct pollfd *fds, u_int nfds, int timeout); } SYS_ISSETUGID = 253 // { int sys_issetugid(void); } SYS_LCHOWN = 254 // { int sys_lchown(const char *path, uid_t uid, gid_t gid); } SYS_GETSID = 255 // { pid_t sys_getsid(pid_t pid); } 
SYS_MSYNC = 256 // { int sys_msync(void *addr, size_t len, int flags); } SYS_PIPE = 263 // { int sys_pipe(int *fdp); } SYS_FHOPEN = 264 // { int sys_fhopen(const fhandle_t *fhp, int flags); } SYS_PREADV = 267 // { ssize_t sys_preadv(int fd, const struct iovec *iovp, int iovcnt, int pad, off_t offset); } SYS_PWRITEV = 268 // { ssize_t sys_pwritev(int fd, const struct iovec *iovp, int iovcnt, int pad, off_t offset); } SYS_KQUEUE = 269 // { int sys_kqueue(void); } SYS_MLOCKALL = 271 // { int sys_mlockall(int flags); } SYS_MUNLOCKALL = 272 // { int sys_munlockall(void); } SYS_GETRESUID = 281 // { int sys_getresuid(uid_t *ruid, uid_t *euid, uid_t *suid); } SYS_SETRESUID = 282 // { int sys_setresuid(uid_t ruid, uid_t euid, uid_t suid); } SYS_GETRESGID = 283 // { int sys_getresgid(gid_t *rgid, gid_t *egid, gid_t *sgid); } SYS_SETRESGID = 284 // { int sys_setresgid(gid_t rgid, gid_t egid, gid_t sgid); } SYS_MQUERY = 286 // { void *sys_mquery(void *addr, size_t len, int prot, int flags, int fd, long pad, off_t pos); } SYS_CLOSEFROM = 287 // { int sys_closefrom(int fd); } SYS_SIGALTSTACK = 288 // { int sys_sigaltstack(const struct sigaltstack *nss, struct sigaltstack *oss); } SYS_SHMGET = 289 // { int sys_shmget(key_t key, size_t size, int shmflg); } SYS_SEMOP = 290 // { int sys_semop(int semid, struct sembuf *sops, size_t nsops); } SYS_FHSTAT = 294 // { int sys_fhstat(const fhandle_t *fhp, struct stat *sb); } SYS___SEMCTL = 295 // { int sys___semctl(int semid, int semnum, int cmd, union semun *arg); } SYS_SHMCTL = 296 // { int sys_shmctl(int shmid, int cmd, struct shmid_ds *buf); } SYS_MSGCTL = 297 // { int sys_msgctl(int msqid, int cmd, struct msqid_ds *buf); } SYS_SCHED_YIELD = 298 // { int sys_sched_yield(void); } SYS_GETTHRID = 299 // { pid_t sys_getthrid(void); } SYS___THRWAKEUP = 301 // { int sys___thrwakeup(const volatile void *ident, int n); } SYS___THREXIT = 302 // { void sys___threxit(pid_t *notdead); } SYS___THRSIGDIVERT = 303 // { int sys___thrsigdivert(sigset_t 
sigmask, siginfo_t *info, const struct timespec *timeout); } SYS___GETCWD = 304 // { int sys___getcwd(char *buf, size_t len); } SYS_ADJFREQ = 305 // { int sys_adjfreq(const int64_t *freq, int64_t *oldfreq); } SYS_SETRTABLE = 310 // { int sys_setrtable(int rtableid); } SYS_GETRTABLE = 311 // { int sys_getrtable(void); } SYS_FACCESSAT = 313 // { int sys_faccessat(int fd, const char *path, int amode, int flag); } SYS_FCHMODAT = 314 // { int sys_fchmodat(int fd, const char *path, mode_t mode, int flag); } SYS_FCHOWNAT = 315 // { int sys_fchownat(int fd, const char *path, uid_t uid, gid_t gid, int flag); } SYS_LINKAT = 317 // { int sys_linkat(int fd1, const char *path1, int fd2, const char *path2, int flag); } SYS_MKDIRAT = 318 // { int sys_mkdirat(int fd, const char *path, mode_t mode); } SYS_MKFIFOAT = 319 // { int sys_mkfifoat(int fd, const char *path, mode_t mode); } SYS_MKNODAT = 320 // { int sys_mknodat(int fd, const char *path, mode_t mode, dev_t dev); } SYS_OPENAT = 321 // { int sys_openat(int fd, const char *path, int flags, ... mode_t mode); } SYS_READLINKAT = 322 // { ssize_t sys_readlinkat(int fd, const char *path, char *buf, size_t count); } SYS_RENAMEAT = 323 // { int sys_renameat(int fromfd, const char *from, int tofd, const char *to); } SYS_SYMLINKAT = 324 // { int sys_symlinkat(const char *path, int fd, const char *link); } SYS_UNLINKAT = 325 // { int sys_unlinkat(int fd, const char *path, int flag); } SYS___SET_TCB = 329 // { void sys___set_tcb(void *tcb); } SYS___GET_TCB = 330 // { void *sys___get_tcb(void); } )
{ "pile_set_name": "Github" }
<!doctype html> <html> <head> <meta charset="utf-8"> <meta name="viewport" content="maximum-scale=1.0,minimum-scale=1.0,user-scalable=0,width=device-width,initial-scale=1.0" /> <title>支付成功</title> <link rel="stylesheet" type="text/css" href="/mobile/css/aui.css" /> <link rel="stylesheet" type="text/css" href="/mobile/css/api.css" /> <!--购买系统样式表--> <link rel="stylesheet" type="text/css" href="/mobile/css/groupon.css" /> <script type="text/javascript" src="/mobile/js/api.js"></script> <script type="text/javascript" src="/mobile/js/base.js"></script> <script type="text/javascript" src="/mobile/js/jquery.min.js"></script> </head> <body> <article class="aui-content"> <div class="pay-status"> <img src="/mobile/images/groupon/pay_ok.png" class="pay-status-img"> <h2 class="aui-text-danger">支付成功</h2> <p class="aui-font-size-12 aui-padded-t-15 aui-margin-t-15">您已成功支付(港币)</p> <h1 style="font-size:34px;">{{$order['buyer_pay_amount']}}</h1> </div> <div class="aui-content-padded"> <div class="aui-btn aui-btn-danger aui-btn-block aui-btn-sm"> <a href="{{$url}}" style="color:#fff">完成</a> </div> </div> </article> <script> setTimeout(function(){ window.location.href="{{$url}}"; }, 2000); </script> </body> </html>
{ "pile_set_name": "Github" }
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.content.browser; import androidx.test.filters.SmallTest; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.chromium.base.test.util.CommandLineFlags; import org.chromium.base.test.util.Feature; import org.chromium.content_public.browser.test.ContentJUnit4ClassRunner; import org.chromium.content_public.browser.test.util.CriteriaHelper; import org.chromium.content_public.browser.test.util.JavaScriptUtils; import org.chromium.content_shell_apk.ContentShellActivityTestRule; import java.util.concurrent.TimeoutException; /** * Integration tests for the Wake Lock API. */ @RunWith(ContentJUnit4ClassRunner.class) @CommandLineFlags.Add({"enable-experimental-web-platform-features"}) public class WakeLockTest { @Rule public ContentShellActivityTestRule mActivityTestRule = new ContentShellActivityTestRule(); private static final String TEST_PATH = "content/test/data/android/title1.html"; @Before public void setUp() { try { mActivityTestRule.launchContentShellWithUrlSync(TEST_PATH); } catch (Throwable t) { Assert.fail("Couldn't load test page."); } } private void getWakeLock(String type) throws TimeoutException { final String code = "navigator.wakeLock.request('" + type + "');"; JavaScriptUtils.executeJavaScriptAndWaitForResult(mActivityTestRule.getWebContents(), code); } @After public void tearDown() {} @Test @SmallTest @Feature({"WakeLock"}) public void testScreenLock() throws Exception { Assert.assertFalse(mActivityTestRule.getActivity() .getActiveShell() .getContentView() .getKeepScreenOn()); getWakeLock("screen"); CriteriaHelper.pollInstrumentationThread(() -> mActivityTestRule.getActivity() .getActiveShell() .getContentView() .getKeepScreenOn()); } }
{ "pile_set_name": "Github" }
Allocation scores: galera:0 promotion score on rhos6-node1: 100 galera:1 promotion score on rhos6-node2: 100 galera:2 promotion score on rhos6-node3: 100 pcmk__clone_allocate: ceilometer-alarm-evaluator-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-alarm-evaluator:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: ceilometer-alarm-notifier-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-notifier-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-notifier-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: 
ceilometer-alarm-notifier:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-alarm-notifier:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: ceilometer-api-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-api-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-api-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-api:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-api:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-api:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-api:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-api:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-api:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-api:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-api:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-api:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: ceilometer-collector-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-collector-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-collector-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-collector:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-collector:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-collector:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-collector:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-collector:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-collector:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-collector:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-collector:2 
allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-collector:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: ceilometer-delay-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-delay-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-delay-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-delay:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-delay:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-delay:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-delay:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-delay:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-delay:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-delay:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-delay:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-delay:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: ceilometer-notification-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-notification-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-notification-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-notification:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: ceilometer-notification:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: ceilometer-notification:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-notification:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-notification:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: ceilometer-notification:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: ceilometer-notification:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: ceilometer-notification:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: 
ceilometer-notification:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: galera-master allocation score on rhos6-node1: 0 pcmk__clone_allocate: galera-master allocation score on rhos6-node2: 0 pcmk__clone_allocate: galera-master allocation score on rhos6-node3: 0 pcmk__clone_allocate: galera:0 allocation score on rhos6-node1: 101 pcmk__clone_allocate: galera:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: galera:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: galera:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: galera:1 allocation score on rhos6-node2: 101 pcmk__clone_allocate: galera:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: galera:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: galera:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: galera:2 allocation score on rhos6-node3: 101 pcmk__clone_allocate: glance-api-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-api-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-api-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-api:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: glance-api:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-api:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-api:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-api:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: glance-api:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-api:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-api:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-api:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: glance-fs-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-fs-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-fs-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-fs:0 allocation score on 
rhos6-node1: 1 pcmk__clone_allocate: glance-fs:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-fs:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-fs:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-fs:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: glance-fs:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-fs:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-fs:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-fs:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: glance-registry-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-registry-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-registry-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-registry:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: glance-registry:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-registry:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-registry:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-registry:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: glance-registry:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: glance-registry:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: glance-registry:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: glance-registry:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: heat-api-cfn-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-cfn-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cfn-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cfn:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: heat-api-cfn:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cfn:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cfn:1 allocation score on rhos6-node1: 0 
pcmk__clone_allocate: heat-api-cfn:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: heat-api-cfn:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cfn:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-cfn:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cfn:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: heat-api-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cloudwatch-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-cloudwatch-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cloudwatch-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: heat-api:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: heat-api:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: heat-api:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: heat-api:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: heat-api:2 allocation score on 
rhos6-node1: 0 pcmk__clone_allocate: heat-api:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: heat-api:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: horizon-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: horizon-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: horizon-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: horizon:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: horizon:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: horizon:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: horizon:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: horizon:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: horizon:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: horizon:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: horizon:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: horizon:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: keystone-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: keystone-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: keystone-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: keystone:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: keystone:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: keystone:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: keystone:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: keystone:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: keystone:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: keystone:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: keystone:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: keystone:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: lb-haproxy-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: lb-haproxy-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: lb-haproxy-clone allocation score 
on rhos6-node3: 0 pcmk__clone_allocate: lb-haproxy:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: lb-haproxy:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: lb-haproxy:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: lb-haproxy:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: lb-haproxy:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: lb-haproxy:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: lb-haproxy:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: lb-haproxy:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: lb-haproxy:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: memcached-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: memcached-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: memcached-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: memcached:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: memcached:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: memcached:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: memcached:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: memcached:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: memcached:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: memcached:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: memcached:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: memcached:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: mongodb-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: mongodb-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: mongodb-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: mongodb:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: mongodb:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: mongodb:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: mongodb:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: 
mongodb:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: mongodb:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: mongodb:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: mongodb:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: mongodb:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-dhcp-agent-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-dhcp-agent-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-dhcp-agent-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-dhcp-agent:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-dhcp-agent:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-dhcp-agent:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-l3-agent-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-l3-agent-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-l3-agent-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-l3-agent:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-l3-agent:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-l3-agent:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-l3-agent:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-l3-agent:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-l3-agent:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-l3-agent:2 
allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-l3-agent:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-l3-agent:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-metadata-agent-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-metadata-agent-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-metadata-agent-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-metadata-agent:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-metadata-agent:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-metadata-agent:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-metadata-agent:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-metadata-agent:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-metadata-agent:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-metadata-agent:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-metadata-agent:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-metadata-agent:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-netns-cleanup-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-netns-cleanup-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-netns-cleanup-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-netns-cleanup:2 allocation score on 
rhos6-node1: 0 pcmk__clone_allocate: neutron-netns-cleanup:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-netns-cleanup:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-openvswitch-agent-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-openvswitch-agent-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-openvswitch-agent-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-ovs-cleanup-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-ovs-cleanup-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-ovs-cleanup-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-ovs-cleanup:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-ovs-cleanup:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-ovs-cleanup:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-ovs-cleanup:2 allocation score on 
rhos6-node1: 0 pcmk__clone_allocate: neutron-ovs-cleanup:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-ovs-cleanup:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-scale-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-scale-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-scale-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-scale:0 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-scale:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-scale:0 allocation score on rhos6-node3: 1 pcmk__clone_allocate: neutron-scale:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-scale:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-scale:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-scale:2 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-scale:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-scale:2 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-server-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-server-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-server-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-server:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: neutron-server:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-server:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-server:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-server:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: neutron-server:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: neutron-server:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: neutron-server:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: neutron-server:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: nova-api-clone 
allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-api-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-api-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-api:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: nova-api:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-api:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-api:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-api:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: nova-api:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-api:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-api:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-api:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: nova-conductor-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-conductor-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-conductor-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-conductor:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: nova-conductor:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-conductor:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-conductor:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-conductor:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: nova-conductor:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-conductor:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-conductor:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-conductor:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: nova-consoleauth-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-consoleauth-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-consoleauth-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-consoleauth:0 allocation score on 
rhos6-node1: 1 pcmk__clone_allocate: nova-consoleauth:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-consoleauth:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-consoleauth:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-consoleauth:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: nova-consoleauth:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-consoleauth:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-consoleauth:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-consoleauth:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: nova-novncproxy-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-novncproxy-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-novncproxy-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-novncproxy:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: nova-novncproxy:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-novncproxy:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-novncproxy:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-novncproxy:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: nova-novncproxy:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-novncproxy:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-novncproxy:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-novncproxy:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: nova-scheduler-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-scheduler-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-scheduler-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-scheduler:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: nova-scheduler:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-scheduler:0 allocation score on rhos6-node3: 0 
pcmk__clone_allocate: nova-scheduler:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-scheduler:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: nova-scheduler:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: nova-scheduler:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: nova-scheduler:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: nova-scheduler:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: rabbitmq-server-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: rabbitmq-server-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: rabbitmq-server-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: rabbitmq-server:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: rabbitmq-server:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: rabbitmq-server:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: rabbitmq-server:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: rabbitmq-server:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: rabbitmq-server:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: rabbitmq-server:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: rabbitmq-server:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: rabbitmq-server:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: swift-account-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-account-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-account-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-account:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: swift-account:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-account:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-account:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-account:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: swift-account:1 
allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-account:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-account:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-account:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: swift-container-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-container-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-container-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-container:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: swift-container:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-container:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-container:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-container:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: swift-container:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-container:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-container:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-container:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: swift-fs-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-fs-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-fs-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-fs:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: swift-fs:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-fs:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-fs:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-fs:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: swift-fs:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-fs:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-fs:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-fs:2 allocation score on rhos6-node3: 1 
pcmk__clone_allocate: swift-object-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-object-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-object-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-object:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: swift-object:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-object:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-object:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-object:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: swift-object:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-object:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-object:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-object:2 allocation score on rhos6-node3: 1 pcmk__clone_allocate: swift-proxy-clone allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-proxy-clone allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-proxy-clone allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-proxy:0 allocation score on rhos6-node1: 1 pcmk__clone_allocate: swift-proxy:0 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-proxy:0 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-proxy:1 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-proxy:1 allocation score on rhos6-node2: 1 pcmk__clone_allocate: swift-proxy:1 allocation score on rhos6-node3: 0 pcmk__clone_allocate: swift-proxy:2 allocation score on rhos6-node1: 0 pcmk__clone_allocate: swift-proxy:2 allocation score on rhos6-node2: 0 pcmk__clone_allocate: swift-proxy:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-alarm-evaluator:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-alarm-evaluator:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:0 allocation score on 
rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-alarm-evaluator:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-alarm-evaluator:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-alarm-notifier:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-alarm-notifier:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-api:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-api:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-api:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-api:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-api:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-api:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-api:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: 
ceilometer-api:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-api:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-central allocation score on rhos6-node1: 0 pcmk__native_allocate: ceilometer-central allocation score on rhos6-node2: 0 pcmk__native_allocate: ceilometer-central allocation score on rhos6-node3: 0 pcmk__native_allocate: ceilometer-collector:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-collector:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: ceilometer-collector:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: ceilometer-collector:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-collector:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-collector:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: ceilometer-collector:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-collector:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-collector:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-delay:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-delay:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-delay:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-delay:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-delay:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-delay:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-delay:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-delay:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-delay:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: ceilometer-notification:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: ceilometer-notification:0 
allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-notification:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-notification:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-notification:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: ceilometer-notification:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: ceilometer-notification:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: ceilometer-notification:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: ceilometer-notification:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: cinder-api allocation score on rhos6-node1: 0 pcmk__native_allocate: cinder-api allocation score on rhos6-node2: 0 pcmk__native_allocate: cinder-api allocation score on rhos6-node3: 0 pcmk__native_allocate: cinder-scheduler allocation score on rhos6-node1: 0 pcmk__native_allocate: cinder-scheduler allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: cinder-scheduler allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: cinder-volume allocation score on rhos6-node1: 0 pcmk__native_allocate: cinder-volume allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: cinder-volume allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: galera:0 allocation score on rhos6-node1: 101 pcmk__native_allocate: galera:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: galera:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: galera:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: galera:1 allocation score on rhos6-node2: 101 pcmk__native_allocate: galera:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: galera:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: galera:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: galera:2 allocation score on rhos6-node3: 101 
pcmk__native_allocate: glance-api:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: glance-api:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: glance-api:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: glance-api:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-api:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: glance-api:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: glance-api:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-api:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: glance-api:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: glance-fs:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: glance-fs:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: glance-fs:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: glance-fs:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-fs:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: glance-fs:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: glance-fs:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-fs:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: glance-fs:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: glance-registry:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: glance-registry:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: glance-registry:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: glance-registry:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-registry:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: glance-registry:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: glance-registry:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: glance-registry:2 allocation score on rhos6-node2: -INFINITY 
pcmk__native_allocate: glance-registry:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: heat-api-cfn:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: heat-api-cfn:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: heat-api-cfn:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: heat-api-cfn:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api-cfn:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: heat-api-cfn:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: heat-api-cfn:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api-cfn:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: heat-api-cfn:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: heat-api-cloudwatch:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: heat-api-cloudwatch:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: heat-api:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: heat-api:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: heat-api:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: heat-api:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: heat-api:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: 
heat-api:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: heat-api:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: heat-api:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: heat-engine allocation score on rhos6-node1: 0 pcmk__native_allocate: heat-engine allocation score on rhos6-node2: 0 pcmk__native_allocate: heat-engine allocation score on rhos6-node3: 0 pcmk__native_allocate: horizon:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: horizon:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: horizon:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: horizon:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: horizon:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: horizon:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: horizon:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: horizon:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: horizon:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: keystone:0 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: keystone:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: keystone:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: keystone:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: keystone:1 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: keystone:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: keystone:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: keystone:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: keystone:2 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: lb-haproxy:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: lb-haproxy:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: lb-haproxy:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: lb-haproxy:1 allocation 
score on rhos6-node1: -INFINITY pcmk__native_allocate: lb-haproxy:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: lb-haproxy:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: lb-haproxy:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: lb-haproxy:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: lb-haproxy:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: memcached:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: memcached:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: memcached:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: memcached:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: memcached:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: memcached:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: memcached:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: memcached:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: memcached:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: mongodb:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: mongodb:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: mongodb:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: mongodb:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: mongodb:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: mongodb:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: mongodb:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: mongodb:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: mongodb:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:1 
allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-dhcp-agent:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-dhcp-agent:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-l3-agent:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-l3-agent:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-l3-agent:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-l3-agent:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-l3-agent:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-l3-agent:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-l3-agent:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-l3-agent:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-l3-agent:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-metadata-agent:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-metadata-agent:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-metadata-agent:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-metadata-agent:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-metadata-agent:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-metadata-agent:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-metadata-agent:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-metadata-agent:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-metadata-agent:2 allocation score on rhos6-node3: 1 
pcmk__native_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-netns-cleanup:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-netns-cleanup:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-openvswitch-agent:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-openvswitch-agent:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:1 
allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-ovs-cleanup:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-ovs-cleanup:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-scale:0 allocation score on rhos6-node1: 0 pcmk__native_allocate: neutron-scale:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: neutron-scale:0 allocation score on rhos6-node3: 1 pcmk__native_allocate: neutron-scale:1 allocation score on rhos6-node1: 0 pcmk__native_allocate: neutron-scale:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-scale:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-scale:2 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-scale:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-scale:2 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: neutron-server:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: neutron-server:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: neutron-server:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: neutron-server:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-server:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: neutron-server:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: neutron-server:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: neutron-server:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: neutron-server:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: node1-fence allocation score on rhos6-node1: 0 pcmk__native_allocate: node1-fence allocation score on rhos6-node2: 0 pcmk__native_allocate: node1-fence allocation score on rhos6-node3: 0 
pcmk__native_allocate: node2-fence allocation score on rhos6-node1: 0 pcmk__native_allocate: node2-fence allocation score on rhos6-node2: 0 pcmk__native_allocate: node2-fence allocation score on rhos6-node3: 0 pcmk__native_allocate: node3-fence allocation score on rhos6-node1: 0 pcmk__native_allocate: node3-fence allocation score on rhos6-node2: 0 pcmk__native_allocate: node3-fence allocation score on rhos6-node3: 0 pcmk__native_allocate: nova-api:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: nova-api:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-api:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-api:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-api:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: nova-api:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-api:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-api:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-api:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: nova-conductor:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: nova-conductor:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-conductor:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-conductor:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-conductor:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: nova-conductor:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-conductor:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-conductor:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-conductor:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: nova-consoleauth:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: nova-consoleauth:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: 
nova-consoleauth:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: nova-consoleauth:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-consoleauth:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: nova-consoleauth:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: nova-consoleauth:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-consoleauth:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-consoleauth:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: nova-novncproxy:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: nova-novncproxy:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-novncproxy:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-novncproxy:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-novncproxy:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: nova-novncproxy:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-novncproxy:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-novncproxy:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-novncproxy:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: nova-scheduler:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: nova-scheduler:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-scheduler:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-scheduler:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-scheduler:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: nova-scheduler:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: nova-scheduler:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: nova-scheduler:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: nova-scheduler:2 allocation score on rhos6-node3: 1 
pcmk__native_allocate: rabbitmq-server:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: rabbitmq-server:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: rabbitmq-server:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: rabbitmq-server:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: rabbitmq-server:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: rabbitmq-server:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: rabbitmq-server:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: rabbitmq-server:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: rabbitmq-server:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: swift-account:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: swift-account:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-account:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-account:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-account:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: swift-account:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-account:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-account:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-account:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: swift-container:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: swift-container:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-container:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-container:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-container:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: swift-container:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-container:2 allocation score on rhos6-node1: -INFINITY 
pcmk__native_allocate: swift-container:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-container:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: swift-fs:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: swift-fs:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: swift-fs:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: swift-fs:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-fs:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: swift-fs:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: swift-fs:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-fs:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-fs:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: swift-object-expirer allocation score on rhos6-node1: 0 pcmk__native_allocate: swift-object-expirer allocation score on rhos6-node2: 0 pcmk__native_allocate: swift-object-expirer allocation score on rhos6-node3: 0 pcmk__native_allocate: swift-object:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: swift-object:0 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-object:0 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-object:1 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-object:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: swift-object:1 allocation score on rhos6-node3: -INFINITY pcmk__native_allocate: swift-object:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-object:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-object:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: swift-proxy:0 allocation score on rhos6-node1: 1 pcmk__native_allocate: swift-proxy:0 allocation score on rhos6-node2: 0 pcmk__native_allocate: swift-proxy:0 allocation score on rhos6-node3: 0 pcmk__native_allocate: swift-proxy:1 
allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-proxy:1 allocation score on rhos6-node2: 1 pcmk__native_allocate: swift-proxy:1 allocation score on rhos6-node3: 0 pcmk__native_allocate: swift-proxy:2 allocation score on rhos6-node1: -INFINITY pcmk__native_allocate: swift-proxy:2 allocation score on rhos6-node2: -INFINITY pcmk__native_allocate: swift-proxy:2 allocation score on rhos6-node3: 1 pcmk__native_allocate: vip-ceilometer allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-ceilometer allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-ceilometer allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-cinder allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-cinder allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-cinder allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-db allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-db allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-db allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-glance allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-glance allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-glance allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-heat allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-heat allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-heat allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-horizon allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-horizon allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-horizon allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-keystone allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-keystone allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-keystone allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-neutron allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-neutron allocation score on 
rhos6-node2: 0 pcmk__native_allocate: vip-neutron allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-nova allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-nova allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-nova allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-qpid allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-qpid allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-qpid allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-rabbitmq allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-rabbitmq allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-rabbitmq allocation score on rhos6-node3: 0 pcmk__native_allocate: vip-swift allocation score on rhos6-node1: 0 pcmk__native_allocate: vip-swift allocation score on rhos6-node2: 0 pcmk__native_allocate: vip-swift allocation score on rhos6-node3: 0
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <!-- /** * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ --> <config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:ObjectManager/etc/config.xsd"> <preference for="Magento\Sitemap\Model\SitemapItemInterface" type="Magento\Sitemap\Model\SitemapItem" /> <preference for="Magento\Sitemap\Model\ItemProvider\ItemProviderInterface" type="Magento\Sitemap\Model\ItemProvider\Composite" /> <preference for="Magento\Sitemap\Model\SitemapConfigReaderInterface" type="Magento\Sitemap\Model\SitemapConfigReader" /> <type name="Magento\Sitemap\Model\Sitemap"> <arguments> <argument name="resource" xsi:type="object">Magento\Sitemap\Model\ResourceModel\Sitemap</argument> </arguments> </type> <type name="Magento\Config\Model\Config\TypePool"> <arguments> <argument name="sensitive" xsi:type="array"> <item name="sitemap/generate/error_email" xsi:type="string">1</item> </argument> </arguments> </type> <type name="Magento\Sitemap\Model\ItemProvider\Composite"> <arguments> <argument name="itemProviders" xsi:type="array"> <item name="storeUrlProvider" xsi:type="object">Magento\Sitemap\Model\ItemProvider\StoreUrl</item> <item name="categoryProvider" xsi:type="object">Magento\Sitemap\Model\ItemProvider\Category</item> <item name="cmsPageProvider" xsi:type="object">Magento\Sitemap\Model\ItemProvider\CmsPage</item> <item name="productProvider" xsi:type="object">Magento\Sitemap\Model\ItemProvider\Product</item> </argument> </arguments> </type> <type name="Magento\Sitemap\Model\ItemProvider\StoreUrl"> <arguments> <argument name="configReader" xsi:type="object">Magento\Sitemap\Model\ItemProvider\StoreUrlConfigReader</argument> </arguments> </type> <type name="Magento\Sitemap\Model\ItemProvider\Category"> <arguments> <argument name="configReader" xsi:type="object">Magento\Sitemap\Model\ItemProvider\CategoryConfigReader</argument> </arguments> </type> <type 
name="Magento\Sitemap\Model\ItemProvider\Product"> <arguments> <argument name="configReader" xsi:type="object">Magento\Sitemap\Model\ItemProvider\ProductConfigReader</argument> </arguments> </type> <type name="Magento\Sitemap\Model\ItemProvider\CmsPage"> <arguments> <argument name="configReader" xsi:type="object">Magento\Sitemap\Model\ItemProvider\CmsPageConfigReader</argument> </arguments> </type> </config>
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id$ * * @author Andy Heninger, IBM */ #if HAVE_CONFIG_H # include <config.h> #endif #include <stdlib.h> #include <stdio.h> #include <string.h> #include <ctype.h> #include <xercesc/parsers/SAXParser.hpp> #include <xercesc/parsers/XercesDOMParser.hpp> #include <xercesc/util/PlatformUtils.hpp> #include <xercesc/sax/HandlerBase.hpp> #include <xercesc/framework/MemBufInputSource.hpp> #include <xercesc/sax2/SAX2XMLReader.hpp> #include <xercesc/sax2/XMLReaderFactory.hpp> #include <xercesc/sax2/Attributes.hpp> #include <xercesc/sax2/DefaultHandler.hpp> #include <xercesc/dom/DOM.hpp> #include <xercesc/framework/StdOutFormatTarget.hpp> #include <xercesc/framework/XMLGrammarPoolImpl.hpp> #include <xercesc/internal/MemoryManagerImpl.hpp> #include <xercesc/util/OutOfMemoryException.hpp> void clearFileInfoMemory(); #ifdef HAVE_PTHREAD #include <pthread.h> #include <unistd.h> #include <errno.h> //------------------------------------------------------------------------------ // // UNIX specific code for starting threads // //------------------------------------------------------------------------------ extern "C" { typedef void (*ThreadFunc)(void *); typedef void *(*pthreadfunc)(void *); class ThreadFuncs // 
This class isolates OS dependent threading { // functions from the rest of ThreadTest program. public: static void Sleep(int millis); static void startThread(ThreadFunc, void *param); }; void ThreadFuncs::Sleep(int millis) { int seconds = millis/1000; if (seconds <= 0) seconds = 1; #if defined(SOLARIS) // somehow the sleep hangs on Solaris // so ignore the call #else ::sleep(seconds); #endif } void ThreadFuncs::startThread(ThreadFunc func, void *param) { int x; pthread_t tId; //thread_t tId; #if defined(_HP_UX) && defined(XML_USE_DCE) x = pthread_create( &tId, pthread_attr_default, (pthreadfunc)func, param); #else pthread_attr_t attr; pthread_attr_init(&attr); x = pthread_create( &tId, &attr, (pthreadfunc)func, param); #endif if (x == -1) { fprintf(stderr, "Error starting thread. Errno = %d\n", errno); clearFileInfoMemory(); exit(-1); } } } // end of extern "C" #else //------------------------------------------------------------------------------ // // Windows specific code for starting threads // //------------------------------------------------------------------------------ #include <Windows.h> #include <process.h> typedef DWORD (WINAPI *ThreadFunc)(void *); class ThreadFuncs // This class isolates OS dependent threading { // functions from the rest of ThreadTest program. public: static void Sleep(int millis) {::Sleep(millis);}; static void startThread(ThreadFunc, void *param); }; void ThreadFuncs::startThread(ThreadFunc func, void *param) { HANDLE tHandle; DWORD threadID; tHandle = CreateThread(0, // Security Attributes, 0x10000, // Stack Size, func, // Starting Address. param, // Parmeters 0, // Creation Flags, &threadID); // Thread ID (Can not be null on 95/98) if (tHandle == 0) { fprintf(stderr, "Error starting thread. Errno = %d\n", errno); clearFileInfoMemory(); exit(-1); } // Set the priority of the working threads low, so that the UI of the running system will // remain responsive. 
SetThreadPriority(tHandle, THREAD_PRIORITY_IDLE); } #endif //------------------------------------------------------------------------------ // // struct InFileInfo One of these structs will be set up for each file listed // on the command line. Once set, the data is unchanging // and can safely be referenced by the test threads without // use of synchronization. // //------------------------------------------------------------------------------ struct InFileInfo { char *fileName; XMLCh *uFileName; // When doing an in-memory parse, avoid transcoding file name // each time through. char *fileContent; // If doing an in-memory parse, this field points // to an allocated string containing the entire file // contents. Otherwise it's 0. size_t fileSize; // The file length. Only initialized when doing // an in-memory test. int checkSum; // The XML checksum. Set up by the main thread for // each file before the worker threads are started. }; //------------------------------------------------------------------------------ // // struct threadInfo Holds information specific to an individual thread. // One of these is set up for each thread in the test. // The main program monitors the threads by looking // at the status stored in these structs. // //------------------------------------------------------------------------------ struct ThreadInfo { bool fHeartBeat; // Set true by the thread each time it finishes // parsing a file. bool fInProgress; // Set to false by the thread when parse in progress unsigned int fParses; // Number of parses completed. int fThreadNum; // Identifying number for this thread. ThreadInfo() { fHeartBeat = false; fInProgress = false; fParses = 0; fThreadNum = -1; } }; XERCES_CPP_NAMESPACE_USE //------------------------------------------------------------------------------ // // struct runInfo Holds the info extracted from the command line. // There is only one of these, and it is static, and // unchanging once the command line has been parsed. 
//                     During the test, the threads will access this info without
//                     any synchronization.
//
//------------------------------------------------------------------------------
const int MAXINFILES = 25;
struct RunInfo
{
    bool    doGrammarCaching;     // -gc : cache grammar, reuse in later parses
    bool    quiet;                // -quiet : suppress periodic status display
    bool    verbose;              // -verbose : extra per-parse messages
    bool    stopNow;              // set by the main loop to tell workers to stop
    bool    dom;                  // -parser=dom
    bool    sax;                  // -parser=sax (SAX1); if both dom/sax false -> SAX2
    bool    reuseParser;          // -reuse : keep one parser per thread
    bool    inMemory;             // -mem : parse from preloaded memory buffers
    bool    dumpOnErr;            // -dump : print DOM tree on checksum mismatch
    bool    doSchema;             // -s
    bool    schemaFullChecking;   // -f
    bool    doNamespaces;         // -n
    bool    doInitialParse;       // -init : single-threaded pre-parse to seed checksums
    bool    doNamespacePrefixes;  // SAX2
    SAXParser::ValSchemes  valScheme;   // -v=never|auto|always
    int     numThreads;           // -threads nnn (default 2)
    int     totalTime;            // -time nnn, seconds; 0 = run forever
    int     numInputFiles;        // count of entries used in files[]
    unsigned int numParses;       // -parses nnn; 0 = run by time instead
    InFileInfo files[MAXINFILES];
};

//
//------------------------------------------------------------------------------
//
//  Global Data
//
//------------------------------------------------------------------------------
RunInfo         gRunInfo;     // Command-line options; read-only once parsed.
ThreadInfo      *gThreadInfo; // Array of per-thread status, sized numThreads.

/** Grammar caching thread testing */
MemoryManager*  gpMemMgr = 0; // Only non-null when -gc grammar caching is active.
XMLGrammarPool* gp = 0;       // Shared grammar pool for all threads (when -gc).

#ifdef HELPER_ROUTINES
// Routines which maybe helpful for debugging
static void printString(const XMLCh *str)
{
    char *s = XMLString::transcode(str);
    printf("%s", s);
    delete s;
}

#define CHARS_PER_LINE           40
#define BYTES_PER_LINE           16

/*
 * DumpLine: Dump out a buffer (address and length) to stderr.
 */
static void DumpLine(char* address, int length) {
    int i, c, charCount=0;
    // Round the length up to a multiple of 4 so whole words are printed.
    if (length % 4) length += 4;
    fprintf(stderr, "%8.8p: ", address);
    for (i=0; i < length/4; ++i) {
        fprintf(stderr, "%8.8X ", ((int*)address)[i]);
        charCount += 9;
    }
    // Pad the hex column so the ASCII column lines up.
    for (i=charCount; i < CHARS_PER_LINE; ++i) {
        putc(' ', stderr);
    }
    fprintf(stderr, "| ");
    for (i=0; i < length; ++i) {
        c = address[i];
        c = (isprint(c) ? c : '.');   // non-printable bytes shown as '.'
        fprintf(stderr, "%c", c);
    }
    fprintf(stderr, "\n");
}

/*
 * dump: dump out a buffer (address and length) to stderr by dumping out
 * a line at a time (DumpLine), until the buffer is written out.
 */
static void dump(void* generalAddress, int length) {
    int curr = 0;
    char* address = (char*) generalAddress;

    while (&address[curr] < &address[length-BYTES_PER_LINE]) {
        DumpLine(&address[curr], BYTES_PER_LINE);
        curr += BYTES_PER_LINE;
    }
    if (curr < length) {
        DumpLine(&address[curr], length-curr);
    }
    fflush(stderr);
}
#endif

//------------------------------------------------------------------------------
//
//  class ThreadParser   Bundles together a SAX parser and the SAX handlers
//                       and contains the API that the rest of this test
//                       program uses for creating parsers and doing parsing.
//
//                       Multiple instances of this class can operate concurrently
//                       in different threads.
//
//-------------------------------------------------------------------------------
class ThreadParser
{
public:
    class SAXHandler;
    class SAX2Handler;
    SAXHandler*     fSAXHandler;
    SAX2Handler*    fSAX2Handler;
    ErrorHandler*   fDOMErrorHandler;

    //  This is the API used by the rest of the test program
    ThreadParser();
    ~ThreadParser();

    int parse(int fileNum);           // Parse the specified file.  fileNum is an index
                                      //   into the gRunInfo.files array.
                                      //  return the XML checksum, or
                                      //  0 if a parse error occurred.

    int getCheckSum() {
        return fCheckSum;
    }

    int reCheck();                    // Try to compute the checksum again.
                                      //  for DOM, re-walk the tree.
                                      //  for SAX, can't do, just return previous value.

    void domPrint();                  //   including any children.  Default (no param)
                                      //   version dumps the entire document.

    void  addToCheckSum(const XMLCh *chars, XMLSize_t len=(XMLSize_t)-1);

    //  These are the SAX call-back functions that this class implements. Can be used
    //  for SAX and SAX2.
    void characters(const XMLCh* const chars, const XMLSize_t length) {
        addToCheckSum(chars, length);
    }

    void ignorableWhitespace(const XMLCh* const chars, const XMLSize_t length) {
        addToCheckSum(chars, length);
    }

    void resetDocument() {
    }

    // All three severity levels rethrow so parse() can catch SAXParseException
    // and report the failing file; only a severity tag is printed here.
    void warning(const SAXParseException& exc) {
        fprintf(stderr, "*** Warning ");
        fflush(stderr);
        throw exc;
    }

    void error(const SAXParseException& exc) {
        fprintf(stderr, "*** Error ");
        fflush(stderr);
        throw exc;
    }

    void fatalError(const SAXParseException& exc) {
        fprintf(stderr, "***** Fatal error ");
        fflush(stderr);
        throw exc;
    }

    // Create a nested class that can inherit from HandlerBase
    // for SAX startElement callbacks.
    class SAXHandler : public HandlerBase
    {
    public:
        ThreadParser* SAXInstance;    // back-pointer to the owning ThreadParser

        void startElement(const XMLCh* const name, AttributeList& attributes);
    };

    // Create a nested class that can inherit from DefaultHandler
    // for SAX2 startElement callbacks.
    class SAX2Handler : public DefaultHandler
    {
    public:
        ThreadParser* SAX2Instance;   // back-pointer to the owning ThreadParser

        void startElement(const XMLCh* const uri,
                          const XMLCh* const localname,
                          const XMLCh* const qname,
                          const Attributes& attributes);
    };

private:
    int             fCheckSum;        // running checksum for the document being parsed
    SAXParser*      fSAXParser;       // exactly one of these three parser
    SAX2XMLReader*  fSAX2Parser;      //   pointers is non-null, chosen by
    XercesDOMParser* fXercesDOMParser;//   gRunInfo.dom / gRunInfo.sax
    XERCES_CPP_NAMESPACE_QUALIFIER DOMDocument * fDoc;

    ThreadParser(const ThreadParser &);                     // No copy constructor
    const ThreadParser & operator =(const ThreadParser &);  // No assignment.

    void  domCheckSum(const DOMNode *);
};

//
//  ThreadParser constructor.  Invoked by the threads of the test program
//    to create parsers.
//
//  Builds whichever parser variant (DOM, SAX1 or SAX2) was selected on the
//  command line and wires this object's handlers into it.  When a shared
//  grammar pool (gp) exists, the parser is configured to cache and reuse
//  grammars across parses.
//
ThreadParser::ThreadParser()
{
    fSAXParser       = 0;
    fSAX2Parser      = 0;
    fXercesDOMParser = 0;
    fSAXHandler      = 0;
    fSAX2Handler     = 0;
    fDOMErrorHandler = 0;
    fDoc             = 0;
    fCheckSum        = 0;

    if (gRunInfo.dom) {
        // Set up to use a DOM parser
        /** Grammar caching thread testing */
        if (gp) {
            fXercesDOMParser = new XercesDOMParser(0, XMLPlatformUtils::fgMemoryManager, gp);
            fXercesDOMParser->cacheGrammarFromParse(true);
            fXercesDOMParser->useCachedGrammarInParse(true);
        }
        else {
            fXercesDOMParser = new XercesDOMParser;
        }
        // Map the SAXParser-style scheme from the command line onto the
        // XercesDOMParser equivalent.
        switch (gRunInfo.valScheme) {
            case SAXParser::Val_Never:
                fXercesDOMParser->setValidationScheme(XercesDOMParser::Val_Never);
                break;
            case SAXParser::Val_Auto:
                fXercesDOMParser->setValidationScheme(XercesDOMParser::Val_Auto);
                break;
            default: //SAXParser::Val_Always:
                fXercesDOMParser->setValidationScheme(XercesDOMParser::Val_Always);
                break;
        }
        fXercesDOMParser->setDoSchema(gRunInfo.doSchema);
        fXercesDOMParser->setHandleMultipleImports (true);
        fXercesDOMParser->setValidationSchemaFullChecking(gRunInfo.schemaFullChecking);
        fXercesDOMParser->setDoNamespaces(gRunInfo.doNamespaces);
        // DOM path uses a plain HandlerBase for errors (owned by this object,
        // freed in the destructor).
        fDOMErrorHandler = (ErrorHandler*) new HandlerBase();
        fXercesDOMParser->setErrorHandler(fDOMErrorHandler);
    }

    else if (gRunInfo.sax) {
        // Set up to use a SAX1 parser.
        /** Grammar caching thread testing */
        if (gp) {
            fSAXParser = new SAXParser(0, XMLPlatformUtils::fgMemoryManager, gp);
            fSAXParser->cacheGrammarFromParse(true);
            fSAXParser->useCachedGrammarInParse(true);
        }
        else {
            fSAXParser = new SAXParser();
        }
        fSAXParser->setValidationScheme(gRunInfo.valScheme);
        fSAXParser->setDoSchema(gRunInfo.doSchema);
        fSAXParser->setHandleMultipleImports (true);
        fSAXParser->setValidationSchemaFullChecking(gRunInfo.schemaFullChecking);
        fSAXParser->setDoNamespaces(gRunInfo.doNamespaces);

        fSAXHandler = new ThreadParser::SAXHandler();
        fSAXHandler->SAXInstance = this;
        fSAXParser->setDocumentHandler(fSAXHandler);
        fSAXParser->setErrorHandler(fSAXHandler);
    }

    else {
        // Set up to use a SAX2 parser.
        /** Grammar caching thread testing */
        if (gp) {
            fSAX2Parser = XMLReaderFactory::createXMLReader(gpMemMgr, gp);
            fSAX2Parser->setFeature(XMLUni::fgXercesCacheGrammarFromParse,true);
            fSAX2Parser->setFeature(XMLUni::fgXercesUseCachedGrammarInParse,true);
        }
        else {
            fSAX2Parser = XMLReaderFactory::createXMLReader();
        }
        fSAX2Parser->setFeature(XMLUni::fgSAX2CoreNameSpaces,(gRunInfo.doNamespaces));
        fSAX2Parser->setFeature(XMLUni::fgXercesSchema,(gRunInfo.doSchema));
        fSAX2Parser->setFeature(XMLUni::fgXercesHandleMultipleImports, true);
        fSAX2Parser->setFeature(XMLUni::fgXercesSchemaFullChecking,(gRunInfo.schemaFullChecking));

        // SAX2 expresses the validation scheme as two booleans:
        // core-validation on/off, plus "dynamic" for auto mode.
        switch (gRunInfo.valScheme) {
            case SAXParser::Val_Never:
                fSAX2Parser->setFeature(XMLUni::fgSAX2CoreValidation, false);
                break;
            case SAXParser::Val_Auto:
                fSAX2Parser->setFeature(XMLUni::fgSAX2CoreValidation, true);
                fSAX2Parser->setFeature(XMLUni::fgXercesDynamic, true);
                break;
            default: //SAXParser::Val_Always:
                fSAX2Parser->setFeature(XMLUni::fgSAX2CoreValidation, true);
                fSAX2Parser->setFeature(XMLUni::fgXercesDynamic, false);
                break;
        }

        fSAX2Parser->setFeature(XMLUni::fgSAX2CoreNameSpacePrefixes,(gRunInfo.doNamespacePrefixes));
        fSAX2Handler = new ThreadParser::SAX2Handler();
        fSAX2Handler->SAX2Instance = this;
        fSAX2Parser->setContentHandler(fSAX2Handler);
        fSAX2Parser->setErrorHandler(fSAX2Handler);
    }
}

// Destructor: at most one parser pointer and one handler pair are non-null;
// deleting the remaining nulls is a harmless no-op.
ThreadParser::~ThreadParser()
{
     delete fSAXParser;
     delete fSAX2Parser;
     delete fXercesDOMParser;
     delete fSAXHandler;
     delete fSAX2Handler;
     delete fDOMErrorHandler;
}

//------------------------------------------------------------------------
//
//  parse   - This is the method that is invoked by the rest of
//            the test program to actually parse an XML file.
//
//------------------------------------------------------------------------
//  Parse gRunInfo.files[fileNum] with whichever parser this object owns.
//  Returns the document checksum, or 0 if any parse error occurred.
//  (A checksum of 0 is also what an empty document would produce -- callers
//  treat 0 specially; see threadMain.)
int ThreadParser::parse(int fileNum)
{
    MemBufInputSource *mbis = 0;
    InFileInfo *fInfo = &gRunInfo.files[fileNum];
    bool errors = false;

    fCheckSum = 0;

    // For -mem runs, wrap the preloaded buffer; "false" means the input
    // source does not adopt (take ownership of) the buffer.
    if (gRunInfo.inMemory) {
        mbis = new MemBufInputSource((const XMLByte *) fInfo->fileContent,
                                     fInfo->fileSize,
                                     fInfo->uFileName,
                                     false);
    }

    try
    {
        if (gRunInfo.dom) {
            // Do a DOM parse
            fXercesDOMParser->resetDocumentPool();
            if (gRunInfo.inMemory)
                fXercesDOMParser->parse(*mbis);
            else
                fXercesDOMParser->parse(fInfo->fileName);
            fDoc = fXercesDOMParser->getDocument();
            // DOM builds a tree; checksum it by walking the tree.
            domCheckSum(fDoc);
        }
        else if (gRunInfo.sax) {
            // Do a SAX1 parse
            // (checksum accumulates via the handler callbacks during the parse)
            if (gRunInfo.inMemory)
                fSAXParser->parse(*mbis);
            else
                fSAXParser->parse(fInfo->fileName);
        }
        else {
            // Do a SAX2 parse
            if (gRunInfo.inMemory)
                fSAX2Parser->parse(*mbis);
            else
                fSAX2Parser->parse(fInfo->fileName);
        }
    }

    catch (const OutOfMemoryException&)
    {
        fprintf(stderr, " during parsing: %s\n OutOfMemoryException.\n", fInfo->fileName);
        errors = true;
    }
    catch (const XMLException& e)
    {
        char *exceptionMessage = XMLString::transcode(e.getMessage());
        fprintf(stderr, " during parsing: %s\n Exception message is: %s\n",
                fInfo->fileName, exceptionMessage);
        XMLString::release(&exceptionMessage);
        errors = true;
    }
    catch (const DOMException& toCatch)
    {
        fprintf(stderr, " during parsing: %s\n DOMException code is: %i\n",
                fInfo->fileName, toCatch.code);
        errors = true;
    }
    catch (const SAXParseException& e)
    {
        // Rethrown by our warning/error/fatalError handlers.
        char *exceptionMessage = XMLString::transcode(e.getMessage());
        fprintf(stderr, " during parsing: %s\n Exception message is: %s\n",
                fInfo->fileName, exceptionMessage);
        XMLString::release(&exceptionMessage);
        errors = true;
    }
    catch (...)
    {
        fprintf(stderr, "Unexpected exception during parsing\n");
        errors = true;
    }

    delete mbis;
    if (errors) {
        fflush(stderr);
        return 0;      // if errors occurred, return zero as if checksum = 0;
    }
    return fCheckSum;
}


//
//  addToCheckSum - private function, used within ThreadParser in
//                  computing the checksum of the XML file.
//
//                  Unichar Strings to be added to the checksum
//                  can either be null terminated (omit len param, which
//                  will then default to -1), or provide an explicit
//                  length.
//
void ThreadParser::addToCheckSum(const XMLCh *chars, XMLSize_t len)
{
    // Order-sensitive polynomial hash: h = h*5 + c for each character.
    if (len == (XMLSize_t)-1)
    {
        // Null terminated string.
        while (*chars != 0)
        {
            fCheckSum = fCheckSum*5 + *chars;
            chars++;
        }
    }
    else
    {
        // String with character count.
        XMLSize_t i;
        for (i=0; i<len; i++)
            fCheckSum = fCheckSum*5 + chars[i];
    }
}


//
// startElement - our SAX handler callback function for startElement.
//                Update the document checksum with the element name
//                and any attribute names and values.
//
void ThreadParser::SAXHandler::startElement(const XMLCh *const name, AttributeList &attributes)
{
    SAXInstance->addToCheckSum(name);
    XMLSize_t n = attributes.getLength();
    XMLSize_t i;
    for (i=0; i<n; i++)
    {
        const XMLCh *attNam = attributes.getName(i);
        SAXInstance->addToCheckSum(attNam);
        const XMLCh *attVal = attributes.getValue(i);
        SAXInstance->addToCheckSum(attVal);
    }
}

//
// startElement - our SAX2 handler callback function for startElement.
//                Update the document checksum with the element name
//                and any attribute names and values.
// void ThreadParser::SAX2Handler::startElement(const XMLCh *const /*uri*/, const XMLCh *const localname, const XMLCh *const /*qname*/, const Attributes& attributes) { SAX2Instance->addToCheckSum(localname); XMLSize_t n = attributes.getLength(); XMLSize_t i; for (i=0; i<n; i++) { const XMLCh *attNam = attributes.getQName(i); SAX2Instance->addToCheckSum(attNam); const XMLCh *attVal = attributes.getValue(i); SAX2Instance->addToCheckSum(attVal); } } // // domCheckSum - Compute the check sum for a DOM node. // Works recursively - initially called with a document node. // void ThreadParser::domCheckSum(const DOMNode *node) { const XMLCh *s; DOMNode *child; DOMNamedNodeMap *attributes; switch (node->getNodeType() ) { case DOMNode::ELEMENT_NODE: { s = node->getNodeName(); // the element name attributes = node->getAttributes(); // Element's attributes XMLSize_t numAttributes = attributes->getLength(); XMLSize_t i; for (i=0; i<numAttributes; i++) domCheckSum(attributes->item(i)); addToCheckSum(s); // Content and Children for (child=node->getFirstChild(); child!=0; child=child->getNextSibling()) domCheckSum(child); break; } case DOMNode::ATTRIBUTE_NODE: { s = node->getNodeName(); // The attribute name addToCheckSum(s); s = node->getNodeValue(); // The attribute value if (s != 0) addToCheckSum(s); break; } case DOMNode::TEXT_NODE: case DOMNode::CDATA_SECTION_NODE: { s = node->getNodeValue(); addToCheckSum(s); break; } case DOMNode::ENTITY_REFERENCE_NODE: case DOMNode::DOCUMENT_NODE: { // For entity references and the document, nothing is dirctly // added to the checksum, but we do want to process the chidren nodes. // for (child=node->getFirstChild(); child!=0; child=child->getNextSibling()) domCheckSum(child); break; } case DOMNode::ENTITY_NODE: case DOMNode::PROCESSING_INSTRUCTION_NODE: case DOMNode::COMMENT_NODE: case DOMNode::DOCUMENT_TYPE_NODE: case DOMNode::DOCUMENT_FRAGMENT_NODE: case DOMNode::NOTATION_NODE: break; } } // // Recompute the checksum. 
Meaningful only for DOM, will tell us whether // a failure is transient, or whether the DOM data is permanently corrupted. // int ThreadParser::reCheck() { if (gRunInfo.dom) { fCheckSum = 0; domCheckSum(fDoc); } return fCheckSum; } // // domPrint - Dump the contents of a DOM node. // For debugging failures, when all else fails. // Works recursively - initially called with a document node. // void ThreadParser::domPrint() { printf("Begin DOMPrint ...\n"); if (gRunInfo.dom) { try { XMLCh tempStr[100]; XMLString::transcode("LS", tempStr, 99); DOMImplementation *impl = DOMImplementationRegistry::getDOMImplementation(tempStr); DOMLSSerializer *theSerializer = ((DOMImplementationLS*)impl)->createLSSerializer(); DOMLSOutput *theOutput = ((DOMImplementationLS*)impl)->createLSOutput(); XMLFormatTarget *myFormTarget = new StdOutFormatTarget(); theOutput->setByteStream(myFormTarget); DOMNode *doc = fXercesDOMParser->getDocument(); theSerializer->write(doc,theOutput); delete myFormTarget; theSerializer->release(); theOutput->release(); } catch (...) { // do nothing } } printf("End DOMPrint\n"); } //---------------------------------------------------------------------- // // parseCommandLine Read through the command line, and save all // of the options in the gRunInfo struct. // // Display the usage message if the command line // is no good. // // Probably ought to be a member function of RunInfo. // //---------------------------------------------------------------------- void parseCommandLine(int argc, char **argv) { gRunInfo.doGrammarCaching = false; gRunInfo.quiet = false; // Set up defaults for run. 
gRunInfo.verbose = false; gRunInfo.stopNow = false; gRunInfo.dom = false; gRunInfo.sax = true; gRunInfo.reuseParser = false; gRunInfo.inMemory = false; gRunInfo.dumpOnErr = false; gRunInfo.doSchema = false; gRunInfo.schemaFullChecking = false; gRunInfo.doNamespaces = false; gRunInfo.doInitialParse = false; gRunInfo.doNamespacePrefixes = false; gRunInfo.valScheme = SAXParser::Val_Auto; gRunInfo.numThreads = 2; gRunInfo.totalTime = 0; gRunInfo.numInputFiles = 0; gRunInfo.numParses = 0; try // Use exceptions for command line syntax errors. { int argnum = 1; while (argnum < argc) { if (strcmp(argv[argnum], "-quiet") == 0) gRunInfo.quiet = true; else if (strcmp(argv[argnum], "-verbose") == 0) gRunInfo.verbose = true; else if (strncmp(argv[argnum], "-v=", 3) == 0) { const char* const parm = &argv[argnum][3]; if (!strcmp(parm, "never")) gRunInfo.valScheme = SAXParser::Val_Never; else if (!strcmp(parm, "auto")) gRunInfo.valScheme = SAXParser::Val_Auto; else if (!strcmp(parm, "always")) gRunInfo.valScheme = SAXParser::Val_Always; else { fprintf(stderr, "Unrecognized -v option \"%s\"\n", parm); throw 1; } } else if (strcmp(argv[argnum], "-v") == 0) { fprintf(stderr, "Please note the -v option has been changed to -v=[always | never | auto]\n"); fprintf(stderr, "ThreadTest will continue with -v=always\n"); gRunInfo.valScheme = SAXParser::Val_Always; } else if (strcmp(argv[argnum], "-s") == 0) gRunInfo.doSchema = true; else if (strcmp(argv[argnum], "-f") == 0) gRunInfo.schemaFullChecking = true; else if (strcmp(argv[argnum], "-n") == 0) gRunInfo.doNamespaces = true; else if (strcmp(argv[argnum], "-p") == 0) gRunInfo.doNamespacePrefixes = true; else if (!strncmp(argv[argnum], "-parser=", 8)) { const char* const parm = &argv[argnum][8]; if (!strcmp(parm, "dom")) { gRunInfo.dom = true; gRunInfo.sax = false; } else if (!strcmp(parm, "sax")) { gRunInfo.dom = false; gRunInfo.sax = true; } else if (!strcmp(parm, "sax2")) { gRunInfo.dom = false; gRunInfo.sax = false; } else { 
fprintf(stderr, "Unrecognized -parser option \"%s\"\n", parm); throw 1; } } else if (strcmp(argv[argnum], "-init") == 0) gRunInfo.doInitialParse = true; else if (strcmp(argv[argnum], "-reuse") == 0) gRunInfo.reuseParser = true; else if (strcmp(argv[argnum], "-dump") == 0) gRunInfo.dumpOnErr = true; else if (strcmp(argv[argnum], "-mem") == 0) gRunInfo.inMemory = true; else if (strcmp(argv[argnum], "-threads") == 0) { ++argnum; if (argnum >= argc) { fprintf(stderr, "Invalid -threads option (missing # of threads)\n"); throw 1; } gRunInfo.numThreads = atoi(argv[argnum]); if (gRunInfo.numThreads < 0) { fprintf(stderr, "Invalid -threads option (negative # of threads)\n"); throw 1; } } else if (strcmp(argv[argnum], "-time") == 0) { ++argnum; if (argnum >= argc) { fprintf(stderr, "Invalid -time option (missing time value)\n"); throw 1; } gRunInfo.totalTime = atoi(argv[argnum]); if (gRunInfo.totalTime < 1) { fprintf(stderr, "Invalid -time option (time value < 1)\n"); throw 1; } } else if (strcmp(argv[argnum], "-gc") == 0) gRunInfo.doGrammarCaching = true; else if (strcmp(argv[argnum], "-parses") == 0) { ++argnum; if (argnum >= argc) { fprintf(stderr, "Invalid -parses option (missing # of parses)\n"); throw 1; } int temp = atoi(argv[argnum]); if (temp < 0) { fprintf(stderr, "Invalid -parses option (negative # of parses)\n"); throw 1; } gRunInfo.numParses = temp; } else if (argv[argnum][0] == '-') { fprintf(stderr, "Unrecognized command line option. Scanning \"%s\"\n", argv[argnum]); throw 1; } else { gRunInfo.numInputFiles++; if (gRunInfo.numInputFiles >= MAXINFILES) { fprintf(stderr, "Too many input files. Limit is %d\n", MAXINFILES); throw 1; } gRunInfo.files[gRunInfo.numInputFiles-1].fileName = argv[argnum]; } argnum++; } // We've made it through the command line. // Verify that at least one input file to be parsed was specified. 
if (gRunInfo.numInputFiles == 0) { fprintf(stderr, "No input XML file specified on command line.\n"); throw 1; }; if (gRunInfo.numParses && gRunInfo.totalTime) { fprintf(stderr, "Both -parses nnn and -time nnn were specified. Ignoring -time nnn.\n"); } } catch (int) { fprintf(stderr, "usage: ThreadTest [-v] [-threads nnn] [-time nnn] [-quiet] [-verbose] xmlfile...\n" " -v=xxx Validation scheme [always | never | auto]. Default is AUTO.\n" " -n Enable namespace processing. Defaults to off.\n" " -s Enable schema processing. Defaults to off.\n" " -f Enable full schema constraint checking. Defaults to off.\n" " -parser=xxx Parser Type [dom | sax | sax2]. Default is SAX (SAX1).\n" " -p Enable namespace prefixes. Defaults to off.\n" " (Only used with -parser=sax2, ignored otherwise.)\n" " -quiet Suppress periodic status display.\n" " -verbose Display extra messages.\n" " -reuse Retain and reuse parser. Default creates new for each parse.\n" " -threads nnn Number of threads. Default is 2.\n" " -time nnn Total time to run, in seconds. Default is forever.\n" " -parses nnn Run for nnn parses instead of time. Default is to use time\n" " -dump Dump DOM tree on error.\n" " -mem Read files into memory once only, and parse them from there.\n" " -gc Enable grammar caching (i.e. grammar cached and used in subsequent parses). Defaults to off.\n" " -init Perform an initial parse of the file(s) before starting up the individual threads.\n\n" ); exit(1); } } //--------------------------------------------------------------------------- // // ReadFilesIntoMemory For use when parsing from memory rather than // reading the files each time, here is the code that // reads the files into local memory buffers. // // This function is only called once, from the main // thread, before all of the worker threads are started. 
// //---------------------------------------------------------------------------

//----------------------------------------------------------------------
//
//  ReadFilesIntoMemory   When in-memory parsing was requested
//      (gRunInfo.inMemory), read the complete contents of every input
//      file into gRunInfo.files[i].fileContent and cache the transcoded
//      (XMLCh) file name.  Any I/O failure is fatal: cached buffers are
//      released and the process exits.
//
//      NOTE(review): gRunInfo, gThreadInfo, gp and gpMemMgr appear to be
//      globals declared earlier in this file, outside this excerpt.
//
//----------------------------------------------------------------------
void ReadFilesIntoMemory()
{
    int     fileNum;
    FILE    *fileF;
    size_t  t;

    if (gRunInfo.inMemory)
    {
        for (fileNum = 0; fileNum <gRunInfo.numInputFiles; fileNum++)
        {
            InFileInfo *fInfo = &gRunInfo.files[fileNum];
            fInfo->uFileName = XMLString::transcode(fInfo->fileName);
            fileF = fopen( fInfo->fileName, "rb" );
            if (fileF == 0) {
                fprintf(stderr, "Can not open file \"%s\".\n", fInfo->fileName);
                clearFileInfoMemory();
                exit(-1);
            }
            // Determine the file size by seeking to the end.
            fseek(fileF, 0, SEEK_END);
            fInfo->fileSize = ftell(fileF);
            fseek(fileF, 0, SEEK_SET);
            // One extra byte so the buffer can be NUL-terminated below.
            fInfo->fileContent = new char[fInfo->fileSize + 1];
            t = fread(fInfo->fileContent, 1, fInfo->fileSize, fileF);
            if (t != fInfo->fileSize)
            {
                fprintf(stderr, "Error reading file \"%s\".\n", fInfo->fileName);
                clearFileInfoMemory();
                exit(-1);
            }
            fclose(fileF);
            fInfo->fileContent[fInfo->fileSize] = 0;
        }
    }
}

//----------------------------------------------------------------------
//
//  clearFileInfoMemory   Release the transcoded file names and the
//      in-memory file buffers allocated by ReadFilesIntoMemory().
//      Safe to call on error paths before exiting.
//
//----------------------------------------------------------------------
void clearFileInfoMemory()
{
    int fileNum;

    if (gRunInfo.inMemory)
    {
        for (fileNum = 0; fileNum <gRunInfo.numInputFiles; fileNum++)
        {
            InFileInfo *fInfo = &gRunInfo.files[fileNum];
            XMLString::release(&fInfo->uFileName);
            delete [] fInfo->fileContent;
        }
    }
}

//----------------------------------------------------------------------
//
//  threadMain   The main function for each of the swarm of test threads.
//               Run in an infinite loop, parsing each of the documents
//               given on the command line in turn.
//
//----------------------------------------------------------------------
#ifdef HAVE_PTHREAD
extern "C" {
void threadMain (void *param)
#else
unsigned long WINAPI threadMain (void *param)
#endif
{
    ThreadInfo *thInfo = (ThreadInfo *)param;
    ThreadParser *thParser = 0;

    if (gRunInfo.verbose)
        printf("Thread #%d: starting\n", thInfo->fThreadNum);

    // Incremented before first use below, so each thread starts at doc 0.
    int docNum = gRunInfo.numInputFiles;

    //
    // Each time through this loop, one file will be parsed and its checksum
    // computed and compared with the precomputed value for that file.
    //
    while (gRunInfo.stopNow == false)
    {
        if (gRunInfo.numParses == 0 || thInfo->fParses < gRunInfo.numParses)
        {
            thInfo->fInProgress = true;

            // Lazily create the parser (recreated each pass unless
            // gRunInfo.reuseParser is set -- see bottom of loop).
            if (thParser == 0)
                thParser = new ThreadParser;

            docNum++;

            if (docNum >= gRunInfo.numInputFiles)
                docNum = 0;

            InFileInfo *fInfo = &gRunInfo.files[docNum];

            if (gRunInfo.verbose )
                printf("Thread #%d: parse %d starting file %s\n", thInfo->fThreadNum, thInfo->fParses, fInfo->fileName);

            int checkSum = 0;

            checkSum = thParser->parse(docNum);

            // For the case where we skip the preparse we will have nothing to
            // compare the first parse's results to ... so if this looks like first
            // parse move the checkSum back into the gRunInfo data for this file.
            if (gRunInfo.files[docNum].checkSum == 0)
            {
                gRunInfo.files[docNum].checkSum = checkSum;
            }
            else if (checkSum != gRunInfo.files[docNum].checkSum)
            {
                if (checkSum == 0)
                {
                    // parse returns 0 if there was an error so do this to get the real
                    // checksum value
                    checkSum = thParser->getCheckSum();
                }

                fprintf(stderr, "\nThread %d: Parse Check sum error on file \"%s\" for parse # %d. Expected %x, got %x\n",
                    thInfo->fThreadNum, fInfo->fileName, thInfo->fParses, fInfo->checkSum, checkSum);

                double totalParsesCompleted = 0;
                for (int threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
                {
                    totalParsesCompleted += gThreadInfo[threadNum].fParses;
                }
                fprintf(stderr, "Total number of parses completed is %f.\n", totalParsesCompleted);

                // Revisit - let the loop continue to run?
                int secondTryCheckSum = thParser->reCheck();
                fprintf(stderr, " Retry checksum is %x\n", secondTryCheckSum);
                if (gRunInfo.dumpOnErr && gRunInfo.dom)
                {
                    thParser->domPrint();
                }
                fflush(stdout);
                fflush(stderr);
                clearFileInfoMemory();
                exit(-1);
            }

            if (gRunInfo.reuseParser == false)
            {
                delete thParser;
                thParser = 0;
            }

            // Heartbeat is watched by main()'s monitoring loop.
            thInfo->fHeartBeat = true;
            thInfo->fParses++;
            thInfo->fInProgress = false;
        }
        else
        {
            // This thread has finished its quota; idle until stopNow.
            ThreadFuncs::Sleep(1000);
        }
    }

    delete thParser;
#ifdef HAVE_PTHREAD
    return;
}
#else
    return 0;
#endif
}

//----------------------------------------------------------------------
//
//   main
//
//----------------------------------------------------------------------
int main (int argc, char **argv)
{
    parseCommandLine(argc, argv);

    //
    // Initialize the XML system.
    //
    try
    {
        XMLPlatformUtils::Initialize();
    }
    catch (...)
    {
        fprintf(stderr, "Exception from XMLPlatfromUtils::Initialize.\n");
        return 1;
    }

    /** Grammar caching thread testing */
    // Initialize memory manger and grammar pool
    // set doInitialParse to true so that the first parse will cache the
    // grammar and it'll be used in subsequent parses
    if (gRunInfo.doSchema == true && gRunInfo.doNamespaces == true && gRunInfo.doGrammarCaching == true)
    {
        gpMemMgr = new MemoryManagerImpl();
        gp = new XMLGrammarPoolImpl(gpMemMgr);
        gRunInfo.doInitialParse = true;
    }

    //
    // If we will be parsing from memory, read each of the input files
    // into memory now.
    //
    ReadFilesIntoMemory();

    // Initialize checksums to zero so we can check first parse and if
    // zero then we need to move first parse's checksum into array. This
    // is for the cse where we skip the initial parse.
    for (int n = 0; n < gRunInfo.numInputFiles; n++)
    {
        gRunInfo.files[n].checkSum = 0;
    }

    if (gRunInfo.doInitialParse)
    {
        //
        // While we are still single threaded, parse each of the documents
        // once, to check for errors, and to note the checksum.
        // Blow off the rest of the test if there are errors.
        //
        ThreadParser *mainParser = new ThreadParser;
        int     n;
        bool    errors = false;
        int     cksum;

        for (n = 0; n < gRunInfo.numInputFiles; n++)
        {
            char *fileName = gRunInfo.files[n].fileName;
            if (gRunInfo.verbose)
                printf("%s checksum is ", fileName);

            cksum = mainParser->parse(n);

            if (cksum == 0)
            {
                fprintf(stderr, "An error occurred while initially parsing %s\n", fileName);
                errors = true;
            };

            gRunInfo.files[n].checkSum = cksum;
            if (gRunInfo.verbose )
                printf("%x\n", cksum);
            if (gRunInfo.dumpOnErr && errors && gRunInfo.dom)
            {
                mainParser->domPrint();
            }
        }
        delete mainParser;

        if (errors)
        {
            fprintf(stderr, "Quitting due to error incurred during initial parse\n");
            clearFileInfoMemory();
            return 1;
        }
    }

    //
    // Fire off the requested number of parallel threads
    //
    if (gRunInfo.numThreads == 0)
    {
        clearFileInfoMemory();
        exit(0);
    }

    gThreadInfo = new ThreadInfo[gRunInfo.numThreads];

    int threadNum;
    for (threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
    {
        gThreadInfo[threadNum].fThreadNum = threadNum;
        ThreadFuncs::startThread(threadMain, &gThreadInfo[threadNum]);
    }

    if (gRunInfo.numParses)
    {
        // Count-limited run: poll once a second until every thread has
        // completed its quota of parses.
        bool notDone;
        while (true)
        {
            ThreadFuncs::Sleep(1000);
            notDone = false;

            for (threadNum = 0; threadNum < gRunInfo.numThreads; threadNum++)
            {
                if (gThreadInfo[threadNum].fParses < gRunInfo.numParses)
                    notDone = true;
            }
            if (notDone == false)
            {
                break;
            }
        }
    }
    else
    {
        //
        // Loop, watching the heartbeat of the worker threads.
        // Each second, display "+" when all threads have completed a parse
        // display "." if some thread hasn't since previous "+"
        //
        unsigned long startTime = XMLPlatformUtils::getCurrentMillis();
        int elapsedSeconds = 0;
        while (gRunInfo.totalTime == 0 || gRunInfo.totalTime > elapsedSeconds)
        {
            ThreadFuncs::Sleep(1000);
            if (gRunInfo.quiet == false && gRunInfo.verbose == false)
            {
                char c = '+';
                for (threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
                {
                    if (gThreadInfo[threadNum].fHeartBeat == false)
                    {
                        c = '.';
                        break;
                    }
                }
                fputc(c, stdout);
                fflush(stdout);
                // Reset all heartbeats only after a fully "+" interval.
                if (c == '+')
                    for (threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
                        gThreadInfo[threadNum].fHeartBeat = false;
            }
            elapsedSeconds = (XMLPlatformUtils::getCurrentMillis() - startTime) / 1000;
        }
    }

    //
    // Time's up, we are done. (We only get here if this was a timed run)
    // Tally up the total number of parses completed by each of the threads.
    //
    gRunInfo.stopNow = true; // set flag, which will cause worker threads to stop.

    //
    // Make sure all threads are done before terminate
    //
    for (threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
    {
        while (gThreadInfo[threadNum].fInProgress == true)
        {
            ThreadFuncs::Sleep(1000);
        }
        if (gRunInfo.verbose)
            printf("Thread #%d: is finished.\n", threadNum);
    }

    //
    // We are done! Count the number of parse and terminate the program
    //
    double totalParsesCompleted = 0;
    for (threadNum=0; threadNum < gRunInfo.numThreads; threadNum++)
    {
        totalParsesCompleted += gThreadInfo[threadNum].fParses;
        // printf("%f ", totalParsesCompleted);
    }

    if (gRunInfo.quiet == false)
    {
        if (gRunInfo.numParses)
        {
            printf("\n%8.0f total parses were completed.\n", totalParsesCompleted);
        }
        else
        {
            double parsesPerMinute = totalParsesCompleted / (double(gRunInfo.totalTime) / double(60));
            printf("\n%8.2f parses per minute.\n", parsesPerMinute);
        }
    }

    // delete grammar pool and memory manager
    if (gp)
    {
        delete gp;
        delete gpMemMgr;
    }

    XMLPlatformUtils::Terminate();

    clearFileInfoMemory();

    delete [] gThreadInfo;

    printf("Test Run Successfully\n");

    return 0;
}
{ "pile_set_name": "Github" }
# Onedrive Music Library plugin for Volumio 2

## About the plugin

This plugin allows you to listen to sound files that are stored on your Onedrive.
Before using it, make sure that you log on to Onedrive under "settings".

### Limitations

This plugin is still under development and currently has several limitations.

- Volumio will show an ugly (temporary) URL as "currently playing".
- The plugin doesn't allow searching through your Onedrive, it only allows browsing and starting the file that is found.

## Development Notes

- Details on plugin system can be found here: https://volumio.github.io/docs/Plugin_System/Plugin_System_Overview
- Music service based on webradio code: https://github.com/volumio/Volumio2/blob/master/app/plugins/music_service/webradio/index.js
{ "pile_set_name": "Github" }
/******************************************************************************
 * Copyright (c) 2004, 2011 IBM Corporation
 * All rights reserved.
 * This program and the accompanying materials
 * are made available under the terms of the BSD License
 * which accompanies this distribution, and is available at
 * http://www.opensource.org/licenses/bsd-license.php
 *
 * Contributors:
 *     IBM Corporation - initial implementation
 *****************************************************************************/

/* GNU ld linker script for a 64-bit PowerPC ELF module. */

OUTPUT_FORMAT("elf64-powerpc", "elf64-powerpc", "elf64-powerpc")
OUTPUT_ARCH(powerpc:common64)

/* Execution begins at the module's init routine. */
ENTRY(module_init)

SECTIONS {
	/* Code/data image is linked at the fixed address 0xF800000;
	 * __module_start/__module_end bracket the whole image. */
	.code 0xF800000: {
		__module_start = .;
		/* Place module_entry.o's function descriptors (.opd) first,
		 * presumably so the entry descriptor leads the image --
		 * TODO confirm against the loader. */
		module_entry.o(.opd)
		*(.text .stub .text.* .gnu.linkonce.t.*)
		*(.sfpr .glink)
		*(.rodata .rodata.* .gnu.linkonce.r.*)
		*(.data .data.* .gnu.linkonce.d.*)
		*(.opd)
	}
	/* Global offset table / TOC. */
	.got : {
		_got = .;
		*(.got .toc)
	}
	/* Zero-initialized data, bounded by __bss_start/__bss_end. */
	.bss : {
		__bss_start = .;
		*(*COM* .bss .gnu.linkonce.b.*)
		__bss_end = .;
	}
	__module_end = .;
}
{ "pile_set_name": "Github" }
<h1>Gebruiks-, bijdrage- en hergebruiksvoorwaarden</h1> <p>&nbsp;</p> <h2>Inleiding</h2> <h3>Over Open Food Facts</h3> <p>Open Food Facts inventariseert alle voedingsmiddelen uit de hele wereld.</p> <p>Informatie over voedingsmiddelen (foto's, ingrediënten, voedingsfeiten etc.) wordt op een collaboratieve manier verzameld en voor iedereen beschikbaar gemaakt voor alle gebruik in een gratis en open database.</p> <p>Deze database kan worden bekeken op de <a href="https://openfoodfacts.org"> Open Food Facts </a> website (openfoodfacts.org) waar gebruikers (bijdragers) ook de productgegevens kunnen toevoegen, aanvullen of corrigeren. </p> <p>De Open Food Facts-database is beschikbaar onder de <a href="https://opendatacommons.org/licenses/odbl/1.0/"> Open Database-licentie </a>. De individuele inhoud van de database is beschikbaar onder de <a href="https://opendatacommons.org/licenses/dbcl/1.0/"> database-inhoudslicentie </a>. De productfoto's zijn beschikbaar onder de <a href="https://creativecommons.org/licenses/by-sa/3.0/deed.en"> Creative Commons Naamsvermelding Gelijk delen </a> licentie. De foto's kunnen grafische elementen bevatten die onderhevig zijn aan auteursrechten of andere rechten, die in sommige gevallen kunnen worden gereproduceerd (citaatrechten of redelijk gebruik). </p> <h3>Ondersteun Open Food Facts</h3> <p>De Open Food Facts-database en -service wordt gepubliceerd door de vzw <0>Open Food Facts (Franse "Loi 1901" Association).<0 /> Adres: 21 rue des Iles, 94100 Saint-Maur des Fossés, Frankrijk e-mail: <a href="mailto:[email protected]"> [email protected] </a> Hierna wordt de editor aangeduid met de naam Open Food Facts. 
</p> <h3>Gebruikers, bijdragers en hergebruikers</h3> <p>Deze algemene voorwaarden vormen een contract tussen Open Food Facts en de gebruikers, bijdragers en hergebruikers.</p> <p>De algemene voorwaarden zijn voor de duidelijkheid onderverdeeld in verschillende delen:</p> <p>"Gebruikers" zijn personen die de Open Food Facts-website bezoeken of de Open Food Facts-toepassingen gebruiken en toegang hebben tot de inhoud en/of de hulpmiddelen en services die zij leveren. <br> &rarr; <a href="#use"> specifieke voorwaarden voor gebruikers </a> </p> <p>"Bijdragers" zijn personen of entiteiten die inhoud aan de site en/of database toevoegen en/of deze inhoud bewerken. <br> &rarr; <a href="#contribution"> specifieke voorwaarden voor bijdragers </a> </p> <p>"Hergebruikers" zijn individuen of entiteiten die een deel van of de gehele inhoud van de site en/of van de database hergebruiken en/of opnieuw publiceren. <br> &rarr; <a href="#re-use"> specifieke voorwaarden voor hergebruikers </a> </p> <p>Bovenop de specifieke voorwaarden en bepalingen, zijn de <a href="#general"> algemene voorwaarden </a> op iedereen van toepassing.</p> <p>&nbsp;</p> <h2 id="use">Termen en voorwaarden voor gebruikers</h2> <h3 id="protection_of_personal_information">Bescherming van persoonlijke informatie</h3> <p>Om te profiteren van bepaalde functies, is het mogelijk om een persoonlijk account op de site te maken. Gebruikers stemmen ermee in om echte informatie in te voeren als ze zich registreren, en om deze bij te werken als deze verandert. De persoonlijke informatie wordt verzameld om de service mogelijk te maken en wordt niet verkocht of doorgegeven aan derden.</p> <p>Deze site is ingeschreven bij de Franse commissie "Computers and Freedom" (CNIL) onder nummer 1528436. In overeenstemming met de Franse "Computers and Freedom" -wet («informatique et libertés») van 6 januari 1978 en gewijzigd in 2004, hebt u recht op toegang tot en correctie van de gegevens die betrekking hebben op u. 
Om dit recht uit te oefenen, kunt u een e-mail sturen naar [email protected]</p> <h3>Nauwkeurigheid van de verstrekte informatie en gegevens</h3> <p>Open Food Facts garandeert niet de juistheid van de gegevens die aanwezig is op de site en in de database (inclusief, maar niet beperkt tot, de productgegevens: foto's, barcode, naam, generieke naam, hoeveelheid, verpakking, merken, categorieën, oorsprong, labels, certificeringen, prijzen, verpakkingscodes, ingrediënten, additieven, allergenen, sporen, voedingsfeiten, ecologische gegevens, enz.).</p> <p>De informatie en gegevens worden ingevoerd door bijdragers aan de site. Dit kan fouten bevatten, bijvoorbeeld als gevolg van onnauwkeurige informatie op labels en verpakkingen, handmatige invoer van gegevens of verwerking van gegevens.</p> <p>Om de gegevens door gebruikers te laten verifiëren, worden bijdragers uitgenodigd om foto's van labels en verpakkingen met de gegevens te uploaden.</p> <p>Gebruikers die fouten vinden, worden uitgenodigd om deze te corrigeren door bijdrager te worden. Het duurt slechts een paar minuten om je als bijdrager te registreren en om een productpagina te corrigeren.</p> <h3>Volledigheid van gegevens</h3> <p>Open Food Facts garandeert niet de volledigheid van de gegevens die aanwezig zijn op de site en in de database.</p> <p>Het feit dat een product op de site of in de database aanwezig is, garandeert niet dat alle gegevens met betrekking tot het product aanwezig zijn. Gebruikers die ontbrekende informatie of gegevens vinden, kunnen het product bewerken en toevoegen aan de productpagina.</p> <p>Niet alle voedingsmiddelen zijn aanwezig op Open Food Facts, gezien het grote aantal levensmiddelen dat in de wereld bestaat en het aantal nieuwe producten dat elke dag wordt gemaakt.</p> <p>De gemiddelden en andere statistische informatie worden berekend op basis van producten en gegevens die aanwezig zijn in de Open Food Facts-database en niet op alle bestaande producten op de markt. 
Evenzo worden de vergelijkingen met gemiddelden en productvergelijkingen vastgesteld op basis van producten en gegevens die in de database Open Food Facts aanwezig zijn.</p>

<h3>Let op</h3>

<p>De informatie en gegevens worden alleen verstrekt ter indicatieve informatie. Het kan fouten bevatten en mag niet voor medische doeleinden worden gebruikt.</p>

<h3>Vrijwaringsclausule</h3>

<p>De dienst wordt geleverd met al zijn fouten. Open Food Facts garandeert niet dat de dienst in overeenstemming is met een bepaald gebruik en garandeert niet dat deze verenigbaar is met andere diensten van derden.</p>

<p>Evenzo worden de gegevens geleverd met al haar fouten. Open Food Facts garandeert niet de nauwkeurigheid, volledigheid en conformiteit voor een bepaald gebruik.</p>

<p>De service kan tijdelijk worden gestopt voor onderhoud of om redenen die buiten de controle van Open Food Facts liggen, zoals technische problemen (hardware of software).</p>

<p>De uitgever van Open Food Facts kan niet aansprakelijk worden gesteld voor eventuele schade, direct of indirect, of enig verlies van gegevens, vanwege het gebruik of de onmogelijkheid om zijn diensten te gebruiken, of voor de toegang tot of de onmogelijkheid om toegang te krijgen tot de inhoud van de website, diensten, of op het mogelijke feit dat de informatie en gegevens niet juist of volledig zijn.</p>

<p>&nbsp;</p>

<h2 id="contribution">Termen en voorwaarden voor bijdrager</h2>

<h3>Registreren als een bijdrager</h3>

<p>Voor het toevoegen en/of bewerken van gegevens, met name productgegevens, moeten bijdragers zich registreren op de site.</p>

<p>Bijdragers stemmen ermee in om echte gegevens in te voeren wanneer ze zich registreren en om deze bij te werken als deze verandert.</p>

<h3>Bijdragen zijn openbaar</h3>

<p>Alle bijdragen worden gearchiveerd en de bewerkingsgeschiedenis van de productpagina's is openbaar.
Bovendien zijn de lijsten met producten toegevoegd of bewerkt door een specifieke bijdrager openbaar.</p> <p>Om de integriteit en traceerbaarheid van informatie en gegevens te waarborgen, kan het publieke karakter van bijdragen niet worden ingetrokken.</p> <h3>Licenties van bijdragen</h3> <p>Door gegevens en/of foto's toe te voegen, gaan bijdragers ermee akkoord om hun bijdragen onherroepelijk te plaatsen onder de <a href="https://opendatacommons.org/licenses/dbcl/1.0/">Database inhoud Licence 1.0</a> voor informatie en gegevens, en onder de <a href="https://creativecommons.org/licenses/by-sa/3.0/deed.en">Creative Commons Attribution ShareAlike licentie</a> voor foto's.</p> <h3>Attributie van de bijdragen</h3> <p>Bijdragers gaan ermee akkoord dat ze door een hergebruiker worden gecrediteerd door een link naar het product waaraan ze bijdragen.</p> <h3>Bijdragebronnen</h3> <p>Bijdragers stemmen ermee in alleen een bijdrage te leveren met gegevens en foto's waarvoor zij de reproductierechten bezitten.</p> <p>Bijdragers komen overeen om informatie over Open Food Facts, gegevens en foto's van andere websites (inclusief andere productdatabases, e-commerce websites, producenten sites etc.) 
toe te voegen.</p> <p>Gegevens die door bijdragers worden toegevoegd, moeten rechtstreeks afkomstig zijn van het etiket en de verpakking van het product.</p> <p>Foto's die door bijdragers zijn toegevoegd, moeten door de bijdragers zelf zijn gemaakt.</p> <h3>Foto's van de gegevens</h3> <p>Om de verificatie van de gegevens, en de correctie van mogelijke fouten mogelijk te maken, moeten bijdragers foto's van de verpakking met de gegevens uploaden naar Open Food Facts.</p> <h3>Bijdrageverificatie</h3> <p>Open Food Facts is niet verplicht om de juistheid en volledigheid van de gegevens te verifiëren.</p> <h3>Bijdrages bewerken</h3> <p>Bijdragen kunnen door andere bijdragers worden bewerkt, gecorrigeerd of aangevuld.</p> <h3>Verwijdering van bijdragen</h3> <p>Bijdragen kunnen worden verwijderd als ze niet voldoen aan de service of niet voldoen aan deze voorwaarden en bepalingen (bijvoorbeeld non-foodproducten, onnauwkeurige of onvolledige bijdragen, niet-persoonlijke bijdragen, enz.).</p> <p>In geval van herhaalde inbreuk kunnen alle bijdragen van een bijdrager worden verwijderd en kan zijn/haar toegang tot de site worden herroepen.</p> <h3>Vandalisme en publicatie van valse of onjuiste gegevens</h3> <p>Bijdragers die vrijwillig informatie of gegevens verwijderen en/of die onjuiste gegevensinformatie toevoegen, wordt de toegang tot de site ontzegd en juridische stappen tegen hen kunnen worden ondernomen. </p> <p>&nbsp;</p> <h2 id="re-use">Termen en voorwaarden voor hergebruikers</h2> <h3>Nauwkeurigheid en volledigheid van de gegevens</h3> <p>Alle <a href="#use"> gebruiksvoorwaarden </a>, waarschuwingen en beperkingen van verantwoordelijkheid die ze bevatten, zijn ook van toepassing op hergebruikers.</p> <h3>Licenties</h3> <p>Drie licenties zijn van toepassing op de verschillende onderdelen van de productendatabase van Open Food Facts. 
De licenties zijn vrije licenties die het gebruik en de reproductie van de inhoud voor alle doeleinden, met inbegrip van commercieel gebruik toestaan, onder bepaalde voorwaarden, met name de toekenning en het delen onder dezelfde voorwaarde van afgeleide werken.</p> <p>De Open Food Facts-database is beschikbaar onder de <a href="https://opendatacommons.org/licenses/odbl/1.0/"> Open Database-licentie </a>.</p> <p>Individuele inhoud van de database is beschikbaar onder de <a href="https://opendatacommons.org/licenses/dbcl/1.0/"> database-inhoudslicentie </a>.</p> <p>Productenafbeeldingen zijn beschikbaar onder de licentie <a href="https://creativecommons.org/licenses/by-sa/3.0/deed.en">Creative Commons Attribution ShareAlike</a>. De foto's kunnen grafische elementen bevatten die onderhevig zijn aan auteursrechten of andere rechten, die in sommige gevallen kunnen worden gereproduceerd (citaatrechten of redelijk gebruik). Zie hieronder voor een aantal beperkingen van de licenties. </p> <h3>Licentiebeperking</h3> <p>De licenties die in de bovenstaande paragraaf worden genoemd, hebben alleen betrekking op de rechten die horen bij Open Food Facts en zijn bijdragers. Andere rechten van derden kunnen van toepassing zijn.</p> <p>De Creative Commons Attribution ShareAlike-licentie voor afbeeldingen dekt bijvoorbeeld alleen de foto zelf en de rechten van de bijdragers die de foto hebben gemaakt. Andere rechten van derden kunnen van toepassing zijn, zoals: auteursrecht voor het productontwerp en grafische elementen die het bevat (illustraties, afbeeldingen enz.), Beeldrechten van mensen (bijv. Beroemdheden) op de verpakking, handelsmerkrechten enz.</p> <p>Afhankelijk van het gebruik en de jurisdicties kunnen er uitzonderingen op deze rechten van derden bestaan en mogelijk van toepassing zijn. 
Zo "Fair use" in de Verenigde Staten en "quoting right" in Europa.</p> <p>Het is de verantwoordelijkheid van individuen en entiteiten die wensen om opnieuw gebruik maken van de informatie, gegevens en/of foto's te controleren door zelf de rechten die evenals kont vrijstellingen van toepassing kunnen zijn op die rechten, afhankelijk van het geplande gebruik en de rechtsgebieden ze in te dienen naar.</p> <h3>Auteurschap en attributie</h3> <p>De personen en entiteiten die informatie, gegevens en/of foto's van de Open Food Facts-site of -database reproduceren of hergebruiken, hebben om de licentie te vermelden en het auteurschap toe te kennen aan Open Food Facts met een link naar https://openfoodfacts.org, de juiste lokale versie (bijv. https://en.openfoodfacts.org) of de productpagina, wanneer de informatie en gegevens die worden gereproduceerd of hergebruikt behoren tot een specifiek product. Een dergelijke attributie is ook noodzakelijk voor afgeleide werken.</p> <h3>Share Alike</h3> <p>Derivatief werk moet onder dezelfde voorwaarden worden gedeeld. De tekst van elke vergunning geeft de exacte voorwaarden voor het delen van afgeleide werken.</p> <h3>Vrijwaringsclausule</h3> <p>De verantwoordelijkheid van Open Food Facts, zijn uitgever en zijn bijdragers, kan niet worden aangewend als een specifiek hergebruik niet voldoet aan de wet. De hergebruiker moet alle nodige voorzorgsmaatregelen en advies aannemen die hij/zij nodig acht. </p> <p>&nbsp;</p> <h2 id="general">Algemene voorwaarden</h2> <p>De algemene voorwaarden zijn van toepassing op iedereen: bezoekers, bijdragers en hergebruikers.</p> <h3>Vertalingen van de voorwaarden voor gebruik, bijdrage en hergebruik</h3> <p>Deze voorwaarden zijn voor informatiedoeleinden vertaald in het Nederlands om het gebruik van de site voor bezoekers, bijdragers en hergebruikers wereldwijd te vergemakkelijken. 
In geval van tegenstrijdigheid prevaleren de oorspronkelijke <a href="https://fr.openfoodfacts.org/conditions-d-utilisation">voorwaarden en voorwaarden in het Frans</a>.</p>

<h3>Wijzigingen van de voorwaarden voor gebruik, bijdrage en hergebruik</h3>

<p>De voorwaarden voor gebruik, bijdrage en hergebruik kunnen op elk moment worden gewijzigd. Wijzigingen zijn van toepassing als ze gepubliceerd worden op de pagina <a href="https://fr.openfoodfacts.org/conditions-d-utilisation">https://fr.openfoodfacts.org/conditions-d-use</a>. Gebruikers, bijdragers en hergebruikers worden gevraagd om deze voorwaarden regelmatig te raadplegen. </p>

<h3>Acceptatie zonder reserve</h3>

<p>Het gebruik, bijdragen of hergebruiken van de site en/of informatie, gegevens of foto's van Open Food Facts impliceert volledige aanvaarding van deze voorwaarden voor gebruik, bijdrage en hergebruik.</p>

<h3 id="applicable_law">Toepasbaar recht</h3>

<p>Het toepasbare recht is het Franse recht.</p>
{ "pile_set_name": "Github" }
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

// Local-space bounding rect of the primitive; written by
// init_transform_vs() and consumed for edge AA in the fragment stage.
flat varying vec4 vTransformBounds;

#ifdef WR_VERTEX_SHADER

// Each transform entry occupies 8 vec4 texels in the palette texture:
// 4 rows of the matrix followed by 4 rows of its inverse.
#define VECS_PER_TRANSFORM 8U
uniform HIGHP_SAMPLER_FLOAT sampler2D sTransformPalette;

// Record the primitive's local bounds for the fragment stage.
void init_transform_vs(vec4 local_bounds) {
    vTransformBounds = local_bounds;
}

struct Transform {
    mat4 m;               // transform matrix
    mat4 inv_m;           // inverse of m
    bool is_axis_aligned; // true when the top 8 bits of the id are zero
};

// Fetch a Transform from the palette texture.  The low 24 bits of `id`
// index into the palette; a non-zero high byte marks the transform as
// not axis-aligned.
// NOTE(review): get_fetch_uv and TEXEL_FETCH are defined elsewhere in
// the shader build -- outside this excerpt.
Transform fetch_transform(int id) {
    Transform transform;

    transform.is_axis_aligned = (id >> 24) == 0;
    int index = id & 0x00ffffff;

    // Create a UV base coord for each 8 texels.
    // This is required because trying to use an offset
    // of more than 8 texels doesn't work on some versions
    // of macOS.
    ivec2 uv = get_fetch_uv(index, VECS_PER_TRANSFORM);
    ivec2 uv0 = ivec2(uv.x + 0, uv.y);

    transform.m[0] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(0, 0));
    transform.m[1] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(1, 0));
    transform.m[2] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(2, 0));
    transform.m[3] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(3, 0));

    transform.inv_m[0] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(4, 0));
    transform.inv_m[1] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(5, 0));
    transform.inv_m[2] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(6, 0));
    transform.inv_m[3] = TEXEL_FETCH(sTransformPalette, uv0, 0, ivec2(7, 0));

    return transform;
}

// Return the intersection of the plane (set up by "normal" and "point")
// with the ray (set up by "ray_origin" and "ray_dir"),
// writing the resulting scalar into "t".
bool ray_plane(vec3 normal, vec3 pt, vec3 ray_origin, vec3 ray_dir, out float t)
{
    // A near-zero denominator means the ray is (almost) parallel to the
    // plane, so there is no usable intersection.
    float denom = dot(normal, ray_dir);
    if (abs(denom) > 1e-6) {
        vec3 d = pt - ray_origin;
        t = dot(d, normal) / denom;
        return t >= 0.0;
    }

    return false;
}

// Apply the inverse transform "inv_transform"
// to the reference point "ref" in CSS space,
// producing a local point on a Transform plane,
// set by a base point "a" and a normal "n".
vec4 untransform(vec2 ref, vec3 n, vec3 a, mat4 inv_transform) {
    vec3 p = vec3(ref, -10000.0);
    vec3 d = vec3(0, 0, 1.0);

    float t = 0.0;
    // get an intersection of the Transform plane with Z axis vector,
    // originated from the "ref" point
    ray_plane(n, a, p, d, t);
    float z = p.z + d.z * t; // Z of the visible point on the Transform

    vec4 r = inv_transform * vec4(ref, z, 1.0);
    return r;
}

// Given a CSS space position, transform it back into the Transform space.
vec4 get_node_pos(vec2 pos, Transform transform) {
    // get a point on the scroll node plane
    vec4 ah = transform.m * vec4(0.0, 0.0, 0.0, 1.0);
    vec3 a = ah.xyz / ah.w;

    // get the normal to the scroll node plane
    // (inverse-transpose maps normals correctly under non-uniform transforms)
    vec3 n = transpose(mat3(transform.inv_m)) * vec3(0.0, 0.0, 1.0);
    return untransform(pos, n, a, transform.inv_m);
}

#endif //WR_VERTEX_SHADER

#ifdef WR_FRAGMENT_SHADER

// Signed distance from `pos` to the rect [p0, p1]:
// negative inside, positive outside.
float signed_distance_rect(vec2 pos, vec2 p0, vec2 p1) {
    vec2 d = max(p0 - pos, pos - p1);
    return length(max(vec2(0.0), d)) + min(0.0, max(d.x, d.y));
}

// Anti-aliased coverage of `local_pos` against vTransformBounds.
// NOTE(review): compute_aa_range / distance_aa / point_inside_rect are
// shared helpers defined elsewhere in the shader build.
float init_transform_fs(vec2 local_pos) {
    // Get signed distance from local rect bounds.
    float d = signed_distance_rect(
        local_pos,
        vTransformBounds.xy,
        vTransformBounds.zw
    );

    // Find the appropriate distance to apply the AA smoothstep over.
    float aa_range = compute_aa_range(local_pos);

    // Only apply AA to fragments outside the signed distance field.
    return distance_aa(aa_range, d);
}

// Hard (non-anti-aliased) inside/outside test against vTransformBounds.
float init_transform_rough_fs(vec2 local_pos) {
    return point_inside_rect(
        local_pos,
        vTransformBounds.xy,
        vTransformBounds.zw
    );
}

#endif //WR_FRAGMENT_SHADER
# Allen Institute Software License - This software license is the 2-clause BSD # license plus a third clause that prohibits redistribution for commercial # purposes without further permission. # # Copyright 2017. Allen Institute. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Redistributions for commercial purposes are not permitted without the # Allen Institute's written permission. # For purposes of this license, commercial purposes is the incorporation of the # Allen Institute's software into anything for which you will charge fees or # other compensation. Contact [email protected] for commercial licensing # opportunities. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# import numpy as np import sys if sys.version_info < (3, 3): from collections import Iterable else: from collections.abc import Iterable import json import uuid import datetime import dateutil def dict_to_indexed_array(dc, order=None): ''' Given a dictionary and an ordered arr, build a concatenation of the dictionary's values and an index describing how that concatenation can be unpacked ''' if order is None: order = dc.keys() data = [] index = [] counter = 0 for key in order: if isinstance(dc[key], (np.ndarray, list)): extended = dc[key] if isinstance(dc[key], Iterable): extended = [x for x in dc[key]] else: extended = [dc[key]] counter += len(extended) index.append(counter) data.append(extended) data = np.concatenate(data) return index, data class JSONEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, datetime.datetime): return o.isoformat() elif isinstance(o, uuid.UUID): return str(o) return json.JSONEncoder.default(self, o) def hook(json_dict): for key, value in json_dict.items(): if key == 'experiment_date': json_dict[key] = dateutil.parser.parse(value) elif key == 'behavior_session_uuid': json_dict[key] = uuid.UUID(value) else: pass return json_dict
{ "pile_set_name": "Github" }
/* Localizable.strings SwiftAssetsPickerController Created by Maxim on 7/22/15. Copyright (c) 2015 Maxim Bilan. All rights reserved. */ "All Photos" = "All Photos"; "Favorites" = "Favorites"; "Panoramas" = "Panoramas"; "Videos" = "Videos"; "Time Lapse" = "Time Lapse"; "Recently Deleted" = "Recently Deleted"; "User Album" = "User Album"; "Photos" = "Photos"; "Cancel" = "Cancel"; "Done" = "Done";
{ "pile_set_name": "Github" }
<div layout="row" class="top-axis"> <md-toolbar class='md-toolbar-tools knowage-blue swapaxes swap-axis-area' ng-click="swapAxis()"> &nbsp; </md-toolbar> <md-toolbar id="topaxis" flex topaxis class='md-toolbar-tools knowage-blue dimension-top-toolbar' ng-drop="true" ng-drop-success="dropTop($data,$event)" layout="row"> <div class="top-shift" ng-show="topSliderNeeded"> <md-button class="md-icon-button" aria-label="Favorite" ng-click="dimensionShift('left')" ><!-- topSliderNeeded --> <md-icon md-font-icon="fa fa-arrow-circle-o-left fa-2x" ></md-icon> </md-button> </div> <div class="top-axis-container" layout="row"> <div ng-repeat="column in columns | limitTo:maxCols:topStart" layout="row" class="top-axis-element"> <div class="filter-toolbar-element" ng-drag="true" layout="row" ng-drag-data="column" layout-align="start center" ng-drag-success="dragSuccess('top',$index)" id="top-{{column.name}}"> <md-button class="md-icon-button" ng-click="showMultiHierDialog(e,column)" ng-show="column.hierarchies.length > 1"> <md-icon class="fa fa-sitemap icon-color-white" ng-click="showMultiHierDialog(e,column)"></md-icon> </md-button> <div class="name" title="{{column.caption}}" flex> {{cutName(column.caption, 0, column.hierarchies.length > 1)}} </div> <md-button class="md-icon-button" aria-label="Favorite" ng-click="openFiltersDialogAsync(e, column, null)"> <md-icon md-font-icon="fa fa-filter"></md-icon> </md-button> </div> <div ng-hide="hideSwitchIcon(column.positionInAxis,0)"> <!-- column.positionInAxis == columns.length-1 --> <md-button class="md-icon-button top-axis-switch" ng-click="switchPosition(column)"> <md-icon md-font-icon="fa fa-arrows-h" class="icon-color-white"></md-icon> </md-button> </div> </div> <div flex class="axisDropzone">{{translate.load('sbi.olap.drop.dimension')}}</div> </div> <div class="top-shift" ng-show="topSliderNeeded"> <md-button class="md-icon-button" aria-label="Favorite" ng-click="dimensionShift('right')" ><!-- --> <md-icon md-font-icon="fa 
fa-arrow-circle-o-right fa-2x" ></md-icon> </md-button> </div> <div style="width:2px;height:100%;background-color: white;"></div> <div layout="row" layout-align="center center" style="width:50px;"> <md-button class="md-icon-button" ng-click="toggleRight()" id="right-sidenav-open-btn"> <md-icon class="fa fa-bars"></md-icon> </md-button> </div> </md-toolbar> </div>
{ "pile_set_name": "Github" }
1.3.6.1.2.1.1.1.0|4|NWA3560-N 1.3.6.1.2.1.1.2.0|6|1.3.6.1.4.1.890.1.15
{ "pile_set_name": "Github" }
# frozen_string_literal: true require 'spec_helper' RSpec.describe Terraform::StateVersion do it { is_expected.to be_a FileStoreMounter } it { is_expected.to belong_to(:terraform_state).required } it { is_expected.to belong_to(:created_by_user).class_name('User').optional } describe 'scopes' do describe '.ordered_by_version_desc' do let(:terraform_state) { create(:terraform_state) } let(:versions) { [4, 2, 5, 1, 3] } subject { described_class.ordered_by_version_desc } before do versions.each do |version| create(:terraform_state_version, terraform_state: terraform_state, version: version) end end it { expect(subject.map(&:version)).to eq(versions.sort.reverse) } end end context 'file storage' do subject { create(:terraform_state_version) } before do stub_terraform_state_object_storage(Terraform::StateUploader) end describe '#file' do let(:terraform_state_file) { fixture_file('terraform/terraform.tfstate') } before do subject.file = CarrierWaveStringFile.new(terraform_state_file) subject.save! end it 'returns the saved file' do expect(subject.file.read).to eq(terraform_state_file) end end describe '#file_store' do it 'returns the value' do [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store| subject.update!(file_store: store) expect(subject.file_store).to eq(store) end end end describe '#update_file_store' do context 'when file is stored in object storage' do it 'sets file_store to remote' do expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE) end end context 'when file is stored locally' do before do stub_terraform_state_object_storage(enabled: false) end it 'sets file_store to local' do expect(subject.file_store).to eq(ObjectStorage::Store::LOCAL) end end end end end
{ "pile_set_name": "Github" }
apiVersion: apps/v1 kind: StatefulSet metadata: name: {{ template "cp-kafka.fullname" . }} labels: app: {{ template "cp-kafka.name" . }} chart: {{ template "cp-kafka.chart" . }} release: {{ .Release.Name }} heritage: {{ .Release.Service }} spec: selector: matchLabels: app: {{ template "cp-kafka.name" . }} release: {{ .Release.Name }} serviceName: {{ template "cp-kafka.fullname" . }}-headless podManagementPolicy: {{ .Values.podManagementPolicy }} replicas: {{ default 3 .Values.brokers }} updateStrategy: type: {{ .Values.updateStrategy }} template: metadata: labels: app: {{ template "cp-kafka.name" . }} release: {{ .Release.Name }} {{- if or .Values.podAnnotations .Values.prometheus.jmx.enabled }} annotations: {{- range $key, $value := .Values.podAnnotations }} {{ $key }}: {{ $value | quote }} {{- end }} {{- if .Values.prometheus.jmx.enabled }} prometheus.io/scrape: "true" prometheus.io/port: {{ .Values.prometheus.jmx.port | quote }} {{- end }} {{- end }} spec: affinity: podAntiAffinity: preferredDuringSchedulingIgnoredDuringExecution: - weight: 1 podAffinityTerm: labelSelector: matchExpressions: - key: "app" operator: In values: - {{ template "cp-kafka.name" . }} - key: "release" operator: In values: - {{ .Release.Name }} topologyKey: "kubernetes.io/hostname" containers: {{- if .Values.prometheus.jmx.enabled }} - name: prometheus-jmx-exporter image: "{{ .Values.prometheus.jmx.image }}:{{ .Values.prometheus.jmx.imageTag }}" imagePullPolicy: "{{ .Values.prometheus.jmx.imagePullPolicy }}" command: - java - -XX:+UnlockExperimentalVMOptions - -XX:+UseCGroupMemoryLimitForHeap - -XX:MaxRAMFraction=1 - -XshowSettings:vm - -jar - jmx_prometheus_httpserver.jar - {{ .Values.prometheus.jmx.port | quote }} - /etc/jmx-kafka/jmx-kafka-prometheus.yml ports: - containerPort: {{ .Values.prometheus.jmx.port }} resources: {{ toYaml .Values.prometheus.jmx.resources | indent 10 }} volumeMounts: - name: jmx-config mountPath: /etc/jmx-kafka {{- end }} - name: {{ template "cp-kafka.name" . 
}}-broker {{- if .Values.global.enterprise }} image: "{{ .Values.enterpriseImage }}:{{ .Values.imageTag }}" {{- else }} image: "{{ .Values.image }}:{{ .Values.imageTag }}" {{- end }} imagePullPolicy: "{{ .Values.imagePullPolicy }}" ports: - containerPort: 9092 name: kafka {{- if .Values.prometheus.jmx.enabled }} - containerPort: {{ .Values.jmx.port }} name: jmx {{- end }} {{- if .Values.nodeport.enabled }} {{- $brokers := .Values.brokers | int }} {{- $root := . }} {{- range $i, $e := until $brokers }} - containerPort: {{ add $root.Values.nodeport.firstListenerPort $i }} name: nodeport-{{ $i }} {{- end }} {{- end }} resources: {{ toYaml .Values.resources | indent 10 }} env: - name: POD_IP valueFrom: fieldRef: fieldPath: status.podIP - name: HOST_IP valueFrom: fieldRef: fieldPath: status.hostIP - name: POD_NAME valueFrom: fieldRef: fieldPath: metadata.name - name: POD_NAMESPACE valueFrom: fieldRef: fieldPath: metadata.namespace - name: KAFKA_HEAP_OPTS value: {{ .Values.heapOptions }} - name: KAFKA_ZOOKEEPER_CONNECT value: {{ include "cp-kafka.cp-zookeeper.service-name" . | quote }} - name: KAFKA_LOG_DIRS value: {{ include "cp-kafka.log.dirs" . | quote }} {{- if .Values.global.enterprise }} - name: KAFKA_METRIC_REPORTERS value: "io.confluent.metrics.reporter.ConfluentMetricsReporter" - name: CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS value: {{ printf "PLAINTEXT://%s:9092" (include "cp-kafka.cp-kafka-headless.fullname" .) | quote }} {{- end }} {{- range $key, $value := .Values.configurationOverrides }} - name: {{ printf "KAFKA_%s" $key | replace "." "_" | upper | quote }} value: {{ $value | quote }} {{- end }} {{- range $key, $value := .Values.customEnv }} - name: {{ $key | quote }} value: {{ $value | quote }} {{- end }} {{- if .Values.jmx.port }} - name: KAFKA_JMX_PORT value: "{{ .Values.jmx.port }}" {{- end }} # This is required because the Downward API does not yet support identification of # pod numbering in statefulsets. 
Thus, we are required to specify a command which # allows us to extract the pod ID for usage as the Kafka Broker ID. # See: https://github.com/kubernetes/kubernetes/issues/31218 command: - sh - -exc - | export KAFKA_BROKER_ID=${HOSTNAME##*-} && \ export KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://${POD_NAME}.{{ template "cp-kafka.fullname" . }}-headless.${POD_NAMESPACE}:9092{{ include "cp-kafka.configuration.advertised.listeners" . }} && \ exec /etc/confluent/docker/run volumeMounts: {{- $disksPerBroker := .Values.persistence.disksPerBroker | int }} {{- range $k, $e := until $disksPerBroker }} - name: datadir-{{$k}} mountPath: /opt/kafka/data-{{$k}} {{- end }} {{- if .Values.imagePullSecrets }} imagePullSecrets: {{ toYaml .Values.imagePullSecrets | indent 8 }} {{- end }} volumes: {{- if not .Values.persistence.enabled }} - name: datadir-0 emptyDir: {} {{- end }} {{- if .Values.prometheus.jmx.enabled }} - name: jmx-config configMap: name: {{ template "cp-kafka.fullname" . }}-jmx-configmap {{- end }} {{- if .Values.nodeSelector }} nodeSelector: {{ toYaml .Values.nodeSelector | indent 8 }} {{- end }} {{- if .Values.tolerations }} tolerations: {{ toYaml .Values.tolerations | indent 8 }} {{- end }} {{- if .Values.persistence.enabled }} volumeClaimTemplates: {{- $disksPerBroker := .Values.persistence.disksPerBroker | int }} {{- $root := . }} {{- range $k, $e := until $disksPerBroker }} - metadata: name: datadir-{{$k}} spec: accessModes: [ "ReadWriteOnce" ] resources: requests: storage: "{{ $root.Values.persistence.size }}" {{- if $root.Values.persistence.storageClass }} {{- if (eq "-" $root.Values.persistence.storageClass) }} storageClassName: "" {{- else }} storageClassName: "{{ $root.Values.persistence.storageClass }}" {{- end }} {{- end }} {{- end }} {{- end }}
{ "pile_set_name": "Github" }
// Copyright (c) 2020 Marshall A. Greenblatt. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the name Chromium Embedded // Framework nor the names of its contributors may be used to endorse // or promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // --------------------------------------------------------------------------- // // This file was generated by the CEF translator tool and should not edited // by hand. See the translator.README.txt file in the tools directory for // more information. 
// // $hash=71def746b63431f9aa779bbb67e85bc2e0176615$ // #ifndef CEF_INCLUDE_CAPI_VIEWS_CEF_BUTTON_DELEGATE_CAPI_H_ #define CEF_INCLUDE_CAPI_VIEWS_CEF_BUTTON_DELEGATE_CAPI_H_ #pragma once #include "include/capi/views/cef_view_delegate_capi.h" #ifdef __cplusplus extern "C" { #endif struct _cef_button_t; /// // Implement this structure to handle Button events. The functions of this // structure will be called on the browser process UI thread unless otherwise // indicated. /// typedef struct _cef_button_delegate_t { /// // Base structure. /// cef_view_delegate_t base; /// // Called when |button| is pressed. /// void(CEF_CALLBACK* on_button_pressed)(struct _cef_button_delegate_t* self, struct _cef_button_t* button); /// // Called when the state of |button| changes. /// void(CEF_CALLBACK* on_button_state_changed)( struct _cef_button_delegate_t* self, struct _cef_button_t* button); } cef_button_delegate_t; #ifdef __cplusplus } #endif #endif // CEF_INCLUDE_CAPI_VIEWS_CEF_BUTTON_DELEGATE_CAPI_H_
{ "pile_set_name": "Github" }
################################################################################ ## Comment # Speak your mind with the hash symbol. They go from the symbol to the end of # the line. ################################################################################ ## Table # Tables (also known as hash tables or dictionaries) are collections of # key/value pairs. They appear in square brackets on a line by themselves. [table] key = "value" # Yeah, you can do this. # Nested tables are denoted by table names with dots in them. Name your tables # whatever crap you please, just don't use #, ., [ or ]. [table.subtable] key = "another value" # You don't need to specify all the super-tables if you don't want to. TOML # knows how to do it for you. # [x] you # [x.y] don't # [x.y.z] need these [x.y.z.w] # for this to work ################################################################################ ## Inline Table # Inline tables provide a more compact syntax for expressing tables. They are # especially useful for grouped data that can otherwise quickly become verbose. # Inline tables are enclosed in curly braces `{` and `}`. No newlines are # allowed between the curly braces unless they are valid within a value. [table.inline] name = { first = "Tom", last = "Preston-Werner" } point = { x = 1, y = 2 } ################################################################################ ## String # There are four ways to express strings: basic, multi-line basic, literal, and # multi-line literal. All strings must contain only valid UTF-8 characters. [string.basic] basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." [string.multiline] # The following strings are byte-for-byte equivalent: key1 = "One\nTwo" key2 = """One\nTwo""" key3 = """ One Two""" [string.multiline.continued] # The following strings are byte-for-byte equivalent: key1 = "The quick brown fox jumps over the lazy dog." 
key2 = """ The quick brown \ fox jumps over \ the lazy dog.""" key3 = """\ The quick brown \ fox jumps over \ the lazy dog.\ """ [string.literal] # What you see is what you get. winpath = 'C:\Users\nodejs\templates' winpath2 = '\\ServerX\admin$\system32\' quoted = 'Tom "Dubs" Preston-Werner' regex = '<\i\c*\s*>' [string.literal.multiline] regex2 = '''I [dw]on't need \d{2} apples''' lines = ''' The first newline is trimmed in raw strings. All other whitespace is preserved. ''' ################################################################################ ## Integer # Integers are whole numbers. Positive numbers may be prefixed with a plus sign. # Negative numbers are prefixed with a minus sign. [integer] key1 = +99 key2 = 42 key3 = 0 key4 = -17 [integer.underscores] # For large numbers, you may use underscores to enhance readability. Each # underscore must be surrounded by at least one digit. key1 = 1_000 key2 = 5_349_221 key3 = 1_2_3_4_5 # valid but inadvisable ################################################################################ ## Float # A float consists of an integer part (which may be prefixed with a plus or # minus sign) followed by a fractional part and/or an exponent part. [float.fractional] key1 = +1.0 key2 = 3.1415 key3 = -0.01 [float.exponent] key1 = 5e+22 key2 = 1e6 key3 = -2E-2 [float.both] key = 6.626e-34 [float.underscores] key1 = 9_224_617.445_991_228_313 key2 = 1e1_00 ################################################################################ ## Boolean # Booleans are just the tokens you're used to. Always lowercase. [boolean] True = true False = false ################################################################################ ## Datetime # Datetimes are RFC 3339 dates. [datetime] key1 = 1979-05-27T07:32:00Z key2 = 1979-05-27T00:32:00-07:00 key3 = 1979-05-27T00:32:00.999999-07:00 ################################################################################ ## Array # Arrays are square brackets with other primitives inside. 
Whitespace is # ignored. Elements are separated by commas. Data types may not be mixed. [array] key1 = [ 1, 2, 3 ] key2 = [ "red", "yellow", "green" ] key3 = [ [ 1, 2 ], [3, 4, 5] ] #key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok # Arrays can also be multiline. So in addition to ignoring whitespace, arrays # also ignore newlines between the brackets. Terminating commas are ok before # the closing bracket. key5 = [ 1, 2, 3 ] key6 = [ 1, 2, # this is ok ] ################################################################################ ## Array of Tables # These can be expressed by using a table name in double brackets. Each table # with the same double bracketed name will be an element in the array. The # tables are inserted in the order encountered. [[products]] name = "Hammer" sku = 738594937 [[products]] [[products]] name = "Nail" sku = 284758393 color = "gray" # You can create nested arrays of tables as well. [[fruit]] name = "apple" [fruit.physical] color = "red" shape = "round" [[fruit.variety]] name = "red delicious" [[fruit.variety]] name = "granny smith" [[fruit]] name = "banana" [[fruit.variety]] name = "plantain"
{ "pile_set_name": "Github" }
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package b import "a" // ---------------------------------------------------------------------------- // Basic declarations const Pi = 3.14 // Pi var MaxInt int // MaxInt type T struct{} // T var V T // v func F(x int) int {} // F func (x *T) M() {} // M // Corner cases: association with (presumed) predeclared types // Always under the package functions list. func NotAFactory() int {} // Associated with uint type if AllDecls is set. func UintFactory() uint {} // Associated with uint type if AllDecls is set. func uintFactory() uint {} // Should only appear if AllDecls is set. type uint struct{} // overrides a predeclared type uint // ---------------------------------------------------------------------------- // Exported declarations associated with non-exported types must always be shown. type notExported int const C notExported = 0 const ( C1 notExported = iota C2 c3 C4 C5 ) var V notExported var V1, V2, v3, V4, V5 notExported var ( U1, U2, u3, U4, U5 notExported u6 notExported U7 notExported = 7 ) func F1() notExported {} func f2() notExported {}
{ "pile_set_name": "Github" }
f True = 1 f False = 2 f _ = 3
{ "pile_set_name": "Github" }
<html> <head> <script> function log(message) { document.getElementById("result").innerHTML += message + "<br>"; } function loadJSFile(){ var s = document.createElement('script') s.setAttribute("type", "text/javascript") s.setAttribute("src", "resources/load-deferrer-script-element.js") document.getElementsByTagName("head")[0].appendChild(s); } jsLoaded = false; runningModal = false; // This line will load external script into memory. loadJSFile(); function runModal() { jsLoaded = true; loadJSFile(); runningModal = true; alert("Scripts should not be running in the background!"); runningModal = false; } </script> </head> <body> <p>This tests the bug https://bugs.webkit.org/show_bug.cgi?id=38910. Click the button, wait 5 seconds and close it. The test passes if no error messages show up in the page!</p> <input id="button" type="button" value="click me" onclick="runModal()"/> <p id="result"></p> </body> </html>
{ "pile_set_name": "Github" }
/* Copyright 2016 Goldman Sachs. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.gs.fw.common.mithra; public class MithraConfigurationException extends MithraBusinessException { public MithraConfigurationException(String message) { super(message); } }
{ "pile_set_name": "Github" }
<%= form.inputs :first_name, :last_name, :login, :email %> #{password_input_block}
{ "pile_set_name": "Github" }
<%@ page language="java" pageEncoding="UTF-8" contentType="text/html; charset=UTF-8"%> <%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c"%> <%@ taglib uri="http://java.sun.com/jsp/jstl/fmt" prefix="fmt"%> <%@ taglib uri="http://java.sun.com/jsp/jstl/functions" prefix="fn"%> <div align="center"> <form id="designForm" method="post" style="padding:0 20px;"> <div class="design_title">窗设计信息表</div> <div> <table class="tableForm_title"> <tr> <td>客户地址:${designVo.clientAddress }</td><td></td> <td>客户姓名:${designVo.clientName }</td><td></td> <td>下单日期:<input name="orderTimeStr" value="${designVo.orderTimeStr }" data-options="required:true" class="easyui-datebox" /></td><td></td> <td>流水单号:${designVo.billCode }</td> </tr> </table> </div> <table class="tableForm tableForm_design"> <!-- <tr> <td colspan="18"> <a href="#" class="easyui-linkbutton" data-options="iconCls:'icon-add'">添加产品</a> </td> </tr> --> <tr> <th rowspan="2" style="width:80px;">产品编号</th> <th rowspan="2" style="width:80px;">产品名称</th> <th rowspan="2" style="width:80px;">安装位置</th> <th rowspan="2" style="width:30px;">樘数</th> <th rowspan="2" style="width:80px;">开启扇数量</th> <th rowspan="2" style="width:80px;">五金配件</th> <th rowspan="2" style="width:50px;">墙厚(mm)</th> <th rowspan="2" style="width:80px;">窗台高度(mm)</th> <th colspan="2" style="width:80px;">产品尺寸(mm)</th> <th colspan="2" style="width:80px;">颜色</th> <th colspan="2" style="width:80px;">玻璃/百叶</th> <th rowspan="2" style="width:50px;">包套</th> <th rowspan="2" style="width:50px;">安全系统</th> <th rowspan="2" ><a href="#" class="easyui-linkbutton c8" onclick="addDesignDetail(2)">添加产品设计明细</a></th> </tr> <tr> <th style="width:50px;">宽</th> <th style="width:50px;">高</th> <th style="width:50px;">内</th> <th style="width:50px;">外</th> <th style="width:50px;">玻璃</th> <th style="width:50px;">百叶</th> </tr> <tr id="remark_tr"> <th>备注</td> <td colspan="16" > <input name="remark" data-options="multiline:true," style="height:80px;width:98%;" value="${designVo.remark }" 
class="easyui-textbox" /> </td> </tr> </table> <div class="design_desc"> 填表说明: <font color="red" >1、</font>断桥需说明内外颜色,非断桥内外颜色一致; <font color="red" >2、</font>洞口尺寸与产品二选一填写; <font color="red" >3、</font>产品序号窗的话按C1,C2 如此类推命名。 </div> <input type="hidden" name="id" value="${designVo.id}"/> <input type="hidden" name="appointId" value="${designVo.appointId}"/> <input type="hidden" name="clientAddress" value="${designVo.clientAddress}"/> <input type="hidden" name="clientName" value="${designVo.clientName}"/> <input type="hidden" name="designType" value="${designVo.designType}"/> <input type="hidden" name="clientPhone" value="${designVo.clientPhone}"/> <input type="hidden" name="billCode" value="${designVo.billCode}"/> </form> </div> <script> $(function(){ addDesignDetail(2); }) </script>
{ "pile_set_name": "Github" }
/** * @class Ext.calendar.template.DayBody * @extends Ext.XTemplate * <p>This is the template used to render the scrolling body container used in {@link Ext.calendar.DayView DayView} and * {@link Ext.calendar.WeekView WeekView}. This template is automatically bound to the underlying event store by the * calendar components and expects records of type {@link Ext.calendar.EventRecord}.</p> * <p>Note that this template would not normally be used directly. Instead you would use the {@link Ext.calendar.DayViewTemplate} * that internally creates an instance of this template along with a {@link Ext.calendar.DayHeaderTemplate}.</p> * @constructor * @param {Object} config The config object */ Ext.define('Ext.calendar.template.DayBody', { extend: 'Ext.XTemplate', requires: [ 'Ext.calendar.util.Date' ], constructor: function(config){ Ext.apply(this, config); this.callParent([ '<table class="ext-cal-bg-tbl" cellspacing="0" cellpadding="0">', '<tbody>', '<tr height="1">', '<td class="ext-cal-gutter"></td>', '<td colspan="{dayCount}">', '<div class="ext-cal-bg-rows">', '<div class="ext-cal-bg-rows-inner">', '<tpl for="times">', '<div class="ext-cal-bg-row">', '<div class="ext-cal-bg-row-div ext-row-{[xindex]}"></div>', '</div>', '</tpl>', '</div>', '</div>', '</td>', '</tr>', '<tr>', '<td class="ext-cal-day-times">', '<tpl for="times">', '<div class="ext-cal-bg-row">', '<div class="ext-cal-day-time-inner">{.}</div>', '</div>', '</tpl>', '</td>', '<tpl for="days">', '<td class="ext-cal-day-col">', '<div class="ext-cal-day-col-inner">', '<div id="{[this.id]}-day-col-{.:date("Ymd")}" class="ext-cal-day-col-gutter"></div>', '</div>', '</td>', '</tpl>', '</tr>', '</tbody>', '</table>' ]); }, // private applyTemplate : function(o){ this.today = Ext.calendar.util.Date.today(); this.dayCount = this.dayCount || 1; var i = 0, days = [], dt = Ext.Date.clone(o.viewStart), times = []; for(; i<this.dayCount; i++){ days[i] = Ext.calendar.util.Date.add(dt, {days: i}); } // use a fixed DST-safe 
date so times don't get skipped on DST boundaries dt = Ext.Date.clearTime(new Date('5/26/1972')); for(i=0; i<24; i++){ times.push(Ext.Date.format(dt, 'ga')); dt = Ext.calendar.util.Date.add(dt, {hours: 1}); } return this.applyOut({ days: days, dayCount: days.length, times: times }, []).join(''); }, apply: function(values) { return this.applyTemplate.apply(this, arguments); } });
{ "pile_set_name": "Github" }
'use strict'; angular.module("ngLocale", [], ["$provide", function($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "\u0635", "\u0645" ], "DAY": [ "\u0627\u0644\u0623\u062d\u062f", "\u0627\u0644\u0627\u062b\u0646\u064a\u0646", "\u0627\u0644\u062b\u0644\u0627\u062b\u0627\u0621", "\u0627\u0644\u0623\u0631\u0628\u0639\u0627\u0621", "\u0627\u0644\u062e\u0645\u064a\u0633", "\u0627\u0644\u062c\u0645\u0639\u0629", "\u0627\u0644\u0633\u0628\u062a" ], "MONTH": [ "\u064a\u0646\u0627\u064a\u0631", "\u0641\u0628\u0631\u0627\u064a\u0631", "\u0645\u0627\u0631\u0633", "\u0623\u0628\u0631\u064a\u0644", "\u0645\u0627\u064a\u0648", "\u064a\u0648\u0646\u064a\u0648", "\u064a\u0648\u0644\u064a\u0648", "\u0623\u063a\u0633\u0637\u0633", "\u0633\u0628\u062a\u0645\u0628\u0631", "\u0623\u0643\u062a\u0648\u0628\u0631", "\u0646\u0648\u0641\u0645\u0628\u0631", "\u062f\u064a\u0633\u0645\u0628\u0631" ], "SHORTDAY": [ "\u0627\u0644\u0623\u062d\u062f", "\u0627\u0644\u0627\u062b\u0646\u064a\u0646", "\u0627\u0644\u062b\u0644\u0627\u062b\u0627\u0621", "\u0627\u0644\u0623\u0631\u0628\u0639\u0627\u0621", "\u0627\u0644\u062e\u0645\u064a\u0633", "\u0627\u0644\u062c\u0645\u0639\u0629", "\u0627\u0644\u0633\u0628\u062a" ], "SHORTMONTH": [ "\u064a\u0646\u0627\u064a\u0631", "\u0641\u0628\u0631\u0627\u064a\u0631", "\u0645\u0627\u0631\u0633", "\u0623\u0628\u0631\u064a\u0644", "\u0645\u0627\u064a\u0648", "\u064a\u0648\u0646\u064a\u0648", "\u064a\u0648\u0644\u064a\u0648", "\u0623\u063a\u0633\u0637\u0633", "\u0633\u0628\u062a\u0645\u0628\u0631", "\u0623\u0643\u062a\u0648\u0628\u0631", "\u0646\u0648\u0641\u0645\u0628\u0631", "\u062f\u064a\u0633\u0645\u0628\u0631" ], "fullDate": "EEEE\u060c d MMMM\u060c y", "longDate": "d MMMM\u060c y", "medium": "dd\u200f/MM\u200f/yyyy h:mm:ss a", "mediumDate": "dd\u200f/MM\u200f/yyyy", "mediumTime": "h:mm:ss a", "short": "d\u200f/M\u200f/yyyy h:mm a", 
"shortDate": "d\u200f/M\u200f/yyyy", "shortTime": "h:mm a" }, "NUMBER_FORMATS": { "CURRENCY_SYM": "\u00a3", "DECIMAL_SEP": "\u066b", "GROUP_SEP": "\u066c", "PATTERNS": [ { "gSize": 0, "lgSize": 0, "macFrac": 0, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "", "negSuf": "-", "posPre": "", "posSuf": "" }, { "gSize": 0, "lgSize": 0, "macFrac": 0, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "\u00a4\u00a0", "negSuf": "-", "posPre": "\u00a4\u00a0", "posSuf": "" } ] }, "id": "ar-kw", "pluralCat": function (n) { if (n == 0) { return PLURAL_CATEGORY.ZERO; } if (n == 1) { return PLURAL_CATEGORY.ONE; } if (n == 2) { return PLURAL_CATEGORY.TWO; } if (n == (n | 0) && n % 100 >= 3 && n % 100 <= 10) { return PLURAL_CATEGORY.FEW; } if (n == (n | 0) && n % 100 >= 11 && n % 100 <= 99) { return PLURAL_CATEGORY.MANY; } return PLURAL_CATEGORY.OTHER;} }); }]);
{ "pile_set_name": "Github" }
# Locally computed: sha256 4c1f83dcd120469fceef749050cb29fa666fa4666bd308dfe92e933a4c200d55 nscd-0.52.c
{ "pile_set_name": "Github" }
<?php namespace Alchemy\Tests\Tools; use Iterator; use RuntimeException; use SplFileObject; class CsvFileIterator implements Iterator { private $file; private $key = 0; private $current; public function __construct($file, $delimiter = ',', $enclosure = '"', $escape = '\\') { $this->file = new SplFileObject($file, 'r'); $this->file->setFlags(SplFileObject::SKIP_EMPTY | SplFileObject::DROP_NEW_LINE); $this->file->setCsvControl($delimiter, $enclosure, $escape); } public function rewind() { $this->file->rewind(); $this->current = $this->fgetcsv(); $this->key = 0; } public function valid() { return !$this->file->eof(); } public function key() { return $this->key; } public function current() { return $this->current; } public function next() { $this->current = $this->fgetcsv(); $this->key++; } private function fgetcsv() { do { $line = $this->file->fgetcsv(); } while (isset($line[0]) && '#' === substr($line[0], 0, 1)); return $line; } }
{ "pile_set_name": "Github" }
/*! * # Semantic UI - Site * http://github.com/semantic-org/semantic-ui/ * * * Copyright 2014 Contributors * Released under the MIT license * http://opensource.org/licenses/MIT * */ /******************************* Theme *******************************/ @type : 'global'; @element : 'site'; @import (multiple) '../../theme.config'; /******************************* Page *******************************/ .loadFonts(); html, body { height: 100%; } html { font-size: @emSize; } body { margin: 0px; padding: 0px; min-width: @pageMinWidth; background: @pageBackground; font-family: @pageFont; font-size: @fontSize; line-height: @lineHeight; color: @textColor; font-smoothing: @fontSmoothing; } /******************************* Headers *******************************/ h1, h2, h3, h4, h5 { font-family: @headerFont; line-height: @headerLineHeight; margin: @headerMargin; font-weight: @headerFontWeight; padding: 0em; } h1 { min-height: 1rem; font-size: @h1; } h2 { font-size: @h2; } h3 { font-size: @h3; } h4 { font-size: @h4; } h5 { font-size: @h5; } h1:first-child, h2:first-child, h3:first-child, h4:first-child, h5:first-child { margin-top: 0em; } h1:last-child, h2:last-child, h3:last-child, h4:last-child, h5:last-child { margin-bottom: 0em; } /******************************* Text *******************************/ p { margin: @paragraphMargin; line-height: @paragraphLineHeight; } p:first-child { margin-top: 0em; } p:last-child { margin-bottom: 0em; } /*------------------- Links --------------------*/ a { color: @linkColor; text-decoration: @linkUnderline; } a:hover { color: @linkHoverColor; } /******************************* Highlighting *******************************/ /* Site */ ::-webkit-selection { background-color: @highlightBackground; color: @highlightColor; } ::-moz-selection { background-color: @highlightBackground; color: @highlightColor; } ::selection { background-color: @highlightBackground; color: @highlightColor; } /* Form */ textarea::-webkit-selection, 
input::-webkit-selection { background-color: @inputHighlightBackground; color: @inputHighlightColor; } textarea::-moz-selection, input::-moz-selection { background-color: @inputHighlightBackground; color: @inputHighlightColor; } textarea::selection, input::selection { background-color: @inputHighlightBackground; color: @inputHighlightColor; } .loadUIOverrides();
{ "pile_set_name": "Github" }
# $Id: ja.po,v 1.1.2.1 2008/06/15 13:09:11 imagine Exp $ # # Japanese translation of Drupal (cck) # Copyright 2008 0829 <[email protected]> # Generated from files: # userreference.info,v 1.8 2008/04/23 18:02:38 dww Exp # userreference.module,v 1.106 2008/06/01 22:00:36 karens Exp # text.info,v 1.9 2008/04/23 18:02:31 dww Exp # text.module,v 1.95 2008/06/01 21:59:56 karens Exp # optionwidgets.info,v 1.7 2008/04/23 18:02:24 dww Exp # optionwidgets.module,v 1.69 2008/06/03 12:25:45 karens Exp # number.info,v 1.7 2008/04/23 18:02:16 dww Exp # number.module,v 1.91 2008/05/13 16:40:00 karens Exp # nodereference.info,v 1.8 2008/04/23 18:02:07 dww Exp # nodereference.module,v 1.138 2008/05/31 18:12:50 karens Exp # fieldgroup.info,v 1.6 2008/04/23 18:01:58 dww Exp # fieldgroup.module,v 1.79 2008/06/02 16:33:20 karens Exp # content_permissions.info,v 1.2 2008/04/23 18:01:52 dww Exp # content_permissions.install,v 1.1 2008/04/25 02:03:14 karens Exp # content_permissions.module,v 1.5 2008/05/11 19:20:09 dopry Exp # content_copy.info,v 1.6 2008/04/23 18:01:48 dww Exp # content_copy.module,v 1.27 2008/05/29 22:27:04 karens Exp # content.admin.inc,v 1.181 2008/06/03 12:25:44 karens Exp # content.crud.inc,v 1.76 2008/06/03 14:16:55 karens Exp # content.node_form.inc,v 1.7 2008/05/28 20:40:45 karens Exp # content.token.inc,v 1.5 2008/04/25 01:24:40 karens Exp # content.views.inc,v 1.68 2008/05/29 11:16:14 karens Exp # example_field.php,v 1.5 2008/04/23 08:24:06 karens Exp # simple_field.php,v 1.5 2008/04/23 08:24:06 karens Exp # content.info,v 1.6 2007/07/04 23:46:29 yched Exp # content.module,v 1.301 2008/06/03 12:52:23 karens Exp # msgid "" msgstr "" "Project-Id-Version: Drupal 6.x\n" "POT-Creation-Date: 2008-06-06 11:21+0900\n" "PO-Revision-Date: 2008-06-13 11:29+0900\n" "Last-Translator: 0829 <[email protected]>\n" "Language-Team: DRUPAL*DRUPAL <[email protected]>\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" 
"Plural-Forms: nplurals=2; plural=(n!=1);\n" #: modules/cck/modules/userreference/userreference.info:0 msgid "User Reference" msgstr "ユーザ参照" #: modules/cck/modules/userreference/userreference.info:0 msgid "Defines a field type for referencing a user from a node." msgstr "ユーザを参照するためのフィールドタイプを定義します。" #: modules/cck/modules/userreference/userreference.info:0 #: modules/cck/modules/text/text.info:0 #: modules/cck/modules/optionwidgets/optionwidgets.info:0 #: modules/cck/modules/number/number.info:0 #: modules/cck/modules/nodereference/nodereference.info:0 #: modules/cck/modules/fieldgroup/fieldgroup.info:0 #: modules/cck/modules/content_permissions/content_permissions.info:0 #: modules/cck/modules/content_copy/content_copy.info:0 #: modules/cck/content.info:0 msgid "CCK" msgstr "CCK" #: modules/cck/modules/userreference/userreference.module:52 msgid "User reference" msgstr "ユーザ参照" #: modules/cck/modules/userreference/userreference.module:53 msgid "Store the ID of a related user as an integer value." msgstr "参照ユーザの ID を整数の値としてデータベースに保存します。" #: modules/cck/modules/userreference/userreference.module:71 msgid "User roles that can be referenced" msgstr "参照するロール" #: modules/cck/modules/userreference/userreference.module:77 msgid "User status that can be referenced" msgstr "参照を可能とするユーザの状態" #: modules/cck/modules/userreference/userreference.module:79 msgid "Active" msgstr "アクティブ" #: modules/cck/modules/userreference/userreference.module:79 msgid "Blocked" msgstr "ブロック" #: modules/cck/modules/userreference/userreference.module:122 msgid "%name: Invalid user." 
msgstr "%name: 無効なユーザです。" #: modules/cck/modules/userreference/userreference.module:146 #: modules/cck/modules/text/text.module:194 #: modules/cck/examples/example_field.php:383 #: modules/cck/examples/simple_field.php:327 msgid "Default" msgstr "デフォルト" #: modules/cck/modules/userreference/userreference.module:151 #: modules/cck/modules/text/text.module:66;199 #: modules/cck/examples/example_field.php:158;388 #: modules/cck/examples/simple_field.php:332 msgid "Plain text" msgstr "プレーンテキスト" #: modules/cck/modules/userreference/userreference.module:198 #: modules/cck/modules/optionwidgets/optionwidgets.module:62 #: modules/cck/modules/nodereference/nodereference.module:303 msgid "Select list" msgstr "選択リスト" #: modules/cck/modules/userreference/userreference.module:206 #: modules/cck/modules/nodereference/nodereference.module:311 msgid "Autocomplete text field" msgstr "オートコンプリートテキストフィールド" #: modules/cck/modules/userreference/userreference.module:253 msgid "Reverse link" msgstr "バックリンク" #: modules/cck/modules/userreference/userreference.module:255 msgid "No" msgstr "いいえ" #: modules/cck/modules/userreference/userreference.module:255 msgid "Yes" msgstr "はい" #: modules/cck/modules/userreference/userreference.module:257 msgid "If selected, a reverse link back to the referencing node will displayed on the referenced user record." 
msgstr "有効にした場合、ノードから参照されたユーザのアカウントページに参照元ノードへのバックリンクが表示されます。" #: modules/cck/modules/userreference/userreference.module:439 #: modules/cck/modules/nodereference/nodereference.module:544 #: modules/cck/modules/fieldgroup/fieldgroup.module:289 msgid "none" msgstr "なし" #: modules/cck/modules/userreference/userreference.module:586 msgid "Related content" msgstr "関連するコンテンツ" #: modules/cck/modules/userreference/userreference.module:15 msgid "Userreference autocomplete" msgstr "ユーザ参照オートコンプリート" #: modules/cck/modules/userreference/userreference.module:0 msgid "userreference" msgstr "ユーザ参照" #: modules/cck/modules/text/text.info:0 #: modules/cck/modules/text/text.module:49 #: modules/cck/examples/example_field.php:107 #: modules/cck/examples/simple_field.php:115 msgid "Text" msgstr "テキスト" #: modules/cck/modules/text/text.info:0 msgid "Defines simple text field types." msgstr "シンプルなテキストのフィールドタイプを定義します。" #: modules/cck/modules/text/text.module:50 msgid "Store text in the database." msgstr "テキストをデータベースに保存します。" #: modules/cck/modules/text/text.module:66 #: modules/cck/examples/example_field.php:158 msgid "Filtered text (user selects input format)" msgstr "フィルタされたテキスト(選択した入力書式)" #: modules/cck/modules/text/text.module:69 #: modules/cck/examples/example_field.php:161 msgid "Text processing" msgstr "テキストの処理" #: modules/cck/modules/text/text.module:75 #: modules/cck/examples/example_field.php:167 #: modules/cck/examples/simple_field.php:164 msgid "Maximum length" msgstr "最大文字長" #: modules/cck/modules/text/text.module:78 #: modules/cck/examples/example_field.php:170 #: modules/cck/examples/simple_field.php:167 msgid "The maximum length of the field in characters. Leave blank for an unlimited size." 
msgstr "フィールドで使用可能な文字列の最大の長さです。 入力できる文字数を無制限にしたい場合は空欄にしてください。" #: modules/cck/modules/text/text.module:82 #: modules/cck/modules/number/number.module:123 msgid "Allowed values" msgstr "使用する値" #: modules/cck/modules/text/text.module:88 #: modules/cck/modules/number/number.module:129 #: modules/cck/examples/example_field.php:174 msgid "Allowed values list" msgstr "使用する値のリスト" #: modules/cck/modules/text/text.module:92 #: modules/cck/modules/number/number.module:133 #: modules/cck/examples/example_field.php:178 msgid "The possible values this field can contain. Enter one value per line, in the format key|label. The key is the value that will be stored in the database and it must match the field storage type, %type. The label is optional and the key will be used as the label if no label is specified." msgstr "このフィールドで使用する値のリストを、1行あたり 1つの値として \"キー|ラベル\" という形式で入力してください。 キーはデータベースに格納される値になりますのでフィールドの型が %type と一致していなければなりません。 また、ラベルはオプションの設定項目ですので、ラベルを指定しない場合はキーがラベルとして利用されます。" #: modules/cck/modules/text/text.module:96 #: modules/cck/modules/number/number.module:137 #: modules/cck/includes/content.admin.inc:879 #: modules/cck/examples/example_field.php:182 msgid "PHP code" msgstr "PHP コード" #: modules/cck/modules/text/text.module:102 #: modules/cck/modules/number/number.module:143 #: modules/cck/includes/content.admin.inc:897 #: modules/cck/examples/example_field.php:188 msgid "Code" msgstr "コード" #: modules/cck/modules/text/text.module:105 #: modules/cck/modules/number/number.module:146 #: modules/cck/examples/example_field.php:191 msgid "Advanced usage only: PHP code that returns a keyed array of allowed values. Should not include &lt;?php ?&gt; delimiters. If this field is filled out, the array returned by this code will override the allowed values list above." 
msgstr "PHP に精通した方専用: 使用する値のキー配列として返す PHP コードを &lt;?php ?&gt; を含めずに記入してください。 このフィールドが入力されている場合、このコードによって返される値は上記で設定した使用する値のリストを上書きします。" #: modules/cck/modules/text/text.module:150 #: modules/cck/modules/number/number.module:218 #: modules/cck/examples/example_field.php:278 msgid "Illegal value for %name." msgstr "%name の規則に反している値です。" #: modules/cck/modules/text/text.module:159 #: modules/cck/examples/example_field.php:287 #: modules/cck/examples/simple_field.php:231 msgid "%label is longer than %max characters." msgstr "%label が %max 文字を超えています。" #: modules/cck/modules/text/text.module:204 #: modules/cck/examples/example_field.php:393 msgid "Trimmed" msgstr "トリミング" #: modules/cck/modules/text/text.module:254 #: modules/cck/modules/number/number.module:332 #: modules/cck/examples/example_field.php:476 #: modules/cck/examples/simple_field.php:400 msgid "Text field" msgstr "テキストフィールド" #: modules/cck/modules/text/text.module:262 msgid "Text area (multiple rows)" msgstr "テキストエリア(複数行)" #: modules/cck/modules/text/text.module:314 #: modules/cck/examples/example_field.php:551 #: modules/cck/examples/simple_field.php:430 msgid "Rows" msgstr "行数" #: modules/cck/modules/text/text.module:323 #: modules/cck/examples/example_field.php:560 #: modules/cck/examples/simple_field.php:438 msgid "\"Rows\" must be a positive integer." msgstr "\"行数\" は正の整数でなければなりません。" #: modules/cck/modules/text/text.module:0 msgid "text" msgstr "テキスト" #: modules/cck/modules/optionwidgets/optionwidgets.info:0 msgid "Option Widgets" msgstr "オプションウィジェット" #: modules/cck/modules/optionwidgets/optionwidgets.info:0 msgid "Defines selection, check box and radio button widgets for text and numeric fields." msgstr "テキストと数値のフィールドタイプにチェックボックスやラジオボタンの選択ウィジェットを定義します。" #: modules/cck/modules/optionwidgets/optionwidgets.module:10 msgid "Create a list of options as a list in <strong>Allowed values</strong> or as an array in PHP code. These values will be the same for %field in all content types." 
msgstr "<strong>使用する値のリスト</strong> または PHP コードを利用してオプションのリストを作成してください。 設定したリストの値は %field フィールドが表示されるすべてのコンテンツタイプで適用されます。" #: modules/cck/modules/optionwidgets/optionwidgets.module:12 msgid "For a 'single on/off checkbox' widget, define the 'off' value first, then the 'on' value in the <strong>Allowed values</strong> section. Note that the checkbox will be labeled with the label of the 'on' value." msgstr "'シングル ON/OFF チェックボックス' ウィジェットを利用する場合は、最初に 'OFF' の値、次に 'ON' の値の順で、<strong>使用する値のリスト</strong>セクションに定義します。 このチェックボックスでは、'ON' の値のラベルがラベルとして使用されることに注意してください。" #: modules/cck/modules/optionwidgets/optionwidgets.module:15 msgid "The 'checkboxes/radio buttons' widget will display checkboxes if the multiple values option is selected for this field, otherwise radios will be displayed." msgstr "'チェックボックス/ラジオボタン' ウィジェットでは、複数選択のオプションが設定されている場合はチェックボックスが、そうでない場合にはラジオボタンが表示されます。" #: modules/cck/modules/optionwidgets/optionwidgets.module:70 msgid "Check boxes/radio buttons" msgstr "チェックボックス/ラジオボタン" #: modules/cck/modules/optionwidgets/optionwidgets.module:78 msgid "Single on/off checkbox" msgstr "シングル ON/OFF チェックボックス" #: modules/cck/modules/optionwidgets/optionwidgets.module:364 msgid "N/A" msgstr "N/A" #: modules/cck/modules/optionwidgets/optionwidgets.module:0 msgid "optionwidgets" msgstr "オプションウィジェット" #: modules/cck/modules/number/number.info:0 msgid "Number" msgstr "数値" #: modules/cck/modules/number/number.info:0 msgid "Defines numeric field types." msgstr "数値のフィールドタイプを定義します。" #: modules/cck/modules/number/number.module:41 msgid "Integer" msgstr "整数" #: modules/cck/modules/number/number.module:42 msgid "Store a number in the database as an integer." msgstr "数字を整数の値としてデータベースに保存します。" #: modules/cck/modules/number/number.module:49 msgid "Decimal" msgstr "小数" #: modules/cck/modules/number/number.module:50 msgid "Store a number in the database in a fixed decimal format." 
msgstr "数字を小数の値としてデータベースに保存します。" #: modules/cck/modules/number/number.module:57 msgid "Float" msgstr "浮動小数点数" #: modules/cck/modules/number/number.module:58 msgid "Store a number in the database in a floating point format." msgstr "数字を浮動小数点数の値としてデータベースに保存します。" #: modules/cck/modules/number/number.module:76 msgid "Minimum" msgstr "最小値" #: modules/cck/modules/number/number.module:81 msgid "Maximum" msgstr "最大値" #: modules/cck/modules/number/number.module:88 msgid "Precision" msgstr "精度" #: modules/cck/modules/number/number.module:89 msgid "The total number of digits to store in the database, including those to the right of the decimal." msgstr "データベースに保存する桁数の総数(小数点以下の桁数と小数点記号を含む)を選択してください。" #: modules/cck/modules/number/number.module:95 msgid "Scale" msgstr "スケール" #: modules/cck/modules/number/number.module:96 msgid "The number of digits to the right of the decimal." msgstr "小数点以下の桁数を選択してください。" #: modules/cck/modules/number/number.module:102 msgid "Decimal marker" msgstr "小数点記号" #: modules/cck/modules/number/number.module:103 msgid "The character users will input to mark the decimal point in forms." msgstr "フォーム内に入力された内容の小数点を評価するために利用する記号を選択してください。" #: modules/cck/modules/number/number.module:109 msgid "Prefix" msgstr "接頭語" #: modules/cck/modules/number/number.module:112 msgid "Define a string that should be prefixed to the value, like $ or €. Leave blank for none. Separate singular and plural values with a pipe (pound|pounds)." msgstr "値に接頭語として付記する $ や € のような記号を定義してください。 接頭語を付記しない場合は空欄にしてください。 単数形と複数形は、'pound|pounds' のようにパイプ(|)で区切って入力します。" #: modules/cck/modules/number/number.module:116 msgid "Suffix" msgstr "接尾語" #: modules/cck/modules/number/number.module:119 msgid "Define a string that should suffixed to the value, like m², m/s², kb/s. Leave blank for none. Separate singular and plural values with a pipe (pound|pounds)." 
msgstr "値に接尾語として付記する m², m/s², kb/s のような記号を定義してください。 接尾語を付記しない場合は空欄にしてください。 単数形と複数形は、'pound|pounds' のようにパイプ(|)で区切って入力します。" #: modules/cck/modules/number/number.module:152 msgid "\"Minimum\" must be a number." msgstr "\"最小値\" は数値でなければなりません。" #: modules/cck/modules/number/number.module:155 msgid "\"Maximum\" must be a number." msgstr "\"最大値\" は数値でなければなりません。" #: modules/cck/modules/number/number.module:212 msgid "The value of %name may be no smaller than %min." msgstr "%name には %min 未満の値を入力することはできません。" #: modules/cck/modules/number/number.module:215 msgid "The value of %name may be no larger than %max." msgstr "%name には %max より大きい値を入力することはできません。" #: modules/cck/modules/number/number.module:253 msgid "unformatted" msgstr "フォーマットなし" #: modules/cck/modules/number/number.module:466 msgid "Only numbers and decimals are allowed in %field. %start was changed to %value." msgstr "%field フィールドには、数字(小数を含む)のみが入力できます。 %start は %value に変更されました。" #: modules/cck/modules/number/number.module:484 msgid "Only numbers are allowed in %field. %start was changed to %value." msgstr "%field フィールドには、数字のみが入力できます。 %start は %value に変更されました。" #: modules/cck/modules/number/number.module:503 msgid "Only numbers and the decimal character (%decimal) are allowed in %field. %start was changed to %value." msgstr "%field フィールドには、数字と小数点記号(%decimal)のみが入力できます。 %start は %value に変更されました。" #: modules/cck/modules/number/number.module:0 msgid "number" msgstr "数値" #: modules/cck/modules/nodereference/nodereference.info:0 msgid "Node Reference" msgstr "ノード参照" #: modules/cck/modules/nodereference/nodereference.info:0 msgid "Defines a field type for referencing one node from another." msgstr "他のノードを参照するためのフィールドタイプを定義します。" #: modules/cck/modules/nodereference/nodereference.module:71 msgid "Node reference" msgstr "ノード参照" #: modules/cck/modules/nodereference/nodereference.module:72 msgid "Store the ID of a related node as an integer value." 
msgstr "参照ノードの ID を整数の値としてデータベースに保存します。" #: modules/cck/modules/nodereference/nodereference.module:90 msgid "Content types that can be referenced" msgstr "参照するコンテンツタイプ" #: modules/cck/modules/nodereference/nodereference.module:101 msgid "Existing Views" msgstr "既存のビュー" #: modules/cck/modules/nodereference/nodereference.module:108 msgid "Advanced - Nodes that can be referenced (View)" msgstr "高度な設定 - 参照するノード(ビュー)" #: modules/cck/modules/nodereference/nodereference.module:114 msgid "View" msgstr "表示" #: modules/cck/modules/nodereference/nodereference.module:117 msgid "Choose the \"Views module\" view that selects the nodes that can be referenced.<br />Note :<ul><li>Only views that have fields will work for this purpose.</li><li>This will discard the \"Content types\" settings above. Use the view's \"filters\" section instead.</li><li>Use the view's \"fields\" section to display additional informations about candidate nodes on node creation/edition form.</li><li>Use the view's \"sort criteria\" section to determine the order in which candidate nodes will be displayed.</li></ul>" msgstr "\"ビューモジュール\" で定義されたビューの中から、参照するノードとして表示するものを選択してください。<br />注意 :<ul><li>ノード参照として機能するフィールドを持つビューのみが選択できます。</li><li>ここで設定を行なった場合、上記の \"コンテンツタイプ\" は適用されません。 ビューの \"フィルタ\" セクションで代替となる設定を行ってください。</li><li>コンテンツの作成・編集フォームに参照するノードの追加説明を表示する場合にはビューの \"フィールド\" セクションで設定を行ってください。</li><li>参照するノードの順序を決定する場合にはビューの \"並べ替えの基準\" セクションで設定を行ってください。</li></ul>" #: modules/cck/modules/nodereference/nodereference.module:121 msgid "View arguments" msgstr "ビューのアーギュメント" #: modules/cck/modules/nodereference/nodereference.module:124 msgid "Provide a comma separated list of arguments to pass to the view." msgstr "ビューへ渡すアーギュメントのリストをコンマ(,)で区切って設定してください。" #: modules/cck/modules/nodereference/nodereference.module:175 msgid "%name : This post can't be referenced." 
msgstr "%name: この投稿は参照できませんでした。" #: modules/cck/modules/nodereference/nodereference.module:200 msgid "Title (link)" msgstr "タイトル(リンクあり)" #: modules/cck/modules/nodereference/nodereference.module:205 msgid "Title (no link)" msgstr "タイトル(リンクなし)" #: modules/cck/modules/nodereference/nodereference.module:210 #: modules/cck/content.module:1612 msgid "Full node" msgstr "完全なノード" #: modules/cck/modules/nodereference/nodereference.module:215 #: modules/cck/content.module:1611 msgid "Teaser" msgstr "ティーザー" #: modules/cck/modules/nodereference/nodereference.module:518 msgid "%name: Title mismatch. Please check your selection." msgstr "%name: タイトルが不適当です。 選択内容を確認してください。" #: modules/cck/modules/nodereference/nodereference.module:15 msgid "Nodereference autocomplete" msgstr "ノード参照オートコンプリート" #: modules/cck/modules/nodereference/nodereference.module:0 msgid "nodereference" msgstr "ノード参照" #: modules/cck/modules/fieldgroup/fieldgroup.info:0 msgid "Fieldgroup" msgstr "フィールドグループ" #: modules/cck/modules/fieldgroup/fieldgroup.info:0 msgid "Create field groups for CCK fields." msgstr "CCK フィールドにフィールドグループを作成します。" #: modules/cck/modules/fieldgroup/fieldgroup.module:99 msgid "Add" msgstr "追加" #: modules/cck/modules/fieldgroup/fieldgroup.module:106;359 #: modules/cck/includes/content.admin.inc:152;353 msgid "Save" msgstr "保存" #: modules/cck/modules/fieldgroup/fieldgroup.module:117 #: modules/cck/includes/content.admin.inc:163;364;573;798 msgid "Label" msgstr "ラベル" #: modules/cck/modules/fieldgroup/fieldgroup.module:125 msgid "These settings apply to the group in the node editing form." 
msgstr "これらの設定は、コンテンツの編集フォームのグループ構成として適用されます。" #: modules/cck/modules/fieldgroup/fieldgroup.module:129 msgid "Style" msgstr "スタイル" #: modules/cck/modules/fieldgroup/fieldgroup.module:132 msgid "always open" msgstr "常に開く" #: modules/cck/modules/fieldgroup/fieldgroup.module:133 msgid "collapsible" msgstr "折りたたみ(開)" #: modules/cck/modules/fieldgroup/fieldgroup.module:134 msgid "collapsed" msgstr "折りたたみ(閉)" #: modules/cck/modules/fieldgroup/fieldgroup.module:139 #: modules/cck/includes/content.admin.inc:846 msgid "Help text" msgstr "ヘルプテキスト" #: modules/cck/modules/fieldgroup/fieldgroup.module:142 msgid "Instructions to present to the user on the editing form." msgstr "フォームの編集時にユーザに表示する説明です。" #: modules/cck/modules/fieldgroup/fieldgroup.module:148 msgid "These settings apply to the group on node display." msgstr "これらの設定は、ノードに表示されるフォームのグループ構成として適用されます。" #: modules/cck/modules/fieldgroup/fieldgroup.module:152 msgid "Description" msgstr "説明" #: modules/cck/modules/fieldgroup/fieldgroup.module:155 msgid "A description of the group." msgstr "このグループについての説明です。" #: modules/cck/modules/fieldgroup/fieldgroup.module:190 msgid "The group name %name already exists." msgstr "グループ名 %name は既に存在します。" #: modules/cck/modules/fieldgroup/fieldgroup.module:194 msgid "The group name %name is invalid." msgstr "グループ名 %name は無効です。" #: modules/cck/modules/fieldgroup/fieldgroup.module:229 msgid "Are you sure you want to remove the group %label?" msgstr "本当に、%label グループを取り外してよろしいですか?" #: modules/cck/modules/fieldgroup/fieldgroup.module:231 msgid "This action cannot be undone." msgstr "この操作は元に戻すことができませんので、十分に注意して実行してください。" #: modules/cck/modules/fieldgroup/fieldgroup.module:232 #: modules/cck/includes/content.admin.inc:99;115;733 msgid "Remove" msgstr "取り外す" #: modules/cck/modules/fieldgroup/fieldgroup.module:232 #: modules/cck/includes/content.admin.inc:733 msgid "Cancel" msgstr "キャンセル" #: modules/cck/modules/fieldgroup/fieldgroup.module:240 msgid "The group %group_name has been removed." 
msgstr "%group_name グループを取り外しました。" #: modules/cck/modules/fieldgroup/fieldgroup.module:343 msgid "Display in group" msgstr "表示するグループ" #: modules/cck/modules/fieldgroup/fieldgroup.module:346 msgid "Select a group, in which the field will be displayed on the editing form." msgstr "コンテンツの追加・編集フォームでフィールドをグループ化して表示する場合、そのグループ名を選択してください。" #: modules/cck/modules/fieldgroup/fieldgroup.module:29 msgid "Add group" msgstr "グループの追加" #: modules/cck/modules/fieldgroup/fieldgroup.module:37;44 msgid "Edit group" msgstr "グループの編集" #: modules/cck/modules/fieldgroup/fieldgroup.module:0 msgid "fieldgroup" msgstr "フィールドグループ" #: modules/cck/modules/content_permissions/content_permissions.info:0 msgid "Content Permissions" msgstr "コンテンツ権限" #: modules/cck/modules/content_permissions/content_permissions.info:0 msgid "Set field-level permissions for CCK fields." msgstr "CCK フィールドにフィールドレベルの権限を設定します。" #: modules/cck/modules/content_permissions/content_permissions.install:7 msgid "Please <a href=\"!url\">configure your field permissions</a> immediately. All fields are inaccessible by default." msgstr "直ちに<a href=\"!url\">フィールド権限の構成</a>を行ってください。 デフォルトですべてのフィールドがアクセス不可に設定されています。" #: modules/cck/modules/content_permissions/content_permissions.module:9 msgid "edit " msgstr "編集" #: modules/cck/modules/content_permissions/content_permissions.module:9;10 msgid "field_name" msgstr "フィールド名" #: modules/cck/modules/content_permissions/content_permissions.module:10 msgid "view " msgstr "表示" #: modules/cck/modules/content_permissions/content_permissions.module:0 msgid "content_permissions" msgstr "コンテンツ権限" #: modules/cck/modules/content_copy/content_copy.info:0 msgid "Content Copy" msgstr "コンテンツコピー" #: modules/cck/modules/content_copy/content_copy.info:0 msgid "Enables ability to import/export field definitions." 
msgstr "フィールド定義をインポートまたはエクスポートできるようにします。" #: modules/cck/modules/content_copy/content_copy.module:80 msgid "This form will process a content type and one or more fields from that type and export the settings. The export created by this process can be copied and pasted as an import into the current or any other database. The import will add the fields to into an existing content type or create a new content type that includes the selected fields." msgstr "このフォームでは、コンテンツタイプとフィールドの設定をエクスポートするための処理を行います。 ここでエクスポートされたものを、現在のデータベースまたはその他のデータベースへコピーすることでインポートができます。 インポートでは既存のコンテンツタイプにフィールドを追加したり、選択したフィールドを含む新しいコンテンツタイプを作成したりすることができます。" #: modules/cck/modules/content_copy/content_copy.module:86 msgid "Types" msgstr "タイプ" #: modules/cck/modules/content_copy/content_copy.module:90 msgid "Select the content type to export." msgstr "エクスポートするコンテンツタイプを選択してください。" #: modules/cck/modules/content_copy/content_copy.module:115 msgid "Groups" msgstr "グループ" #: modules/cck/modules/content_copy/content_copy.module:119 msgid "Select the group definitions to export from %type." msgstr "%type からエクスポートするグループの定義を選択してください。" #: modules/cck/modules/content_copy/content_copy.module:125 #: modules/cck/content.module:119 msgid "Fields" msgstr "フィールド" #: modules/cck/modules/content_copy/content_copy.module:129 msgid "Select the field definitions to export from %type." msgstr "%type からエクスポートするフィールドの定義を選択してください。" #: modules/cck/modules/content_copy/content_copy.module:139 msgid "Export data" msgstr "エクスポートデータ" #: modules/cck/modules/content_copy/content_copy.module:144 msgid "Copy the export text and paste it into another content type using the import function." 
msgstr "エクスポートされたテキストをコピーして、インポート機能を利用した他のコンテンツタイプへ貼り付けてください。" #: modules/cck/modules/content_copy/content_copy.module:154;38 msgid "Export" msgstr "エクスポート" #: modules/cck/modules/content_copy/content_copy.module:227 msgid "This form will import field definitions exported from another content type or another database.<br/>Note that fields cannot be duplicated within the same content type, so imported fields will be added only if they do not already exist in the selected type." msgstr "このフォームでは、他のコンテンツタイプやデータベースからエクスポートされたフィールド定義をインポートします。<br />同一のコンテンツタイプ内では重複するフィールドを作成できないため、インポートされたフィールドのうち、選択したコンテンツタイプ内にまだ作成されていないフィールドのみが追加されるということに注意してください。" #: modules/cck/modules/content_copy/content_copy.module:230 msgid "<Create>" msgstr "<新規作成>" #: modules/cck/modules/content_copy/content_copy.module:232 msgid "Content type" msgstr "コンテンツタイプ" #: modules/cck/modules/content_copy/content_copy.module:233 msgid "Select the content type to import these fields into.<br/>Select &lt;Create&gt; to create a new content type to contain the fields." msgstr "フィールドをインポートするコンテンツタイプを選択してください。<br />&lt;新規作成&gt; を選択した場合、フィールドを含む新しいコンテンツタイプが作成されます。" #: modules/cck/modules/content_copy/content_copy.module:238 msgid "Import data" msgstr "インポートデータ" #: modules/cck/modules/content_copy/content_copy.module:240 msgid "Paste the text created by a content export into this field." msgstr "エクスポートされた内容をこのフィールド内にコピーしてください。" #: modules/cck/modules/content_copy/content_copy.module:244;46 msgid "Import" msgstr "インポート" #: modules/cck/modules/content_copy/content_copy.module:270 msgid "The import data is not valid import text." msgstr "入力したインポートデータは有効なインポートテキストではありません。" #: modules/cck/modules/content_copy/content_copy.module:318 msgid "The following modules must be enabled for this import to work: %modules." 
msgstr "インポートした内容を動作させるためには次のモジュールを有効にする必要があります。: %modules" #: modules/cck/modules/content_copy/content_copy.module:324;338 msgid "<create>" msgstr "<新規作成>" #: modules/cck/modules/content_copy/content_copy.module:326 msgid "The content type %type already exists in this database." msgstr "%type コンテンツタイプは既にデータベースに存在しています。" #: modules/cck/modules/content_copy/content_copy.module:333 msgid "Exiting. No import performed." msgstr "インポートを終了します。 インポートは実行されませんでした。" #: modules/cck/modules/content_copy/content_copy.module:355 msgid "An error has occurred adding the content type %type.<br/>Please check the errors displayed for more details." msgstr "%type コンテンツタイプの追加でエラーが発生しました。<br />詳細については、表示されたエラーを確認してください。" #: modules/cck/modules/content_copy/content_copy.module:380 msgid "The imported field %field_label (%field_name) was not added to %type because that field already exists in %type." msgstr "インポートした %field_label (%field_name) フィールドは %type 内に既に存在しているため追加されませんでした。" #: modules/cck/modules/content_copy/content_copy.module:389 msgid "The field %field_label (%field_name) was added to the content type %type." msgstr "%field_label (%field_name) フィールドが %type コンテンツタイプに追加されました。" #: modules/cck/modules/content_copy/content_copy.module:503 msgid "An error occurred when exporting the 'display settings' data for the field %field_name.<br/>The db error is: '%db_err'." msgstr "%field_name フィールドの 'フィールドの表示' データのエクスポート時にエラーが発生しました。<br />データベースエラー: '%db_err'" #: modules/cck/modules/content_copy/content_copy.module:0 msgid "content_copy" msgstr "コンテンツコピー" #: modules/cck/includes/content.admin.inc:15;552;804 msgid "Field name" msgstr "フィールド名" #: modules/cck/includes/content.admin.inc:15;582;593;810 msgid "Field type" msgstr "フィールドタイプ" #: modules/cck/includes/content.admin.inc:15 msgid "Used in" msgstr "利用" #: modules/cck/includes/content.admin.inc:38 msgid "No fields have been defined for any content type yet." 
msgstr "すべてのコンテンツタイプでフィールドが定義されていません。" #: modules/cck/includes/content.admin.inc:63;242 msgid "There are no fields configured for this content type. You can !link." msgstr "このコンテンツタイプで設定されたフィールドはありません。 !link を行うことができます。" #: modules/cck/includes/content.admin.inc:64;243 msgid "Add a new field" msgstr "新しいフィールドの追加" #: modules/cck/includes/content.admin.inc:88 msgid "To change the order of a field, grab a drag-and-drop handle under the Label column and drag the field to a new location in the list. (Grab a handle by clicking and holding the mouse while hovering over a handle icon.) Remember that your changes will not be saved until you click the Save button at the bottom of the page." msgstr "フィールドの順序を変更するには、ラベル部分にあるドラッグ&ドロップハンドルをつかんでリスト内の新しい場所までドラッグしてください。(ハンドルアイコンの上でマウスボタンを押したままの状態にすることでハンドルをつかむことができます。) ページ下部の保存ボタンをクリックするまでは、行った変更が保存されないことに注意してください。" #: modules/cck/includes/content.admin.inc:98;114 msgid "Configure" msgstr "設定" #: modules/cck/includes/content.admin.inc:163 msgid "Name" msgstr "名前" #: modules/cck/includes/content.admin.inc:163 msgid "Type" msgstr "タイプ" #: modules/cck/includes/content.admin.inc:163 msgid "Weight" msgstr "ウェイト" #: modules/cck/includes/content.admin.inc:163 msgid "Operations" msgstr "操作" #: modules/cck/includes/content.admin.inc:270;311 msgid "Above" msgstr "上部" #: modules/cck/includes/content.admin.inc:271 msgid "Inline" msgstr "インライン" #: modules/cck/includes/content.admin.inc:272;297;312;320 msgid "<Hidden>" msgstr "<非表示>" #: modules/cck/includes/content.admin.inc:315 msgid "no styling" msgstr "スタイルなし" #: modules/cck/includes/content.admin.inc:316 msgid "simple" msgstr "シンプル" #: modules/cck/includes/content.admin.inc:317 msgid "fieldset" msgstr "フィールドセット" #: modules/cck/includes/content.admin.inc:318 msgid "fieldset - collapsible" msgstr "フィールドセット - 折りたたみ(開)" #: modules/cck/includes/content.admin.inc:319 msgid "fieldset - collapsed" msgstr "フィールドセット - 折りたたみ(閉)" #: modules/cck/includes/content.admin.inc:364 msgid "Field" msgstr "フィールド" 
#: modules/cck/includes/content.admin.inc:411 msgid "Your settings have been saved." msgstr "設定が保存されました。" #: modules/cck/includes/content.admin.inc:428 msgid "No field modules are enabled. You need to <a href=\"!modules_url\">enable one</a>, such as text.module, before you can add new fields." msgstr "フィールドを追加するためのモジュールが有効になっていません。 新しいフィールドを追加する前に、Text モジュール等の <a href=\"!modules_url\">モジュールを有効化</a> する必要があります。" #: modules/cck/includes/content.admin.inc:470 msgid "Add existing field" msgstr "既存のフィールドを追加" #: modules/cck/includes/content.admin.inc:479 #: modules/cck/content.module:173 msgid "Add field" msgstr "フィールドの追加" #: modules/cck/includes/content.admin.inc:497 msgid "Added field %label." msgstr "%label フィールドを追加しました。" #: modules/cck/includes/content.admin.inc:500 msgid "There was a problem adding field %label." msgstr "%label フィールドの追加中に問題が発生しました。" #: modules/cck/includes/content.admin.inc:540 msgid "Create new field" msgstr "フィールドの新規作成" #: modules/cck/includes/content.admin.inc:547 msgid "Edit basic information" msgstr "基本情報の編集" #: modules/cck/includes/content.admin.inc:556 msgid "The machine-readable name of the field." msgstr "コンピュータが扱うことのできる、このフィールドの名前を入力してください。" #: modules/cck/includes/content.admin.inc:560 msgid " This name cannot be changed." msgstr " この名前は変更できません。" #: modules/cck/includes/content.admin.inc:568 msgid " This name cannot be changed later! The name will be prefixed with 'field_' and can include lowercase unaccented letters, numbers, and underscores. The length of the name, including the prefix, is limited to no more than 32 letters." msgstr " この名前は後から変更することができません! フィールド名には 'field_' という接頭語が追加されます。 名前に使用できる文字はアクセント記号のない英小文字、数字、アンダースコア(_)のみです。 フィールド名は接頭語を含んで 32 文字以内にしてください。" #: modules/cck/includes/content.admin.inc:576 msgid "A human-readable name to be used as the label for this field in the %type content type." 
msgstr "人が読むことのできる、このフィールドの名前を入力してください。 この名前は %type コンテンツタイプのラベルとして使用されます。" #: modules/cck/includes/content.admin.inc:586 msgid "The type of data you would like to store in the database with this field." msgstr "データベースに保存する、このフィールドのデータ型を選択してください。" #: modules/cck/includes/content.admin.inc:596 msgid "The type of data you would like to store in the database with this field. This option cannot be changed." msgstr "データベースに保存する、このフィールドのデータ型を選択してください。 このオプションは変更できません。" #: modules/cck/includes/content.admin.inc:601;817 msgid "Widget type" msgstr "ウィジェットタイプ" #: modules/cck/includes/content.admin.inc:605 msgid "The type of form element you would like to present to the user when creating this field in the %type content type." msgstr "使用したいフォームエレメントのタイプを選択してください。 選択したウィジェットタイプは %type コンテンツタイプのフィールドに適用されます。" #: modules/cck/includes/content.admin.inc:619 msgid "Continue" msgstr "継続" #: modules/cck/includes/content.admin.inc:640 msgid "The field name %field_name is invalid. The name must include only lowercase unaccentuated letters, numbers, and underscores." msgstr "フィールド名 %field_name は無効です。 フィールド名に使用できる文字は、アクセント記号のない英小文字、数字、アンダースコア(_)のみです。" #: modules/cck/includes/content.admin.inc:644 msgid "The field name %field_name is too long. The name is limited to 32 characters, including the 'field_' prefix." msgstr "フィールド名 %field_name は最大文字数を超えています。 フィールド名は 'field_' 接頭語を含む 32 文字までに制限されています。" #: modules/cck/includes/content.admin.inc:648 msgid "The field name %field_name already exists." msgstr "フィールド名 %field_name は既に存在します。" #: modules/cck/includes/content.admin.inc:652 msgid "The name 'field_instance' is a reserved name." msgstr "フィールド名 'field_instance' は予約語です。" #: modules/cck/includes/content.admin.inc:684 msgid "Created field %label." msgstr "%label フィールドを作成しました。" #: modules/cck/includes/content.admin.inc:688 msgid "There was a problem creating field %label." msgstr "%label フィールドの作成中に問題が発生しました。" #: modules/cck/includes/content.admin.inc:697 msgid "Update field %label." 
msgstr "%label フィールドをアップデートしました。" #: modules/cck/includes/content.admin.inc:701 msgid "There was a problem updating field %label." msgstr "%label フィールドの更新中に問題が発生しました。" #: modules/cck/includes/content.admin.inc:729 msgid "Are you sure you want to remove the field %field?" msgstr "本当に、%field フィールドを取り外してよろしいですか?" #: modules/cck/includes/content.admin.inc:732 msgid "If you have any content left in this field, it will be lost. This action cannot be undone." msgstr "このフィールドに含まれるすべての内容を削除します。 この操作は元に戻すことができません。" #: modules/cck/includes/content.admin.inc:751 msgid "Removed field %field from %type." msgstr "%type から %field フィールドを取り外しました。" #: modules/cck/includes/content.admin.inc:756 msgid "There was a problem deleting %field from %type." msgstr "%label フィールドの削除中に問題が発生しました。" #: modules/cck/includes/content.admin.inc:782;824 msgid "Change basic information" msgstr "基本情報の変更" #: modules/cck/includes/content.admin.inc:794 msgid "%type basic information" msgstr "%type 基本情報" #: modules/cck/includes/content.admin.inc:831 msgid "%type settings" msgstr "%type 設定" #: modules/cck/includes/content.admin.inc:832 msgid "These settings apply only to the %field field as it appears in the %type content type." msgstr "これらの設定は %type コンテンツタイプの %field フィールドのみに適用されます。" #: modules/cck/includes/content.admin.inc:849 msgid "Instructions to present to the user below this field on the editing form." msgstr "コンテンツの追加・編集フォームで、このフィールドの下部に表示するユーザへの説明です。" #: modules/cck/includes/content.admin.inc:857 msgid "Default value" msgstr "デフォルト値" #: modules/cck/includes/content.admin.inc:901 msgid "Advanced usage only: PHP code that returns a default value. Should not include &lt;?php ?&gt; delimiters. If this field is filled out, the value returned by this code will override any value specified above. Expected format :<pre>!sample</pre>Using !link_devel's 'devel load' tab on a %type content page might help you figure out the expected format." 
msgstr "PHP に精通した方専用: デフォルト値として返す PHP コードを &lt;?php ?&gt; を含めずに記入してください。 このフィールドが入力されている場合、このコードによって返される値は上記で指定したすべての値を上書きします。<br />期待されるフォーマット: <pre>!sample</pre>%type コンテンツページで !link_devel の 'Devel ロード' タブを使用することが、予想フォーマットの理解を助けるかもしれません。" #: modules/cck/includes/content.admin.inc:910 msgid "Global settings" msgstr "全般の設定" #: modules/cck/includes/content.admin.inc:911 msgid "These settings apply to the %field field in every content type in which it appears." msgstr "これらの設定は %field フィールドが表示されるすべてのコンテンツタイプで適用されます。" #: modules/cck/includes/content.admin.inc:915 msgid "Required" msgstr "必須" #: modules/cck/includes/content.admin.inc:920 msgid "Number of values" msgstr "値の数" #: modules/cck/includes/content.admin.inc:921 msgid "Unlimited" msgstr "無制限" #: modules/cck/includes/content.admin.inc:923 msgid "Select a specific number of values for this field, or 'Unlimited' to provide an 'Add more' button so the users can add as many values as they like." msgstr "このフィールドが持つことのできる値の数を選択してください。 'アイテムの追加' ボタンを表示し自由に多くの値を追加できるようにしたい場合は、'無制限' を選択してください。" #: modules/cck/includes/content.admin.inc:923 msgid "Warning! Changing this setting after data has been created could result in the loss of data!" msgstr "警告!データ入力後にこの設定を変更すると、データの損失を招く可能性があります!" #: modules/cck/includes/content.admin.inc:938 msgid "Save field settings" msgstr "フィールド設定の保存" #: modules/cck/includes/content.admin.inc:1031 msgid "The default value PHP code returned an incorrect value.<br/>Expected format: <pre>!sample</pre> Returned value: @value" msgstr "デフォルト値として返される PHP コードの値に誤った値が返されました。<br />期待されるフォーマット: <pre>!sample</pre>返された値: @value" #: modules/cck/includes/content.admin.inc:1068 msgid "The default value PHP code created @value which is invalid." msgstr "デフォルト値として返された PHP コードの値 @value は無効です。" #: modules/cck/includes/content.admin.inc:1072 msgid "The default value is invalid." msgstr "デフォルト値は無効です。" #: modules/cck/includes/content.admin.inc:1090 msgid "Saved field %label." 
msgstr "%label フィールドを保存しました。" #: modules/cck/includes/content.admin.inc:1398 msgid "Processing" msgstr "処理中" #: modules/cck/includes/content.admin.inc:1399 msgid "The update has encountered an error." msgstr "アップデートはエラーに遭遇しました。" #: modules/cck/includes/content.admin.inc:1413 msgid "The database has been altered and data has been migrated or deleted." msgstr "データベースが変更され、データは移動または削除されました。" #: modules/cck/includes/content.admin.inc:1416 msgid "An error occurred and database alteration did not complete." msgstr "エラーが発生したため、データベースの変更は完了しませんでした。" #: modules/cck/includes/content.admin.inc:1519 msgid "Processing %title" msgstr "%title を処理しています。" #: modules/cck/includes/content.admin.inc:1417 msgid "1 item successfully processed:" msgid_plural "@count items successfully processed:" msgstr[0] "1 アイテムが正常に処理されました:" msgstr[1] "@count アイテムが正常に処理されました:" #: modules/cck/includes/content.crud.inc:563;600 #: modules/cck/content.module:568;575;0 msgid "content" msgstr "コンテンツ" #: modules/cck/includes/content.crud.inc:563 msgid "Content fields table %old_name has been renamed to %new_name and field instances have been updated." msgstr "コンテンツフィールドテーブル %old_name の名前が %new_name に変更され、フィールドのインスタンスもアップデートされました。" #: modules/cck/includes/content.crud.inc:600 msgid "The content fields table %name has been deleted." msgstr "コンテンツフィールドテーブル %name が削除されました。" #: modules/cck/includes/content.node_form.inc:195 msgid "Add another !field value" msgstr "!field フィールドの値の追加" #: modules/cck/includes/content.node_form.inc:196 msgid "If the amount of boxes above isn't enough, click here to add more items." 
msgstr "上の内容量が不足した場合は、ここをクリックしてアイテムを追加できます。" #: modules/cck/includes/content.token.inc:60 msgid "Referenced node ID" msgstr "参照ノード ID" #: modules/cck/includes/content.token.inc:61 msgid "Referenced node title" msgstr "参照ノードタイトル" #: modules/cck/includes/content.token.inc:62 msgid "Formatted HTML link to the node" msgstr "HTML フォーマットのノードへのリンク" #: modules/cck/includes/content.token.inc:86 msgid "Raw number value" msgstr "数値(RAW データ)" #: modules/cck/includes/content.token.inc:87 msgid "Formatted number value" msgstr "フォーマットされた数値" #: modules/cck/includes/content.token.inc:110 msgid "Raw, unfiltered text" msgstr "フィルタを通していないテキスト(RAW データ)" #: modules/cck/includes/content.token.inc:111 msgid "Formatted and filtered text" msgstr "フォーマットおよびフィルタされたテキスト" #: modules/cck/includes/content.token.inc:133 msgid "Referenced user ID" msgstr "参照ユーザの ID" #: modules/cck/includes/content.token.inc:134 msgid "Referenced user name" msgstr "参照ユーザの名前" #: modules/cck/includes/content.token.inc:135 msgid "Formatted HTML link to referenced user" msgstr "HTML フォーマットの参照ユーザへのリンク" #: modules/cck/includes/content.views.inc:57;94 #: modules/cck/content.info:0 msgid "Content" msgstr "コンテンツ" #: modules/cck/includes/content.views.inc:96 msgid "Appears in : @types" msgstr "発行: @types" #: modules/cck/includes/content.views.inc:294 msgid "Format" msgstr "フォーマット" #: modules/cck/includes/content.views.inc:375 msgid "Group multiple values" msgstr "値をグループ化する" #: modules/cck/includes/content.views.inc:382 msgid "Show" msgstr "表示" #: modules/cck/includes/content.views.inc:383 msgid " values," msgstr " 個の値、" #: modules/cck/includes/content.views.inc:390 msgid "Starting from" msgstr "開始する値" #: modules/cck/includes/content.views.inc:394 msgid "Start from last values" msgstr "最後の値から開始する" #: modules/cck/examples/example_field.php:484 msgid "Text area" msgstr "テキストエリア" #: modules/cck/content.info:0 msgid "Allows administrators to define new content types." 
msgstr "管理者が新しいコンテンツタイプを定義できるようにします。" #: modules/cck/content.module:25 msgid "The content module, a required component of the Content Construction Kit (CCK), allows administrators to associate custom fields with content types. In Drupal, content types are used to define the characteristics of a post, including the title and description of the fields displayed on its add and edit pages. Using the content module (and the other helper modules included in CCK), custom fields beyond the default \"Title\" and \"Body\" may be added. CCK features are accessible through tabs on the <a href=\"@content-types\">content types administration page</a>. (See the <a href=\"@node-help\">node module help page</a> for more information about content types.)" msgstr "コンテンツモジュールは、Content Construction Kit (CCK) の必須のコンポーネントであり、管理者がコンテンツタイプにカスタムフィールドを追加するための機能を提供します。 Drupal では、コンテンツの追加および編集時に、コンテンツの入力フィールドにタイトルや説明を表示することによって、投稿するコンテンツに異なる特徴を定義するために、コンテンツタイプが使用されます。 コンテンツモジュール(および CCK に含まれる他のヘルパーモジュール)を使用して、デフォルトの \"タイトル\" や \"本文\" 以外のカスタムフィールドを追加することができます。 CCK の機能へは、<a href=\"@content-types\">コンテンツタイプの管理ページ</a> のタブメニューからアクセスできます。 (コンテンツタイプについての詳細は <a href=\"@node-help\">Node モジュールのヘルプページ</a> を参照してください。 )" #: modules/cck/content.module:26 msgid "When adding a custom field to a content type, you determine its type (whether it will contain text, numbers, or references to other objects) and how it will be displayed (either as a text field or area, a select box, checkbox, radio button, or autocompleting field). A field may have multiple values (i.e., a \"person\" may have multiple e-mail addresses) or a single value (i.e., an \"employee\" has a single employee identification number). As you add and edit fields, CCK automatically adjusts the structure of the database as necessary. CCK also provides a number of other features, including intelligent caching for your custom data, an import and export facility for content type definitions, and integration with other contributed modules." 
msgstr "カスタムフィールドをコンテンツタイプに追加する場合、追加するフィールドのタイプ(テキスト、数字、他のオブジェクトの参照の何れか)と、フィールドの表示スタイル(テキストフィールド、テキストエリア、選択ボックス、チェックボックス、ラジオボタン、オートコンプリートフィールドの何れか)を決定します。 フィールドは複数の値(例: \"個人\" は複数の E-mail アドレスを持つかもしれません)もしくは、ひとつの値(例: \"従業員\" は固有の社員番号を持ちます)を持つことができます。 フィールドの追加や編集を行うと、CCK は必要に応じて自動的にデータベースの構造を調節します。 また CCK は他にも、カスタムデータのためのインテリジェント・キャッシング、コンテンツタイプ定義のためのインポート/エクスポート機能、他の寄与モジュールの統合などを含む多くの特徴も提供します。" #: modules/cck/content.module:27 msgid "Custom field types are provided by a set of optional modules included with CCK (each module provides a different type). The <a href=\"@modules\">modules page</a> allows you to enable or disable CCK components. A default installation of CCK includes:" msgstr "カスタムフィールドのタイプは CCK に含まれるオプションモジュールから、それぞれ別のタイプとして提供します。 <a href=\"@modules\">モジュールページ</a> から CCK コンポーネントの有効/無効を切り替えることができます。 CCK のデフォルトインストールは次を含みます。:" #: modules/cck/content.module:29 msgid "<em>number</em>, which adds numeric field types, in integer, decimal or floating point form. You may define a set of allowed inputs, or specify an allowable range of values. A variety of common formats for displaying numeric data are available." msgstr "<storng>数字</storng>は、数値形式のフィールドタイプとして、整数、小数、浮動小数点のデータ型フィールドを追加します。 入力値として、使用する値のリストを定義するか、もしくは、入力可能な値の範囲を指定することができます。 数値データのデータの表示に関して、多様な共通フォーマットを利用できます。" #: modules/cck/content.module:30 msgid "<em>text</em>, which adds text field types. A text field may contain plain text only, or optionally, may use Drupal's input format filters to securely manage rich text input. Text input fields may be either a single line (text field), multiple lines (text area), or for greater input control, a select box, checkbox, or radio buttons. If desired, CCK can validate the input to a set of allowed values." 
msgstr "<storng>テキスト</storng>は、テキストフィールドタイプのフィールドを追加します。 テキストフィールドにはプレーンテキストのみを含むことができますが、オプションとして、安全なリッチテキストの入力のために Drupal の入力書式を使用することもできます。 テキストの入力フィールドには、一般的なテキストフィールドのような 1行の入力フォームやテキストエリアのような複数行の入力フォームが使用できます。 また、入力内容をコントロールしたい場合などのために、選択ボックス、チェックボックス、ラジオボタンも使用できます。 必要に応じて、入力値に使用する値のリストを設定することも可能です。" #: modules/cck/content.module:31 msgid "<em>nodereference</em>, which creates custom references between Drupal nodes. By adding a <em>nodereference</em> field and two different content types, for instance, you can easily create complex parent/child relationships between data (multiple \"employee\" nodes may contain a <em>nodereference</em> field linking to an \"employer\" node)." msgstr "<storng>ノード参照</storng>は、サイト内の他のノードを参照するためのフィールドを追加します。 <storng>ノード参照</storng>フィールドを使うことで、2つの異なるコンテンツタイプ間での複雑な親子関係を簡単に作成することができます。 例えば、複数の \"従業員\" ノードに 1つの \"雇用主\" ノードを参照する<storng>ノード参照</storng>フィールドを持たせることができます。" #: modules/cck/content.module:32 msgid "<em>userreference</em>, which creates custom references to your sites' user accounts. By adding a <em>userreference</em> field, you can create complex relationships between your site's users and posts. To track user involvement in a post beyond Drupal's standard <em>Authored by</em> field, for instance, add a <em>userreference</em> field named \"Edited by\" to a content type to store a link to an editor's user account page." msgstr "<storng>ユーザ参照</storng>は、サイト内のユーザアカウントを参照するためのフィールドを追加します。 <storng>ユーザ参照</storng>フィールドを使うことで、サイトユーザと投稿間での複雑な親子関係を作成することができます。 例えば、\"編集者\" という<storng>ユーザ参照</storng>フィールドによって、Drupal 標準の<storng>投稿者</storng>フィールドを使用するだけでは軌跡に表示されないノードへのリンクを、それぞれの編集者のユーザアカウントページに追加することができます。" #: modules/cck/content.module:33 msgid "<em>fieldgroup</em>, which creates collapsible fieldsets to hold a group of related fields. A fieldset may either be open or closed by default. The order of your fieldsets, and the order of fields within a fieldset, is managed via a drag-and-drop interface provided by content module." 
msgstr "<storng>フィールドグループ</storng>は、関連するフィールドをグループ化するための折りたたみ可能なフィールドセットを追加します。 フィールドセットのデフォルトの状態として、開いている状態または閉じている状態のどちらかを選択できます。 フィールドセットおよびフィールドセット内のフィールドの順序は、コンテンツモジュールが提供するドラッグ&ドロップインターフェイスによって管理されます。" #: modules/cck/content.module:35 msgid "For more information, see the online handbook entry for <a href=\"@handbook-cck\">CCK</a> or the <a href=\"@project-cck\">CCK project page</a>." msgstr "詳細については、Drupal handbook の <a href=\"@handbook-cck\">CCK</a> ページまたは <a href=\"@project-cck\">CCK プロジェクト</a>ページで確認できます。" #: modules/cck/content.module:41 msgid "Configure how this content type's fields and field labels should be displayed when it's viewed in teaser and full-page mode." msgstr "このコンテンツタイプで作成されたノードが要約ページや全文ページとして表示されたときの、フィールドやラベルの表示方法について設定します。" #: modules/cck/content.module:44 msgid "Configure how this content type's fields should be displayed when it's rendered in the following contexts." msgstr "このコンテンツタイプのフィールドについて、以下の場面で表示される時の表示方法を設定してください。" #: modules/cck/content.module:48 msgid "Control the order of fields in the input form." msgstr "入力フォームに表示するフィールドの順序をコントロールします。" #: modules/cck/content.module:479 msgid "This field is required." msgstr "このフィールドの入力は必須です。" #: modules/cck/content.module:483 msgid "!title: !required" msgstr "!title: !required" #: modules/cck/content.module:486 msgid "Order" msgstr "並べ替え順" #: modules/cck/content.module:516 msgid "Add another item" msgstr "アイテムの追加" #: modules/cck/content.module:1616 msgid "RSS Item" msgstr "RSS アイテム" #: modules/cck/content.module:1618 msgid "Search Index" msgstr "検索インデックス" #: modules/cck/content.module:1619 msgid "Search Result" msgstr "検索結果" #: modules/cck/content.module:1957 msgid "Language" msgstr "言語" #: modules/cck/content.module:1960 msgid "Taxonomy" msgstr "タクソノミー" #: modules/cck/content.module:1963 msgid "File attachments" msgstr "ファイルの添付" #: modules/cck/content.module:568 msgid "Updating field type %type with module %module." 
msgstr "%module モジュールによって %type タイプフィールドが更新されました。" #: modules/cck/content.module:575 msgid "Updating widget type %type with module %module." msgstr "%module モジュールによって %type タイプウィジェットが更新されました。" #: modules/cck/content.module:142 msgid "Manage fields" msgstr "フィールドの管理" #: modules/cck/content.module:151 msgid "Display fields" msgstr "フィールドの表示" #: modules/cck/content.module:160 msgid "General" msgstr "全般" #: modules/cck/content.module:166 msgid "Advanced" msgstr "高度な設定" #: modules/cck/content.module:194 msgid "Remove field" msgstr "フィールドの取り外し"
{ "pile_set_name": "Github" }
@model List<Microsoft.AspNetCore.Html.HtmlString> @{ ViewData["Title"] = "Index"; } <h2>Show tenant hierarchy</h2> @foreach (var html in @Model) { @html <p>&nbsp;</p><p> </p> }
{ "pile_set_name": "Github" }
// This file was automatically generated on Thu May 29 07:24:54 2008 // by libs/config/tools/generate.cpp // Copyright John Maddock 2002-4. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org/libs/config for the most recent version.// // Revision $Id: no_rvalue_references_pass.cpp 79537 2012-07-15 15:59:05Z marshall $ // // Test file for macro BOOST_NO_CXX11_RVALUE_REFERENCES // This file should compile, if it does not then // BOOST_NO_CXX11_RVALUE_REFERENCES should be defined. // See file boost_no_rvalue_references.ipp for details // Must not have BOOST_ASSERT_CONFIG set; it defeats // the objective of this file: #ifdef BOOST_ASSERT_CONFIG # undef BOOST_ASSERT_CONFIG #endif #include <boost/config.hpp> #include "test.hpp" #ifndef BOOST_NO_CXX11_RVALUE_REFERENCES #include "boost_no_rvalue_references.ipp" #else namespace boost_no_rvalue_references = empty_boost; #endif int main( int, char *[] ) { return boost_no_rvalue_references::test(); }
{ "pile_set_name": "Github" }
const process = require('process'); const webdriver = require('selenium-webdriver'); const {until} = require('selenium-webdriver'); process.env['Path'] = process.env['Path'] + ';' + process.env['ChromeWebDriver']; describe('sampleFunctionalTests', function () { this.timeout(0); let driver; var capabilities = webdriver.Capabilities.chrome(); capabilities.set('chromeOptions', {'args': ['--no-sandbox']}); before(async () => { driver = new webdriver.Builder() .forBrowser('chrome') .withCapabilities(capabilities) .build(); await driver.manage().setTimeouts({pageLoad: 300000}); }) after((done) => { driver.quit() .then(() => done()) .catch(() => { done(); }); }); it('Assert page title', async() => { var startTimestamp = Date.now() var endTimestamp = startTimestamp + 60*10*1000; while(true) { try { await driver.get(process.env['webAppUrl']); await driver.wait(until.titleIs('Node.js Application'), 2000); break; } catch(err) { var currentTimestamp = Date.now() if(currentTimestamp > endTimestamp) { console.log("##vso[task.logissue type=error;]Failed with error " + err) throw new Error('Failed with error ' + err); } await new Promise(resolve=>{ setTimeout(resolve,5000) }); } } }); });
{ "pile_set_name": "Github" }
{ "IsLinux" : true, "UseLinuxVersion": false, "SkipGssNtlmSspTests": true, "osVersion": "Alpine 3.8", "tagTemplates": [ "alpine-#shorttag#" ], "OptionalTests": [ "test-deps", "test-deps-musl" ], "TestProperties": { "size": 255 } }
{ "pile_set_name": "Github" }
/** * Copyright (c) 2014-present, Facebook, Inc. All rights reserved. * * You are hereby granted a non-exclusive, worldwide, royalty-free license to use, * copy, modify, and distribute this software in source code or binary form for use * in connection with the web services and APIs provided by Facebook. * * As with any software that integrates with the Facebook platform, your use of * this software is subject to the Facebook Developer Principles and Policies * [http://developers.facebook.com/policy/]. This copyright notice shall be * included in all copies or substantial portions of the software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ namespace Facebook.Unity { internal interface IInternalResult : IResult { /// <summary> /// Gets the callback identifier. /// </summary> /// <value>A unique ID for this callback. This value should only be used internally.</value> string CallbackId { get; } } }
{ "pile_set_name": "Github" }
Scott Test Reporter - Tests =========================== These projects ensure that Scott remains bug free.
{ "pile_set_name": "Github" }
// Read an INI file into easy-to-access name/value pairs. // inih and INIReader are released under the New BSD license (see LICENSE.txt). // Go to the project home page for more info: // // http://code.google.com/p/inih/ #ifndef __INIREADER_H__ #define __INIREADER_H__ #include <map> #include <set> #include <string> // Read an INI file into easy-to-access name/value pairs. (Note that I've gone // for simplicity here rather than speed, but it should be pretty decent.) class INIReader { public: // Construct INIReader and parse given filename. See ini.h for more info // about the parsing. INIReader(std::string filename); ~INIReader(); // Return the result of ini_parse(), i.e., 0 on success, line number of // first error on parse error, or -1 on file open error. int ParseError(); // Get a string value from INI file, returning default_value if not found. std::string Get(std::string section, std::string name, std::string default_value); // Get an integer (long) value from INI file, returning default_value if // not found or not a valid integer (decimal "1234", "-1234", or hex "0x4d2"). long GetInteger(std::string section, std::string name, long default_value); // Get a real (floating point double) value from INI file, returning // default_value if not found or not a valid floating point value // according to strtod(). double GetReal(std::string section, std::string name, double default_value); // Get a boolean value from INI file, returning default_value if not found or if // not a valid true/false value. Valid true values are "true", "yes", "on", "1", // and valid false values are "false", "no", "off", "0" (not case sensitive). bool GetBoolean(std::string section, std::string name, bool default_value); // Returns all the section names from the INI file, in alphabetical order, but in the // original casing std::set<std::string> GetSections() const; // Returns all the field names from a section in the INI file, in alphabetical order, // but in the original casing. 
Returns an empty set if the field name is unknown std::set<std::string> GetFields(std::string section) const; private: int _error; std::map<std::string, std::string> _values; // Because we want to retain the original casing in _fields, but // want lookups to be case-insensitive, we need both _fields and _values std::set<std::string> _sections; std::map<std::string, std::set<std::string>*> _fields; static std::string MakeKey(std::string section, std::string name); static int ValueHandler(void* user, const char* section, const char* name, const char* value); }; #endif // __INIREADER_H__
{ "pile_set_name": "Github" }
/* Copyright (C) 2017-2020 Open Information Security Foundation * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ /** * \file * * \author Eric Leblond <[email protected]> * * Match on ftp command used to trigger a ftp data transfer */ #include "suricata-common.h" #include "util-unittest.h" #include "detect-parse.h" #include "detect-engine.h" #include "detect-engine-state.h" #include "app-layer-ftp.h" #include "detect-ftpdata.h" /** * \brief Regex for parsing our keyword options */ #define PARSE_REGEX "^\\s*(stor|retr)\\s*$" static DetectParseRegex parse_regex; /* Prototypes of functions registered in DetectFtpdataRegister below */ static int DetectFtpdataMatch(DetectEngineThreadCtx *, Flow *, uint8_t, void *, void *, const Signature *, const SigMatchCtx *); static int DetectFtpdataSetup (DetectEngineCtx *, Signature *, const char *); static void DetectFtpdataFree (DetectEngineCtx *, void *); #ifdef UNITTESTS static void DetectFtpdataRegisterTests (void); #endif static int DetectEngineInspectFtpdataGeneric(ThreadVars *tv, DetectEngineCtx *de_ctx, DetectEngineThreadCtx *det_ctx, const Signature *s, const SigMatchData *smd, Flow *f, uint8_t flags, void *alstate, void *txv, uint64_t tx_id); static int g_ftpdata_buffer_id = 0; /** * \brief Registration function for ftpcommand: keyword * * This function is called once in the 'lifetime' of the engine. 
*/ void DetectFtpdataRegister(void) { /* keyword name: this is how the keyword is used in a rule */ sigmatch_table[DETECT_FTPDATA].name = "ftpdata_command"; /* description: listed in "suricata --list-keywords=all" */ sigmatch_table[DETECT_FTPDATA].desc = "match FTP command triggering a FTP data channel"; sigmatch_table[DETECT_FTPDATA].url = "/rules/ftp-keywords.html#ftpdata-command"; sigmatch_table[DETECT_FTPDATA].AppLayerTxMatch = DetectFtpdataMatch; /* setup function is called during signature parsing, when the ftpcommand * keyword is encountered in the rule */ sigmatch_table[DETECT_FTPDATA].Setup = DetectFtpdataSetup; /* free function is called when the detect engine is freed. Normally at * shutdown, but also during rule reloads. */ sigmatch_table[DETECT_FTPDATA].Free = DetectFtpdataFree; /* registers unittests into the system */ #ifdef UNITTESTS sigmatch_table[DETECT_FTPDATA].RegisterTests = DetectFtpdataRegisterTests; #endif DetectAppLayerInspectEngineRegister("ftpdata_command", ALPROTO_FTPDATA, SIG_FLAG_TOSERVER, 0, DetectEngineInspectFtpdataGeneric); DetectAppLayerInspectEngineRegister("ftpdata_command", ALPROTO_FTPDATA, SIG_FLAG_TOCLIENT, 0, DetectEngineInspectFtpdataGeneric); g_ftpdata_buffer_id = DetectBufferTypeGetByName("ftpdata_command"); /* set up the PCRE for keyword parsing */ DetectSetupParseRegexes(PARSE_REGEX, &parse_regex); } static int DetectEngineInspectFtpdataGeneric(ThreadVars *tv, DetectEngineCtx *de_ctx, DetectEngineThreadCtx *det_ctx, const Signature *s, const SigMatchData *smd, Flow *f, uint8_t flags, void *alstate, void *txv, uint64_t tx_id) { return DetectEngineInspectGenericList(tv, de_ctx, det_ctx, s, smd, f, flags, alstate, txv, tx_id); } /** * \brief This function is used to check matches from the FTP App Layer Parser * * \param t pointer to thread vars * \param det_ctx pointer to the pattern matcher thread * \param p pointer to the current packet * \param m pointer to the sigmatch * \retval 0 no match * \retval 1 match */ static 
int DetectFtpdataMatch(DetectEngineThreadCtx *det_ctx, Flow *f, uint8_t flags, void *state, void *txv, const Signature *s, const SigMatchCtx *m) { const DetectFtpdataData *ftpcommandd = (const DetectFtpdataData *) m; const FtpDataState *ftp_state = (const FtpDataState *)state; if (ftp_state == NULL) return 0; if (ftpcommandd->command == ftp_state->command) { /* Only match if the flow is in the good direction */ if ((flags & STREAM_TOSERVER) && (ftpcommandd->command == FTP_COMMAND_RETR)) { return 0; } else if ((flags & STREAM_TOCLIENT) && (ftpcommandd->command == FTP_COMMAND_STOR)) { return 0; } return 1; } return 0; } /** * \brief This function is used to parse ftpcommand options passed via ftpcommand: keyword * * \param ftpcommandstr Pointer to the user provided ftpcommand options * * \retval ftpcommandd pointer to DetectFtpdataData on success * \retval NULL on failure */ static DetectFtpdataData *DetectFtpdataParse(const char *ftpcommandstr) { DetectFtpdataData *ftpcommandd = NULL; char arg1[5] = ""; int ov[MAX_SUBSTRINGS]; int ret = DetectParsePcreExec(&parse_regex, ftpcommandstr, 0, 0, ov, MAX_SUBSTRINGS); if (ret != 2) { SCLogError(SC_ERR_PCRE_MATCH, "parse error, ret %" PRId32 "", ret); goto error; } int res = pcre_copy_substring((char *) ftpcommandstr, ov, MAX_SUBSTRINGS, 1, arg1, sizeof(arg1)); if (res < 0) { SCLogError(SC_ERR_PCRE_GET_SUBSTRING, "pcre_copy_substring failed"); goto error; } SCLogDebug("Arg1 \"%s\"", arg1); ftpcommandd = SCMalloc(sizeof (DetectFtpdataData)); if (unlikely(ftpcommandd == NULL)) goto error; if (!strcmp(arg1, "stor")) { ftpcommandd->command = FTP_COMMAND_STOR; } else if (!strcmp(arg1, "retr")) { ftpcommandd->command = FTP_COMMAND_RETR; } else { SCLogError(SC_ERR_NOT_SUPPORTED, "Invalid command value"); goto error; } return ftpcommandd; error: if (ftpcommandd) SCFree(ftpcommandd); return NULL; } /** * \brief parse the options from the 'ftpcommand' keyword in the rule into * the Signature data structure. 
* * \param de_ctx pointer to the Detection Engine Context * \param s pointer to the Current Signature * \param str pointer to the user provided ftpcommand options * * \retval 0 on Success * \retval -1 on Failure */ static int DetectFtpdataSetup(DetectEngineCtx *de_ctx, Signature *s, const char *str) { if (DetectSignatureSetAppProto(s, ALPROTO_FTPDATA) != 0) return -1; DetectFtpdataData *ftpcommandd = DetectFtpdataParse(str); if (ftpcommandd == NULL) return -1; SigMatch *sm = SigMatchAlloc(); if (sm == NULL) { DetectFtpdataFree(de_ctx, ftpcommandd); return -1; } sm->type = DETECT_FTPDATA; sm->ctx = (void *)ftpcommandd; SigMatchAppendSMToList(s, sm, g_ftpdata_buffer_id); return 0; } /** * \brief this function will free memory associated with DetectFtpdataData * * \param ptr pointer to DetectFtpdataData */ static void DetectFtpdataFree(DetectEngineCtx *de_ctx, void *ptr) { DetectFtpdataData *ftpcommandd = (DetectFtpdataData *)ptr; /* do more specific cleanup here, if needed */ SCFree(ftpcommandd); } #ifdef UNITTESTS static int DetectFtpdataParseTest01(void) { DetectFtpdataData *ftpcommandd = DetectFtpdataParse("stor"); FAIL_IF_NULL(ftpcommandd); FAIL_IF(!(ftpcommandd->command == FTP_COMMAND_STOR)); DetectFtpdataFree(NULL, ftpcommandd); PASS; } static int DetectFtpdataSignatureTest01(void) { DetectEngineCtx *de_ctx = DetectEngineCtxInit(); FAIL_IF_NULL(de_ctx); Signature *sig = DetectEngineAppendSig(de_ctx, "alert ip any any -> any any (ftpdata_command:stor; sid:1; rev:1;)"); FAIL_IF_NULL(sig); sig = DetectEngineAppendSig(de_ctx, "alert ip any any -> any any (ftpdata_command:retr; sid:2; rev:1;)"); FAIL_IF_NULL(sig); sig = DetectEngineAppendSig(de_ctx, "alert ip any any -> any any (ftpdata_command:xxx; sid:3; rev:1;)"); FAIL_IF_NOT_NULL(sig); DetectEngineCtxFree(de_ctx); PASS; } /** * \brief this function registers unit tests for DetectFtpdata */ static void DetectFtpdataRegisterTests(void) { UtRegisterTest("DetectFtpdataParseTest01", DetectFtpdataParseTest01); 
UtRegisterTest("DetectFtpdataSignatureTest01", DetectFtpdataSignatureTest01); } #endif /* UNITTESTS */
{ "pile_set_name": "Github" }
<h3>Want to help make this project awesome? Check out our repo.</h3> <a color="accent" href="https://github.com/bfwg/angular-spring-jwt-starter" mat-raised-button mat-ripple> <span>GITHUB</span> </a>
{ "pile_set_name": "Github" }
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ import * as msRest from "@azure/ms-rest-js"; import * as Models from "../models"; import * as Mappers from "../models/projectsMappers"; import * as Parameters from "../models/parameters"; import { MLTeamAccountManagementClientContext } from "../mLTeamAccountManagementClientContext"; /** Class representing a Projects. */ export class Projects { private readonly client: MLTeamAccountManagementClientContext; /** * Create a Projects. * @param {MLTeamAccountManagementClientContext} client Reference to the service client. */ constructor(client: MLTeamAccountManagementClientContext) { this.client = client; } /** * Gets the properties of the specified machine learning project. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param [options] The optional parameters * @returns Promise<Models.ProjectsGetResponse> */ get(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options?: msRest.RequestOptionsBase): Promise<Models.ProjectsGetResponse>; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. 
* @param callback The callback */ get(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, callback: msRest.ServiceCallback<Models.Project>): void; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param options The optional parameters * @param callback The callback */ get(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Project>): void; get(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Project>, callback?: msRest.ServiceCallback<Models.Project>): Promise<Models.ProjectsGetResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, workspaceName, projectName, options }, getOperationSpec, callback) as Promise<Models.ProjectsGetResponse>; } /** * Creates or updates a project with the specified parameters. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for creating or updating a project. 
* @param [options] The optional parameters * @returns Promise<Models.ProjectsCreateOrUpdateResponse> */ createOrUpdate(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.Project, options?: msRest.RequestOptionsBase): Promise<Models.ProjectsCreateOrUpdateResponse>; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for creating or updating a project. * @param callback The callback */ createOrUpdate(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.Project, callback: msRest.ServiceCallback<Models.Project>): void; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for creating or updating a project. 
* @param options The optional parameters * @param callback The callback */ createOrUpdate(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.Project, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Project>): void; createOrUpdate(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.Project, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Project>, callback?: msRest.ServiceCallback<Models.Project>): Promise<Models.ProjectsCreateOrUpdateResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, workspaceName, projectName, parameters, options }, createOrUpdateOperationSpec, callback) as Promise<Models.ProjectsCreateOrUpdateResponse>; } /** * Deletes a project. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param [options] The optional parameters * @returns Promise<msRest.RestResponse> */ deleteMethod(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. 
* @param callback The callback */ deleteMethod(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, callback: msRest.ServiceCallback<void>): void; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param options The optional parameters * @param callback The callback */ deleteMethod(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void; deleteMethod(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, workspaceName, projectName, options }, deleteMethodOperationSpec, callback); } /** * Updates a project with the specified parameters. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for updating a machine learning team account. 
* @param [options] The optional parameters * @returns Promise<Models.ProjectsUpdateResponse> */ update(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.ProjectUpdateParameters, options?: msRest.RequestOptionsBase): Promise<Models.ProjectsUpdateResponse>; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for updating a machine learning team account. * @param callback The callback */ update(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.ProjectUpdateParameters, callback: msRest.ServiceCallback<Models.Project>): void; /** * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param projectName The name of the machine learning project under a team account workspace. * @param parameters The parameters for updating a machine learning team account. 
* @param options The optional parameters * @param callback The callback */ update(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.ProjectUpdateParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Project>): void; update(resourceGroupName: string, accountName: string, workspaceName: string, projectName: string, parameters: Models.ProjectUpdateParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Project>, callback?: msRest.ServiceCallback<Models.Project>): Promise<Models.ProjectsUpdateResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, workspaceName, projectName, parameters, options }, updateOperationSpec, callback) as Promise<Models.ProjectsUpdateResponse>; } /** * Lists all the available machine learning projects under the specified workspace. * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param [options] The optional parameters * @returns Promise<Models.ProjectsListByWorkspaceResponse> */ listByWorkspace(accountName: string, workspaceName: string, resourceGroupName: string, options?: msRest.RequestOptionsBase): Promise<Models.ProjectsListByWorkspaceResponse>; /** * @param accountName The name of the machine learning team account. * @param workspaceName The name of the machine learning team account workspace. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param callback The callback */ listByWorkspace(accountName: string, workspaceName: string, resourceGroupName: string, callback: msRest.ServiceCallback<Models.ProjectListResult>): void; /** * @param accountName The name of the machine learning team account. 
* @param workspaceName The name of the machine learning team account workspace. * @param resourceGroupName The name of the resource group to which the machine learning team * account belongs. * @param options The optional parameters * @param callback The callback */ listByWorkspace(accountName: string, workspaceName: string, resourceGroupName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProjectListResult>): void; listByWorkspace(accountName: string, workspaceName: string, resourceGroupName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProjectListResult>, callback?: msRest.ServiceCallback<Models.ProjectListResult>): Promise<Models.ProjectsListByWorkspaceResponse> { return this.client.sendOperationRequest( { accountName, workspaceName, resourceGroupName, options }, listByWorkspaceOperationSpec, callback) as Promise<Models.ProjectsListByWorkspaceResponse>; } /** * Lists all the available machine learning projects under the specified workspace. * @param nextPageLink The NextLink from the previous successful call to List operation. * @param [options] The optional parameters * @returns Promise<Models.ProjectsListByWorkspaceNextResponse> */ listByWorkspaceNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ProjectsListByWorkspaceNextResponse>; /** * @param nextPageLink The NextLink from the previous successful call to List operation. * @param callback The callback */ listByWorkspaceNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ProjectListResult>): void; /** * @param nextPageLink The NextLink from the previous successful call to List operation. 
* @param options The optional parameters * @param callback The callback */ listByWorkspaceNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProjectListResult>): void; listByWorkspaceNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProjectListResult>, callback?: msRest.ServiceCallback<Models.ProjectListResult>): Promise<Models.ProjectsListByWorkspaceNextResponse> { return this.client.sendOperationRequest( { nextPageLink, options }, listByWorkspaceNextOperationSpec, callback) as Promise<Models.ProjectsListByWorkspaceNextResponse>; } } // Operation Specifications const serializer = new msRest.Serializer(Mappers); const getOperationSpec: msRest.OperationSpec = { httpMethod: "GET", path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningExperimentation/accounts/{accountName}/workspaces/{workspaceName}/projects/{projectName}", urlParameters: [ Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.workspaceName, Parameters.projectName ], queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: { bodyMapper: Mappers.Project }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const createOrUpdateOperationSpec: msRest.OperationSpec = { httpMethod: "PUT", path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningExperimentation/accounts/{accountName}/workspaces/{workspaceName}/projects/{projectName}", urlParameters: [ Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.workspaceName, Parameters.projectName ], queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], requestBody: { parameterPath: "parameters", mapper: { ...Mappers.Project, required: true } }, responses: { 200: { bodyMapper: 
Mappers.Project }, 201: { bodyMapper: Mappers.Project }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const deleteMethodOperationSpec: msRest.OperationSpec = { httpMethod: "DELETE", path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningExperimentation/accounts/{accountName}/workspaces/{workspaceName}/projects/{projectName}", urlParameters: [ Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.workspaceName, Parameters.projectName ], queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: {}, 204: {}, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const updateOperationSpec: msRest.OperationSpec = { httpMethod: "PATCH", path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningExperimentation/accounts/{accountName}/workspaces/{workspaceName}/projects/{projectName}", urlParameters: [ Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.workspaceName, Parameters.projectName ], queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], requestBody: { parameterPath: "parameters", mapper: { ...Mappers.ProjectUpdateParameters, required: true } }, responses: { 200: { bodyMapper: Mappers.Project }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const listByWorkspaceOperationSpec: msRest.OperationSpec = { httpMethod: "GET", path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningExperimentation/accounts/{accountName}/workspaces{workspaceName}/projects", urlParameters: [ Parameters.subscriptionId, Parameters.accountName, Parameters.workspaceName, Parameters.resourceGroupName ], queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: { bodyMapper: 
Mappers.ProjectListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const listByWorkspaceNextOperationSpec: msRest.OperationSpec = { httpMethod: "GET", baseUrl: "https://management.azure.com", path: "{nextLink}", urlParameters: [ Parameters.nextPageLink ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: { bodyMapper: Mappers.ProjectListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer };
{ "pile_set_name": "Github" }
pngsuite -------- (c) Willem van Schaik, 1999 Permission to use, copy, and distribute these images for any purpose and without fee is hereby granted. These 15 images are part of the much larger PngSuite test-set of images, available for developers of PNG supporting software. The complete set, available at http:/www.schaik.com/pngsuite/, contains a variety of images to test interlacing, gamma settings, ancillary chunks, etc. The images in this directory represent the basic PNG color-types: grayscale (1-16 bit deep), full color (8 or 16 bit), paletted (1-8 bit) and grayscale or color images with alpha channel. You can use them to test the proper functioning of PNG software. filename depth type ------------ ------ -------------- basn0g01.png 1-bit grayscale basn0g02.png 2-bit grayscale basn0g04.png 4-bit grayscale basn0g08.png 8-bit grayscale basn0g16.png 16-bit grayscale basn2c08.png 8-bit truecolor basn2c16.png 16-bit truecolor basn3p01.png 1-bit paletted basn3p02.png 2-bit paletted basn3p04.png 4-bit paletted basn3p08.png 8-bit paletted basn4a08.png 8-bit gray with alpha basn4a16.png 16-bit gray with alpha basn6a08.png 8-bit RGBA basn6a16.png 16-bit RGBA Here is the correct result of typing "pngtest -m *.png" in this directory: Testing basn0g01.png: PASS (524 zero samples) Filter 0 was used 32 times Testing basn0g02.png: PASS (448 zero samples) Filter 0 was used 32 times Testing basn0g04.png: PASS (520 zero samples) Filter 0 was used 32 times Testing basn0g08.png: PASS (3 zero samples) Filter 1 was used 9 times Filter 4 was used 23 times Testing basn0g16.png: PASS (1 zero samples) Filter 1 was used 1 times Filter 2 was used 31 times Testing basn2c08.png: PASS (6 zero samples) Filter 1 was used 5 times Filter 4 was used 27 times Testing basn2c16.png: PASS (592 zero samples) Filter 1 was used 1 times Filter 4 was used 31 times Testing basn3p01.png: PASS (512 zero samples) Filter 0 was used 32 times Testing basn3p02.png: PASS (448 zero samples) Filter 0 was used 32 
times Testing basn3p04.png: PASS (544 zero samples) Filter 0 was used 32 times Testing basn3p08.png: PASS (4 zero samples) Filter 0 was used 32 times Testing basn4a08.png: PASS (32 zero samples) Filter 1 was used 1 times Filter 4 was used 31 times Testing basn4a16.png: PASS (64 zero samples) Filter 0 was used 1 times Filter 1 was used 2 times Filter 2 was used 1 times Filter 4 was used 28 times Testing basn6a08.png: PASS (160 zero samples) Filter 1 was used 1 times Filter 4 was used 31 times Testing basn6a16.png: PASS (1072 zero samples) Filter 1 was used 4 times Filter 4 was used 28 times libpng passes test Willem van Schaik <[email protected]> October 1999
{ "pile_set_name": "Github" }
package org.stepik.android.view.course_list.delegate

import androidx.fragment.app.DialogFragment
import androidx.fragment.app.FragmentActivity
import org.stepic.droid.analytic.Analytic
import org.stepic.droid.core.ScreenManager
import org.stepic.droid.ui.dialogs.LoadingProgressDialogFragment
import org.stepic.droid.util.ProgressHelper
import org.stepik.android.domain.course.analytic.CourseViewSource
import org.stepik.android.domain.course_list.model.CourseListItem
import org.stepik.android.domain.last_step.model.LastStep
import org.stepik.android.model.Course
import org.stepik.android.presentation.course_continue.CourseContinueView

/**
 * View-layer delegate that implements [CourseContinueView] on top of a host
 * [FragmentActivity]: it shows/hides a blocking progress dialog while a
 * "continue course" action is being resolved, and forwards the resulting
 * navigation to [ScreenManager].
 *
 * @param activity host activity; its supportFragmentManager owns the progress dialog
 * @param analytic reports course click events
 * @param screenManager performs the actual screen navigation
 */
class CourseContinueViewDelegate(
    private val activity: FragmentActivity,
    private val analytic: Analytic,
    private val screenManager: ScreenManager
) : CourseContinueView {
    // Modal "loading" dialog shown while a blocking operation is in flight.
    private val progressDialogFragment: DialogFragment = LoadingProgressDialogFragment.newInstance()

    /** Shows the blocking progress dialog while [isLoading] is true, dismisses it otherwise. */
    override fun setBlockingLoading(isLoading: Boolean) {
        if (isLoading) {
            ProgressHelper.activate(progressDialogFragment, activity.supportFragmentManager, LoadingProgressDialogFragment.TAG)
        } else {
            ProgressHelper.dismiss(activity.supportFragmentManager, LoadingProgressDialogFragment.TAG)
        }
    }

    /**
     * Opens the course screen: the adaptive-course flow when [isAdaptive] is true,
     * otherwise the regular course-modules screen.
     */
    override fun showCourse(course: Course, source: CourseViewSource, isAdaptive: Boolean) {
        if (isAdaptive) {
            screenManager.continueAdaptiveCourse(activity, course)
        } else {
            screenManager.showCourseModules(activity, course, source)
        }
    }

    /** Continues the course directly at the given [lastStep]. */
    override fun showSteps(course: Course, source: CourseViewSource, lastStep: LastStep) {
        screenManager.continueCourse(activity, course.id, source, lastStep)
    }

    /**
     * Handles a click on a course list item: reports the click to analytics,
     * then opens the course-modules screen when the course has a non-zero
     * enrollment id (presumably meaning the user is enrolled — confirm against
     * the Course model), or the course-description screen otherwise.
     */
    fun onCourseClicked(courseListItem: CourseListItem.Data) {
        analytic.reportEvent(Analytic.Interaction.CLICK_COURSE)
        if (courseListItem.course.enrollment != 0L) {
            screenManager.showCourseModules(activity, courseListItem.course, courseListItem.source)
        } else {
            screenManager.showCourseDescription(activity, courseListItem.course, courseListItem.source)
        }
    }
}
{ "pile_set_name": "Github" }
/*===========================================================================*\ * * * OpenMesh * * Copyright (C) 2001-2015 by Computer Graphics Group, RWTH Aachen * * www.openmesh.org * * * *---------------------------------------------------------------------------* * This file is part of OpenMesh. * * * * OpenMesh is free software: you can redistribute it and/or modify * * it under the terms of the GNU Lesser General Public License as * * published by the Free Software Foundation, either version 3 of * * the License, or (at your option) any later version with the * * following exceptions: * * * * If other files instantiate templates or use macros * * or inline functions from this file, or you compile this file and * * link it with other files to produce an executable, this file does * * not by itself cause the resulting executable to be covered by the * * GNU Lesser General Public License. This exception does not however * * invalidate any other reasons why the executable file might be * * covered by the GNU Lesser General Public License. * * * * OpenMesh is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU Lesser General Public License for more details. * * * * You should have received a copy of the GNU LesserGeneral Public * * License along with OpenMesh. If not, * * see <http://www.gnu.org/licenses/>. 
* * * \*===========================================================================*/ /*===========================================================================*\ * * * $Revision: 1188 $ * * $Date: 2015-01-05 16:34:10 +0100 (Mo, 05 Jan 2015) $ * * * \*===========================================================================*/ //============================================================================= // // Helper Functions for binary reading / writing // //============================================================================= #ifndef OPENMESH_SR_BINARY_HH #define OPENMESH_SR_BINARY_HH //== INCLUDES ================================================================= #include <OpenMesh/Core/System/config.h> // -------------------- STL #include <typeinfo> #include <stdexcept> #include <sstream> #include <numeric> // accumulate // -------------------- OpenMesh //== NAMESPACES =============================================================== namespace OpenMesh { namespace IO { //============================================================================= //----------------------------------------------------------------------------- const static size_t UnknownSize(size_t(-1)); //----------------------------------------------------------------------------- // struct binary, helper for storing/restoring #define X \ std::ostringstream msg; \ msg << "Type not supported: " << typeid(value_type).name(); \ throw std::logic_error(msg.str()) /// \struct binary SR_binary.hh <OpenMesh/Core/IO/SR_binary.hh> /// /// The struct defines how to store and restore the type T. /// It's used by the OM reader/writer modules. /// /// The following specialization are provided: /// - Fundamental types except \c long \c double /// - %OpenMesh vector types /// - %OpenMesh::StatusInfo /// - std::string (max. 
length 65535) /// /// \todo Complete documentation of members template < typename T > struct binary { typedef T value_type; static const bool is_streamable = false; static size_t size_of(void) { return UnknownSize; } static size_t size_of(const value_type&) { return UnknownSize; } static size_t store( std::ostream& /* _os */, const value_type& /* _v */, bool /* _swap=false */) { X; return 0; } static size_t restore( std::istream& /* _is */, value_type& /* _v */, bool /* _swap=false */) { X; return 0; } }; #undef X //============================================================================= } // namespace IO } // namespace OpenMesh //============================================================================= #endif // OPENMESH_SR_RBO_HH defined //=============================================================================
{ "pile_set_name": "Github" }
--- namespace: Titanium.Filesystem.File.setExecutable type: method description: | Set the execute bit on the file or directory at the path specified by this Filesystem.File object. Return true if the file or directory at this path is executable after the operation completes. since: 0.2.0 platforms: [osx, linux, win32] returns: Boolean parameters: []
{ "pile_set_name": "Github" }
// name: expconn4.mo // keywords: // status: correct // cflags: -d=newInst // // expandable connector EC RealInput ri; end EC; connector RealInput = input Real; model M EC ec; RealInput ri; equation connect(ec.ri, ri); end M; // Result: // class M // input Real ec.ri; // input Real ri; // equation // ec.ri = ri; // end M; // endResult
{ "pile_set_name": "Github" }
{ "_args": [ [ { "raw": "is-extglob@^2.1.0", "scope": null, "escapedName": "is-extglob", "name": "is-extglob", "rawSpec": "^2.1.0", "spec": ">=2.1.0 <3.0.0", "type": "range" }, "/Users/qinliang.ql/Desktop/test/node_modules/glob-stream/node_modules/is-glob" ] ], "_from": "is-extglob@>=2.1.0 <3.0.0", "_id": "[email protected]", "_inCache": true, "_location": "/glob-stream/is-extglob", "_nodeVersion": "6.9.2", "_npmOperationalInternal": { "host": "packages-18-east.internal.npmjs.com", "tmp": "tmp/is-extglob-2.1.1.tgz_1481429063759_0.21102957101538777" }, "_npmUser": { "name": "jonschlinkert", "email": "[email protected]" }, "_npmVersion": "3.10.9", "_phantomChildren": {}, "_requested": { "raw": "is-extglob@^2.1.0", "scope": null, "escapedName": "is-extglob", "name": "is-extglob", "rawSpec": "^2.1.0", "spec": ">=2.1.0 <3.0.0", "type": "range" }, "_requiredBy": [ "/glob-stream/is-glob" ], "_resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "_shasum": "a88c02535791f02ed37c76a1b9ea9773c833f8c2", "_shrinkwrap": null, "_spec": "is-extglob@^2.1.0", "_where": "/Users/qinliang.ql/Desktop/test/node_modules/glob-stream/node_modules/is-glob", "author": { "name": "Jon Schlinkert", "url": "https://github.com/jonschlinkert" }, "bugs": { "url": "https://github.com/jonschlinkert/is-extglob/issues" }, "dependencies": {}, "description": "Returns true if a string has an extglob.", "devDependencies": { "gulp-format-md": "^0.1.10", "mocha": "^3.0.2" }, "directories": {}, "dist": { "shasum": "a88c02535791f02ed37c76a1b9ea9773c833f8c2", "tarball": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" }, "engines": { "node": ">=0.10.0" }, "files": [ "index.js" ], "gitHead": "10a74787acbe79abf02141c5d487950d1b197b15", "homepage": "https://github.com/jonschlinkert/is-extglob", "keywords": [ "bash", "braces", "check", "exec", "expression", "extglob", "glob", "globbing", "globstar", "is", "match", "matches", "pattern", "regex", "regular", "string", "test" ], 
"license": "MIT", "main": "index.js", "maintainers": [ { "name": "jonschlinkert", "email": "[email protected]" } ], "name": "is-extglob", "optionalDependencies": {}, "readme": "ERROR: No README data found!", "repository": { "type": "git", "url": "git+https://github.com/jonschlinkert/is-extglob.git" }, "scripts": { "test": "mocha" }, "verb": { "toc": false, "layout": "default", "tasks": [ "readme" ], "plugins": [ "gulp-format-md" ], "related": { "list": [ "has-glob", "is-glob", "micromatch" ] }, "reflinks": [ "verb", "verb-generate-readme" ], "lint": { "reflinks": true } }, "version": "2.1.1" }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <!-- Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or http://ckeditor.com/license --> <html> <head> <meta charset="utf-8"> <title>UI Color Picker &mdash; CKEditor Sample</title> <script src="../../../ckeditor.js"></script> <link rel="stylesheet" href="../../../samples/old/sample.css"> <meta name="ckeditor-sample-name" content="UIColor plugin"> <meta name="ckeditor-sample-group" content="Plugins"> <meta name="ckeditor-sample-description" content="Using the UIColor plugin to pick up skin color."> </head> <body> <h1 class="samples"> <a href="../../../samples/old/index.html">CKEditor Samples</a> &raquo; UI Color Plugin </h1> <div class="warning deprecated"> This sample is not maintained anymore. Check out its <a href="http://sdk.ckeditor.com/samples/uicolorpicker.html">brand new version in CKEditor SDK</a>. </div> <div class="description"> <p> This sample shows how to use the UI Color picker toolbar button to preview the skin color of the editor. <strong>Note:</strong>The UI skin color feature depends on the CKEditor skin compatibility. The Moono and Kama skins are examples of skins that work with it. </p> </div> <form action="../../../samples/sample_posteddata.php" method="post"> <div id="ui-color-plugin"> <p> If the <strong>uicolor</strong> plugin along with the dedicated <strong>UIColor</strong> toolbar button is added to CKEditor, the user will also be able to pick the color of the UI from the color palette available in the <strong>UI Color Picker</strong> dialog window. 
</p> <p> To insert a CKEditor instance with the <strong>uicolor</strong> plugin enabled, use the following JavaScript call: </p> <pre class="samples"> CKEDITOR.replace( '<em>textarea_id</em>', { <strong>extraPlugins: 'uicolor',</strong> toolbar: [ [ 'Bold', 'Italic' ], [ <strong>'UIColor'</strong> ] ] });</pre> <h2>Used in themed instance</h2> <p> Click the <strong>UI Color Picker</strong> toolbar button to open up a color picker dialog. </p> <p> <textarea cols="80" id="editor1" name="editor1" rows="10">&lt;p&gt;This is some &lt;strong&gt;sample text&lt;/strong&gt;. You are using &lt;a href="http://ckeditor.com/"&gt;CKEditor&lt;/a&gt;.&lt;/p&gt;</textarea> <script> // Replace the <textarea id="editor"> with an CKEditor // instance, using default configurations. CKEDITOR.replace( 'editor1', { extraPlugins: 'uicolor', toolbar: [ [ 'Bold', 'Italic', '-', 'NumberedList', 'BulletedList', '-', 'Link', 'Unlink' ], [ 'FontSize', 'TextColor', 'BGColor' ], [ 'UIColor' ] ] }); </script> </p> <h2>Used in inline instance</h2> <p> Click the below editable region to display floating toolbar, then click <strong>UI Color Picker</strong> button. </p> <div id="editor2" contenteditable="true"> <p>This is some <strong>sample text</strong>. You are using <a data-cke-saved-href="http://ckeditor.com/" href="http://ckeditor.com/">CKEditor</a>.</p> </div> <script> // Disable automatic creation of inline instances. CKEDITOR.disableAutoInline = true; // Replace the <div id="editor3"> with an inline CKEditor instance. 
CKEDITOR.inline( 'editor2', { extraPlugins: 'uicolor', toolbar: [ [ 'Bold', 'Italic', '-', 'NumberedList', 'BulletedList', '-', 'Link', 'Unlink' ], [ 'FontSize', 'TextColor', 'BGColor' ], [ 'UIColor' ] ] }); </script> </div> <p> <input type="submit" value="Submit"> </p> </form> <div id="footer"> <hr> <p> CKEditor - The text editor for the Internet - <a class="samples" href="http://ckeditor.com/">http://ckeditor.com</a> </p> <p id="copy"> Copyright &copy; 2003-2016, <a class="samples" href="http://cksource.com/">CKSource</a> - Frederico Knabben. All rights reserved. </p> </div> </body> </html>
{ "pile_set_name": "Github" }
/*
Copyright 2019 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package v1 contains the scheme registration for the scheduling.k8s.io/v1
// API group: it wires PriorityClass and PriorityClassList into a
// runtime.Scheme via the standard SchemeBuilder/AddToScheme pattern.
package v1

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/runtime/schema"
)

// GroupName is the group name used in this package.
const GroupName = "scheduling.k8s.io"

// SchemeGroupVersion is the group version used to register these objects.
var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"}

// Resource takes an unqualified resource and returns a Group qualified
// GroupResource (i.e. the resource within the scheduling.k8s.io group).
func Resource(resource string) schema.GroupResource {
	return SchemeGroupVersion.WithResource(resource).GroupResource()
}

var (
	// TODO: move SchemeBuilder with zz_generated.deepcopy.go to k8s.io/api.
	// localSchemeBuilder and AddToScheme will stay in k8s.io/kubernetes.

	// SchemeBuilder is a collection of functions that add things to a scheme.
	SchemeBuilder = runtime.NewSchemeBuilder(addKnownTypes)
	// localSchemeBuilder aliases SchemeBuilder so AddToScheme below applies
	// every function registered on it.
	localSchemeBuilder = &SchemeBuilder
	// AddToScheme applies all the stored functions to the scheme.
	AddToScheme = localSchemeBuilder.AddToScheme
)

// addKnownTypes adds the list of known types for this group/version to the
// given scheme, and registers the shared meta/v1 types for the group version.
func addKnownTypes(scheme *runtime.Scheme) error {
	scheme.AddKnownTypes(SchemeGroupVersion,
		&PriorityClass{},
		&PriorityClassList{},
	)
	metav1.AddToGroupVersion(scheme, SchemeGroupVersion)
	return nil
}
{ "pile_set_name": "Github" }
; DEF file for MS VC++
;
; Module-definition file for the Expat XML parser DLL. Each exported
; symbol is pinned to an explicit ordinal (@N) so the DLL's binary
; interface stays stable as new functions are appended in later
; releases (see the "added with version ..." markers below).

LIBRARY

EXPORTS
  XML_DefaultCurrent @1
  XML_ErrorString @2
  XML_ExpatVersion @3
  XML_ExpatVersionInfo @4
  XML_ExternalEntityParserCreate @5
  XML_GetBase @6
  XML_GetBuffer @7
  XML_GetCurrentByteCount @8
  XML_GetCurrentByteIndex @9
  XML_GetCurrentColumnNumber @10
  XML_GetCurrentLineNumber @11
  XML_GetErrorCode @12
  XML_GetIdAttributeIndex @13
  XML_GetInputContext @14
  XML_GetSpecifiedAttributeCount @15
  XML_Parse @16
  XML_ParseBuffer @17
  XML_ParserCreate @18
  XML_ParserCreateNS @19
  XML_ParserCreate_MM @20
  XML_ParserFree @21
  XML_SetAttlistDeclHandler @22
  XML_SetBase @23
  XML_SetCdataSectionHandler @24
  XML_SetCharacterDataHandler @25
  XML_SetCommentHandler @26
  XML_SetDefaultHandler @27
  XML_SetDefaultHandlerExpand @28
  XML_SetDoctypeDeclHandler @29
  XML_SetElementDeclHandler @30
  XML_SetElementHandler @31
  XML_SetEncoding @32
  XML_SetEndCdataSectionHandler @33
  XML_SetEndDoctypeDeclHandler @34
  XML_SetEndElementHandler @35
  XML_SetEndNamespaceDeclHandler @36
  XML_SetEntityDeclHandler @37
  XML_SetExternalEntityRefHandler @38
  XML_SetExternalEntityRefHandlerArg @39
  XML_SetNamespaceDeclHandler @40
  XML_SetNotStandaloneHandler @41
  XML_SetNotationDeclHandler @42
  XML_SetParamEntityParsing @43
  XML_SetProcessingInstructionHandler @44
  XML_SetReturnNSTriplet @45
  XML_SetStartCdataSectionHandler @46
  XML_SetStartDoctypeDeclHandler @47
  XML_SetStartElementHandler @48
  XML_SetStartNamespaceDeclHandler @49
  XML_SetUnknownEncodingHandler @50
  XML_SetUnparsedEntityDeclHandler @51
  XML_SetUserData @52
  XML_SetXmlDeclHandler @53
  XML_UseParserAsHandlerArg @54
; added with version 1.95.3
  XML_ParserReset @55
  XML_SetSkippedEntityHandler @56
; added with version 1.95.5
  XML_GetFeatureList @57
  XML_UseForeignDTD @58
; added with version 1.95.6
  XML_FreeContentModel @59
  XML_MemMalloc @60
  XML_MemRealloc @61
  XML_MemFree @62
; added with version 1.95.8
  XML_StopParser @63
  XML_ResumeParser @64
  XML_GetParsingStatus @65
{ "pile_set_name": "Github" }
{ "`": 1, "name": "meeting_leave_content_share_test.config.json %ts", "grids": [ ], "tests": [ { "name": "Meeting Leave Content Share Test %ts", "description": "Meeting leave Content Share Test", "tupleSize": 1, "noOfThreads": 1, "testImpl": "MeetingLeaveContentShareTest.js", "payload": { "url": "http://localhost:8080/", "retry": 2 } } ], "clients": [ { "browserName": "chrome", "version": "78", "platform": "MAC" }, { "browserName": "firefox", "version": "76", "platform": "MAC" } ] }
{ "pile_set_name": "Github" }