# Generated by Django 3.0.5 on 2020-08-04 07:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('experts', '0003_expert_picture'),
]
operations = [
migrations.AddField(
model_name='expert',
name='featured',
field=models.BooleanField(default=False),
),
]
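# Usage sketch (illustrative, not part of the generated migration): once this
# migration is applied, the new boolean can be filtered like any other field.
# The ``experts.models.Expert`` import path below is an assumption.
#
#   from experts.models import Expert
#   featured_experts = Expert.objects.filter(featured=True)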
|
# Copyright (c) 2020-2021, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
"""Package containing the blueprint parsers.
See 'base.py' for more information. Concrete parsers are
stored in this package, with a parser per file.
"""
|
#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: docker_image
short_description: Manage docker images.
description:
- Build, load or pull an image, making the image available for creating containers. Also supports tagging an
image into a repository and archiving an image to a .tar file.
- Since Ansible 2.8, it is recommended to explicitly specify the image's source (I(source) can be C(build),
C(load), C(pull) or C(local)). This will be required from community.general 2.0.0 on.
options:
source:
description:
- "Determines where the module will try to retrieve the image from."
- "Use C(build) to build the image from a C(Dockerfile). I(build.path) must
be specified when this value is used."
- "Use C(load) to load the image from a C(.tar) file. I(load_path) must
be specified when this value is used."
- "Use C(pull) to pull the image from a registry."
- "Use C(local) to make sure that the image is already available on the local
docker daemon, i.e. do not try to build, pull or load the image."
- "Before community.general 2.0.0, the value of this option will be auto-detected
to be backwards compatible, but a warning will be issued if it is not
explicitly specified. From community.general 2.0.0 on, auto-detection will be disabled
and this option will be made mandatory."
type: str
choices:
- build
- load
- pull
- local
build:
description:
- "Specifies options used for building images."
type: dict
suboptions:
cache_from:
description:
- List of image names to consider as cache source.
type: list
elements: str
dockerfile:
description:
- Use with state C(present) and source C(build) to provide an alternate name for the Dockerfile to use when building an image.
- This can also include a relative path (relative to I(path)).
type: str
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
type: int
path:
description:
- Use with state C(present) to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
type: path
required: yes
pull:
description:
- When building an image, downloads any updates to the FROM image in the Dockerfile.
- The default is currently C(yes). This will change to C(no) in community.general 2.0.0.
type: bool
rm:
description:
- Remove intermediate containers after build.
type: bool
default: yes
network:
description:
- The network to use for C(RUN) build instructions.
type: str
nocache:
description:
- Do not use cache when building an image.
type: bool
default: no
etc_hosts:
description:
- Extra hosts to add to C(/etc/hosts) in building containers, as a mapping of hostname to IP address.
type: dict
args:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21.
type: dict
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
type: dict
suboptions:
memory:
description:
- Set memory limit for build.
type: int
memswap:
description:
- Total memory (memory + swap), -1 to disable swap.
type: int
cpushares:
description:
- CPU shares (relative weight).
type: int
cpusetcpus:
description:
- CPUs in which to allow execution, e.g., "0-3", "0,1".
type: str
use_config_proxy:
description:
- If set to C(yes) and a proxy configuration is specified in the docker client configuration
(by default C($HOME/.docker/config.json)), the corresponding environment variables will
be set in the container being built.
- Needs Docker SDK for Python >= 3.7.0.
type: bool
target:
description:
- When building an image, specifies an intermediate build stage by
name as a final stage for the resulting image.
type: str
archive_path:
description:
- Use with state C(present) to archive an image to a .tar file.
type: path
load_path:
description:
- Use with state C(present) to load an image from a .tar file.
- Set I(source) to C(load) if you want to load the image. The option will
be set automatically before community.general 2.0.0 if this option is used (except
if I(path) is specified as well, in which case building will take precedence).
From community.general 2.0.0 on, you have to set I(source) to C(load).
type: path
dockerfile:
description:
- Use with state C(present) and source C(build) to provide an alternate name for the Dockerfile to use when building an image.
- This can also include a relative path (relative to I(path)).
- Please use I(build.dockerfile) instead. This option will be removed in community.general 2.0.0.
type: str
force:
description:
- Use with state C(absent) to un-tag and remove all images matching the specified name. Use with state
C(present) to build, load or pull an image when the image already exists. Also use with state C(present)
to force tagging an image.
- Please stop using this option, and use the more specialized force options
I(force_source), I(force_absent) and I(force_tag) instead.
- This option will be removed in community.general 2.0.0.
type: bool
force_source:
description:
- Use with state C(present) to build, load or pull an image (depending on the
value of the I(source) option) when the image already exists.
type: bool
default: false
force_absent:
description:
- Use with state C(absent) to un-tag and remove all images matching the specified name.
type: bool
default: false
force_tag:
description:
- Use with state C(present) to force tagging an image.
type: bool
default: false
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
- Please use I(build.http_timeout) instead. This option will be removed in community.general 2.0.0.
type: int
name:
description:
- "Image name. Name format will be one of: name, repository/name, registry_server:port/name.
When pushing or pulling an image the name can optionally include the tag by appending ':tag_name'."
- Note that image IDs (hashes) are not supported.
type: str
required: yes
path:
description:
- Use with state C(present) to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
- Set I(source) to C(build) if you want to build the image. The option will
be set automatically before community.general 2.0.0 if this option is used. From community.general 2.0.0
on, you have to set I(source) to C(build).
- Please use I(build.path) instead. This option will be removed in community.general 2.0.0.
type: path
aliases:
- build_path
pull:
description:
- When building an image, downloads any updates to the FROM image in the Dockerfile.
- Please use I(build.pull) instead. This option will be removed in community.general 2.0.0.
- The default is currently C(yes). This will change to C(no) in community.general 2.0.0.
type: bool
push:
description:
- Push the image to the registry. Specify the registry as part of the I(name) or I(repository) parameter.
type: bool
default: no
rm:
description:
- Remove intermediate containers after build.
- Please use I(build.rm) instead. This option will be removed in community.general 2.0.0.
type: bool
default: yes
nocache:
description:
- Do not use cache when building an image.
- Please use I(build.nocache) instead. This option will be removed in community.general 2.0.0.
type: bool
default: no
repository:
description:
- Full path to a repository. Use with state C(present) to tag the image into the repository. Expects
format I(repository:tag). If no tag is provided, will use the value of the C(tag) parameter or I(latest).
type: str
state:
description:
- Make assertions about the state of an image.
- When C(absent) an image will be removed. Use the force option to un-tag and remove all images
matching the provided name.
- When C(present) check if an image exists using the provided name and tag. If the image is not found or the
force option is used, the image will either be pulled, built or loaded, depending on the I(source) option.
- By default the image will be pulled from Docker Hub, or the registry specified in the image's name. Note that
this will change in community.general 2.0.0, so to make sure that you are pulling, set I(source) to C(pull). To build
the image, provide a I(path) value set to a directory containing a context and Dockerfile, and set I(source)
to C(build). To load an image, specify I(load_path) to provide a path to an archive file. To tag an image to
a repository, provide a I(repository) path. If the name contains a repository path, it will be pushed.
- "*Note:* C(state=build) is DEPRECATED and will be removed in community.general 2.0.0. Specifying C(build) will behave the
same as C(present)."
type: str
default: present
choices:
- absent
- present
- build
tag:
description:
- Used to select an image when pulling. Will be added to the image when pushing, tagging or building. Defaults to
I(latest).
- If I(name) parameter format is I(name:tag), then tag value from I(name) will take precedence.
type: str
default: latest
buildargs:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21.
- Please use I(build.args) instead. This option will be removed in community.general 2.0.0.
type: dict
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
- Please use I(build.container_limits) instead. This option will be removed in community.general 2.0.0.
type: dict
suboptions:
memory:
description:
- Set memory limit for build.
type: int
memswap:
description:
- Total memory (memory + swap), -1 to disable swap.
type: int
cpushares:
description:
- CPU shares (relative weight).
type: int
cpusetcpus:
description:
- CPUs in which to allow execution, e.g., "0-3", "0,1".
type: str
use_tls:
description:
- "DEPRECATED. Whether to use tls to connect to the docker daemon. Set to
C(encrypt) to use TLS. And set to C(verify) to use TLS and verify that
the server's certificate is valid for the server."
- "*Note:* If you specify this option, it will set the value of the I(tls) or
I(validate_certs) parameters if not set to C(no)."
- Will be removed in community.general 2.0.0.
type: str
choices:
- 'no'
- 'encrypt'
- 'verify'
extends_documentation_fragment:
- community.general.docker
- community.general.docker.docker_py_1_documentation
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.8.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- "Docker API >= 1.20"
author:
- Pavel Antonov (@softzilla)
- Chris Houseknecht (@chouseknecht)
- Sorin Sbarnea (@ssbarnea)
'''
EXAMPLES = '''
- name: Pull an image
community.general.docker_image:
name: pacur/centos-7
source: pull
- name: Tag and push to docker hub
community.general.docker_image:
name: pacur/centos-7:56
repository: dcoppenhagan/myimage:7.56
push: yes
source: local
- name: Tag and push to local registry
community.general.docker_image:
# Image will be centos:7
name: centos
# Will be pushed to localhost:5000/centos:7
repository: localhost:5000/centos
tag: 7
push: yes
source: local
- name: Add tag latest to image
community.general.docker_image:
name: myimage:7.1.2
repository: myimage:latest
# As 'latest' usually already is present, we need to enable overwriting of existing tags:
force_tag: yes
source: local
- name: Remove image
community.general.docker_image:
state: absent
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
- name: Build an image and push it to a private repo
community.general.docker_image:
build:
path: ./sinatra
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
push: yes
source: build
- name: Archive image
community.general.docker_image:
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
archive_path: my_sinatra.tar
source: local
- name: Load image from archive and push to a private registry
community.general.docker_image:
name: localhost:5000/myimages/sinatra
tag: v1
push: yes
load_path: my_sinatra.tar
source: load
- name: Build an image with build args
community.general.docker_image:
name: myimage
build:
path: /path/to/build/dir
args:
log_volume: /var/log/myapp
listen_port: 8080
source: build
- name: Build image using cache source
community.general.docker_image:
name: myimage:latest
build:
path: /path/to/build/dir
# Use as cache source for building myimage
cache_from:
- nginx:latest
- alpine:3.8
source: build
'''
RETURN = '''
image:
description: Image inspection results for the affected image.
returned: success
type: dict
sample: {}
stdout:
description: Docker build output when building an image.
returned: success
type: str
sample: ""
version_added: 1.3.0
'''
import errno
import os
import re
import traceback
from distutils.version import LooseVersion
from ansible_collections.community.general.plugins.module_utils.docker.common import (
clean_dict_booleans_for_docker_api,
docker_version,
AnsibleDockerClient,
DockerBaseClass,
is_image_name_id,
is_valid_tag,
RequestException,
)
from ansible.module_utils._text import to_native
if docker_version is not None:
try:
if LooseVersion(docker_version) >= LooseVersion('2.0.0'):
from docker.auth import resolve_repository_name
else:
from docker.auth.auth import resolve_repository_name
from docker.utils.utils import parse_repository_tag
from docker.errors import DockerException
except ImportError:
# missing Docker SDK for Python handled in module_utils.docker.common
pass
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.source = parameters['source']
build = parameters['build'] or dict()
self.archive_path = parameters.get('archive_path')
self.cache_from = build.get('cache_from')
self.container_limits = build.get('container_limits')
self.dockerfile = build.get('dockerfile')
self.force_source = parameters.get('force_source')
self.force_absent = parameters.get('force_absent')
self.force_tag = parameters.get('force_tag')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.network = build.get('network')
self.extra_hosts = clean_dict_booleans_for_docker_api(build.get('etc_hosts'))
self.nocache = build.get('nocache', False)
self.build_path = build.get('path')
self.pull = build.get('pull')
self.target = build.get('target')
self.repository = parameters.get('repository')
self.rm = build.get('rm', True)
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = build.get('http_timeout')
self.push = parameters.get('push')
self.buildargs = build.get('args')
self.use_config_proxy = build.get('use_config_proxy')
# If name contains a tag, it takes precedence over tag parameter.
if not is_image_name_id(self.name):
repo, repo_tag = parse_repository_tag(self.name)
if repo_tag:
self.name = repo
self.tag = repo_tag
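# Illustrative example of the precedence rule above: parse_repository_tag('nginx:1.19')
# returns ('nginx', '1.19'), so self.name becomes 'nginx' and self.tag becomes '1.19',
# overriding whatever was passed via the tag parameter.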
if self.state == 'present':
self.present()
elif self.state == 'absent':
self.absent()
def fail(self, msg):
self.client.fail(msg)
def present(self):
'''
Handles state = 'present', which includes building, loading or pulling an image,
depending on user provided parameters.
:returns None
'''
image = self.client.find_image(name=self.name, tag=self.tag)
if not image or self.force_source:
if self.source == 'build':
# Build the image
if not os.path.isdir(self.build_path):
self.fail("Requested build path %s could not be found or you do not have access." % self.build_path)
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.log("Building image %s" % image_name)
self.results['actions'].append("Built image %s from %s" % (image_name, self.build_path))
self.results['changed'] = True
if not self.check_mode:
self.results.update(self.build_image())
elif self.source == 'load':
# Load the image from an archive
if not os.path.isfile(self.load_path):
self.fail("Error loading image %s. Specified path %s does not exist." % (self.name,
self.load_path))
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.results['actions'].append("Loaded image %s from %s" % (image_name, self.load_path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.load_image()
elif self.source == 'pull':
# pull the image
self.results['actions'].append('Pulled image %s:%s' % (self.name, self.tag))
self.results['changed'] = True
if not self.check_mode:
self.results['image'], dummy = self.client.pull_image(self.name, tag=self.tag)
elif self.source == 'local':
if image is None:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
self.client.fail('Cannot find the image %s locally.' % name)
if not self.check_mode and image and image['Id'] == self.results['image']['Id']:
self.results['changed'] = False
if self.archive_path:
self.archive_image(self.name, self.tag)
if self.push and not self.repository:
self.push_image(self.name, self.tag)
elif self.repository:
self.tag_image(self.name, self.tag, self.repository, push=self.push)
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
name = self.name
if is_image_name_id(name):
image = self.client.find_image_by_id(name)
else:
image = self.client.find_image(name, self.tag)
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if image:
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force_absent)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
self.results['image']['state'] = 'Deleted'
def archive_image(self, name, tag):
'''
Archive an image to a .tar file. Called when archive_path is passed.
:param name - name of the image. Type: str
:return None
'''
if not tag:
tag = "latest"
image = self.client.find_image(name=name, tag=tag)
if not image:
self.log("archive image: image %s:%s not found" % (name, tag))
return
image_name = "%s:%s" % (name, tag)
self.results['actions'].append('Archived image %s to %s' % (image_name, self.archive_path))
self.results['changed'] = True
if not self.check_mode:
self.log("Getting archive of image %s" % image_name)
try:
image = self.client.get_image(image_name)
except Exception as exc:
self.fail("Error getting image %s - %s" % (image_name, str(exc)))
try:
with open(self.archive_path, 'wb') as fd:
if self.client.docker_py_version >= LooseVersion('3.0.0'):
for chunk in image:
fd.write(chunk)
else:
for chunk in image.stream(2048, decode_content=False):
fd.write(chunk)
except Exception as exc:
self.fail("Error writing image archive %s - %s" % (self.archive_path, str(exc)))
image = self.client.find_image(name=name, tag=tag)
if image:
self.results['image'] = image
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
:param name Name of the image to push.
:param tag Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
changed = False
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
if status == 'Pushing':
changed = True
self.results['changed'] = changed
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
self.results['image']['push_status'] = status
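# Illustration of the registry resolution used in push_image() above (docker SDK
# helper, example values): resolve_repository_name('localhost:5000/myimages/sinatra')
# returns ('localhost:5000', 'myimages/sinatra'), so the push goes to the private
# registry rather than Docker Hub.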
def tag_image(self, name, tag, repository, push=False):
'''
Tag an image into a repository.
:param name: name of the image. required.
:param tag: image tag.
:param repository: path to the repository. required.
:param push: bool. push the image once it's tagged.
:return: None
'''
repo, repo_tag = parse_repository_tag(repository)
if not repo_tag:
repo_tag = "latest"
if tag:
repo_tag = tag
image = self.client.find_image(name=repo, tag=repo_tag)
found = 'found' if image else 'not found'
self.log("image %s was %s" % (repo, found))
if not image or self.force_tag:
self.log("tagging %s:%s to %s:%s" % (name, tag, repo, repo_tag))
self.results['changed'] = True
self.results['actions'].append("Tagged image %s:%s to %s:%s" % (name, tag, repo, repo_tag))
if not self.check_mode:
try:
# Finding the image does not always work, especially running a localhost registry. In those
# cases, if we don't set force=True, it errors.
image_name = name
if tag and not re.search(tag, name):
image_name = "%s:%s" % (name, tag)
tag_status = self.client.tag(image_name, repo, tag=repo_tag, force=True)
if not tag_status:
raise Exception("Tag operation failed.")
except Exception as exc:
self.fail("Error: failed to tag image - %s" % str(exc))
self.results['image'] = self.client.find_image(name=repo, tag=repo_tag)
if image and image['Id'] == self.results['image']['Id']:
self.results['changed'] = False
if push:
self.push_image(repo, repo_tag)
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.build_path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True,
)
if self.client.docker_py_version < LooseVersion('3.0.0'):
params['stream'] = True
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
self.buildargs[key] = to_native(value)
params['buildargs'] = self.buildargs
if self.cache_from:
params['cache_from'] = self.cache_from
if self.network:
params['network_mode'] = self.network
if self.extra_hosts:
params['extra_hosts'] = self.extra_hosts
if self.use_config_proxy:
params['use_config_proxy'] = self.use_config_proxy
# Due to a bug in docker-py, it will crash if
# use_config_proxy is True and buildargs is None
if 'buildargs' not in params:
params['buildargs'] = {}
if self.target:
params['target'] = self.target
build_output = []
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if "stream" in line or "status" in line:
build_line = line.get("stream") or line.get("status")
build_output.append(build_line)
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail(
"Error building %s - code: %s, message: %s, logs: %s" % (
self.name,
errorDetail.get('code'),
errorDetail.get('message'),
build_output))
else:
self.fail("Error building %s - message: %s, logs: %s" % (
self.name, line.get('error'), build_output))
return {"stdout": "\n".join(build_output),
"image": self.client.find_image(name=self.name, tag=self.tag)}
def load_image(self):
'''
Load an image from a .tar archive
:return: image dict
'''
try:
self.log("Opening image %s" % self.load_path)
with open(self.load_path, 'rb') as image_tar:
self.log("Loading image from %s" % self.load_path)
self.client.load_image(image_tar)
except EnvironmentError as exc:
if exc.errno == errno.ENOENT:
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
except Exception as exc:
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
return self.client.find_image(self.name, self.tag)
def main():
argument_spec = dict(
source=dict(type='str', choices=['build', 'load', 'pull', 'local']),
build=dict(type='dict', options=dict(
cache_from=dict(type='list', elements='str'),
container_limits=dict(type='dict', options=dict(
memory=dict(type='int'),
memswap=dict(type='int'),
cpushares=dict(type='int'),
cpusetcpus=dict(type='str'),
)),
dockerfile=dict(type='str'),
http_timeout=dict(type='int'),
network=dict(type='str'),
nocache=dict(type='bool', default=False),
path=dict(type='path', required=True),
pull=dict(type='bool'),
rm=dict(type='bool', default=True),
args=dict(type='dict'),
use_config_proxy=dict(type='bool'),
target=dict(type='str'),
etc_hosts=dict(type='dict'),
)),
archive_path=dict(type='path'),
container_limits=dict(type='dict', options=dict(
memory=dict(type='int'),
memswap=dict(type='int'),
cpushares=dict(type='int'),
cpusetcpus=dict(type='str'),
), removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
dockerfile=dict(type='str', removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
force=dict(type='bool', removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
force_source=dict(type='bool', default=False),
force_absent=dict(type='bool', default=False),
force_tag=dict(type='bool', default=False),
http_timeout=dict(type='int', removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
load_path=dict(type='path'),
name=dict(type='str', required=True),
nocache=dict(type='bool', default=False, removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
path=dict(type='path', aliases=['build_path'], removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
pull=dict(type='bool', removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True, removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
state=dict(type='str', default='present', choices=['absent', 'present', 'build']),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', choices=['no', 'encrypt', 'verify'], removed_in_version='2.0.0',
removed_from_collection='community.general'), # was Ansible 2.12
buildargs=dict(type='dict', removed_in_version='2.0.0', removed_from_collection='community.general'), # was Ansible 2.12
)
required_if = [
# ('state', 'present', ['source']), -- enable in community.general 2.0.0
# ('source', 'build', ['build']), -- enable in community.general 2.0.0
('source', 'load', ['load_path']),
]
def detect_build_cache_from(client):
return client.module.params['build'] and client.module.params['build'].get('cache_from') is not None
def detect_build_network(client):
return client.module.params['build'] and client.module.params['build'].get('network') is not None
def detect_build_target(client):
return client.module.params['build'] and client.module.params['build'].get('target') is not None
def detect_use_config_proxy(client):
return client.module.params['build'] and client.module.params['build'].get('use_config_proxy') is not None
def detect_etc_hosts(client):
return client.module.params['build'] and bool(client.module.params['build'].get('etc_hosts'))
option_minimal_versions = dict()
option_minimal_versions["build.cache_from"] = dict(docker_py_version='2.1.0', docker_api_version='1.25', detect_usage=detect_build_cache_from)
option_minimal_versions["build.network"] = dict(docker_py_version='2.4.0', docker_api_version='1.25', detect_usage=detect_build_network)
option_minimal_versions["build.target"] = dict(docker_py_version='2.4.0', detect_usage=detect_build_target)
option_minimal_versions["build.use_config_proxy"] = dict(docker_py_version='3.7.0', detect_usage=detect_use_config_proxy)
option_minimal_versions["build.etc_hosts"] = dict(docker_py_version='2.6.0', docker_api_version='1.27', detect_usage=detect_etc_hosts)
client = AnsibleDockerClient(
argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True,
min_docker_version='1.8.0',
min_docker_api_version='1.20',
option_minimal_versions=option_minimal_versions,
)
if client.module.params['state'] == 'build':
client.module.deprecate('The "build" state has been deprecated for a long time. '
'Please use "present", which has the same meaning as "build".',
version='2.0.0', collection_name='community.general') # was Ansible 2.11
client.module.params['state'] = 'present'
if client.module.params['use_tls']:
client.module.deprecate('The "use_tls" option has been deprecated for a long time. '
'Please use the "tls" and "validate_certs" options instead.',
version='2.0.0', collection_name='community.general') # was Ansible 2.11
if not is_valid_tag(client.module.params['tag'], allow_empty=True):
client.fail('"{0}" is not a valid docker tag!'.format(client.module.params['tag']))
build_options = dict(
container_limits='container_limits',
dockerfile='dockerfile',
http_timeout='http_timeout',
nocache='nocache',
path='path',
pull='pull',
rm='rm',
buildargs='args',
)
for option, build_option in build_options.items():
default_value = None
if option in ('rm', ):
default_value = True
elif option in ('nocache', ):
default_value = False
if client.module.params[option] != default_value:
if client.module.params['build'] is None:
client.module.params['build'] = dict()
if client.module.params['build'].get(build_option, default_value) != default_value:
client.fail('Cannot specify both %s and build.%s!' % (option, build_option))
client.module.params['build'][build_option] = client.module.params[option]
client.module.deprecate('Please specify build.%s instead of %s. The %s option '
'has been renamed' % (build_option, option, option),
version='2.0.0', collection_name='community.general') # was Ansible 2.12
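# Illustrative walk-through of the renaming loop above: a task that still sets the
# legacy top-level option ``nocache: yes`` has the value copied into
# ``client.module.params['build']['nocache']`` and triggers a deprecation warning
# pointing at ``build.nocache``.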
if client.module.params['source'] == 'build':
if (not client.module.params['build'] or not client.module.params['build'].get('path')):
client.fail('If "source" is set to "build", the "build.path" option must be specified.')
if client.module.params['build'].get('pull') is None:
client.module.deprecate("The default for build.pull is currently 'yes', but will be changed to "
"'no' in community.general 2.0.0. Please set build.pull explicitly to the value you need",
version='2.0.0', collection_name='community.general') # was Ansible 2.12
client.module.params['build']['pull'] = True # TODO: change to False in community.general 2.0.0
if client.module.params['state'] == 'present' and client.module.params['source'] is None:
# Autodetection. To be removed in community.general 2.0.0.
if (client.module.params['build'] or dict()).get('path'):
client.module.params['source'] = 'build'
elif client.module.params['load_path']:
client.module.params['source'] = 'load'
else:
client.module.params['source'] = 'pull'
client.module.deprecate('The value of the "source" option was determined to be "%s". '
'Please set the "source" option explicitly. Autodetection will '
'be removed in community.general 2.0.0.' % client.module.params['source'],
version='2.0.0', collection_name='community.general') # was Ansible 2.12
if client.module.params['force']:
client.module.params['force_source'] = True
client.module.params['force_absent'] = True
client.module.params['force_tag'] = True
client.module.deprecate('The "force" option will be removed in community.general 2.0.0. Please '
'use the "force_source", "force_absent" or "force_tag" option '
'instead, depending on what you want to force.',
version='2.0.0', collection_name='community.general') # was Ansible 2.12
try:
results = dict(
changed=False,
actions=[],
image={}
)
ImageManager(client, results)
client.module.exit_json(**results)
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
except RequestException as e:
client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
|
/**
* @author MPanknin / http://www.redplant.de/
* @author alteredq / http://alteredqualia.com/
*/
THREE.AreaLight = function ( color, intensity ) {
THREE.Light.call( this, color );
this.type = 'AreaLight';
this.normal = new THREE.Vector3( 0, - 1, 0 );
this.right = new THREE.Vector3( 1, 0, 0 );
this.intensity = ( intensity !== undefined ) ? intensity : 1;
this.width = 1.0;
this.height = 1.0;
this.constantAttenuation = 1.5;
this.linearAttenuation = 0.5;
this.quadraticAttenuation = 0.1;
};
THREE.AreaLight.prototype = Object.create( THREE.Light.prototype );
|
"""
Module to handle reading/writing/consistency of DSS keywords
in FITS files.
author(s): M. Kerr, T. Burnett
"""
__version__ = '$Revision: 1.8 $'
#$Header: /nfs/slac/g/glast/ground/cvs/pointlike/python/uw/data/dssman.py,v 1.8 2016/06/22 17:02:50 wallacee Exp $
from astropy.io import fits as pyfits
from collections import deque
import numpy as np
#TODO class(es) for other DSS types, e.g. Region type
DSSKeys = ['TYP','UNI','VAL','REF']
def isfloat(string):
try:
float(string); return True
except (ValueError,TypeError):
return False
def isint(string):
try:
a = int(string)
b = float(string)
return a==b
except (ValueError,TypeError):
return False
def isnum(string):
return isfloat(string)
def num(string):
if isint(string): return int(string)
if isfloat(string): return float(string)
#raise ValueError,'Could not cast %s to float or int'%(string)
return None
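# Examples of the helpers above (illustrative): num('3') -> 3 (int),
# num('3.5') -> 3.5 (float), num('abc') -> None; isnum() currently just
# defers to isfloat().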
class DSSEntry(dict):
def __init__(self,d):
self.update(d)
if 'index' not in self.keys(): self['index'] = -1
def __copy__(self):
return DSSEntry(dict(self)) # copy current entries into a new DSSEntry; do we need copy?
def __eq__(self,other):
for key in DSSKeys:
if self[key] != other[key]:
return False
return True
def __ne__(self,other):
return not self.__eq__(other)
def __str__(self):
return '\n'.join(['DS{0}{1}: {2}'.format(key,self['index'],self[key]) for key in DSSKeys])
def to_header_entries(self,index=None):
""" Return tuples of header key/val, e.g. to write to an FT1 file."""
index = index or self['index']
tup = []
for key in DSSKeys:
if self[key] is None: continue
tup.append(['DS{0}{1}'.format(key,index),'{0}'.format(self[key])])
return tup
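# Example (illustrative): an entry with index=1, TYP='ENERGY', UNI='MeV',
# VAL='100:100000' and REF=None yields
# [['DSTYP1', 'ENERGY'], ['DSUNI1', 'MeV'], ['DSVAL1', '100:100000']]
# from to_header_entries() (REF is skipped because it is None).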
def roi_info(self):
""" Check if specifies an extraction region."""
if (('POS' in self['TYP']) and
(self['UNI'] == 'deg') and
('CIRCLE' in self['VAL'])):
try:
tup = self['VAL'].split('(')[1][:-1].split(',')
val = [float(x) for x in tup]
assert(len(val)==3)
return val
except Exception:
pass
return None
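# roi_info() example (illustrative): TYP='POS(RA,DEC)', UNI='deg',
# VAL='CIRCLE(128.84,-45.18,10)' -> [128.84, -45.18, 10.0]
# (RA, Dec and radius of the extraction region, in degrees).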
class DSSSimpleRange(DSSEntry):
""" A DSS entry specifying a single range on a column.
E.g., ZENITH_ANGLE < 100 deg., 1e2 < ENERGY < 1e5."""
def get_bounds(self):
return self['lower'],self['upper']
@staticmethod
def bounds_to_val(lower,upper):
if isnum(lower):
if isnum(upper): val = '{0}:{1}'.format(lower,upper)
else: val = '{0}:'.format(lower)
elif isnum(upper): val = ':{0}'.format(upper)
else: val = None
return val
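# bounds_to_val() examples (illustrative): (100, 300000) -> '100:300000',
# (None, 100) -> ':100', (100, None) -> '100:', (None, None) -> None.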
def set_bounds(self,lower,upper):
val = DSSSimpleRange.bounds_to_val(lower,upper)
if val is None:
raise ValueError('Both lower and upper were empty!')
self['lower'] = num(lower); self['upper'] = num(upper)
def intersection(self,other):
""" Compare with another DSSSimpleRange and update bounds if other
is more restrictive."""
if (other['lower'] is not None):
if self['lower'] is None: self['lower'] = other['lower']
else: self['lower'] = max(self['lower'],other['lower'])
if (other['upper'] is not None):
if self['upper'] is None: self['upper'] = other['upper']
else: self['upper'] = min(self['upper'],other['upper'])
class DSSBitMask(DSSSimpleRange):
""" The only current use for a bit mask is selecting on EVENT_CLASS
in Pass7+ data. As such, only need is to keep DSS keywords
consistent and pass the single masked bit into Data.
The single bit is stored in both the lower and upper field."""
# NOTES on EVENT_CLASS DSS format
# PASS 7 style
# DSTYP2 = 'BIT_MASK(EVENT_CLASS,2)'
# DSUNI2 = 'DIMENSIONLESS'
# DSVAL2 = '1:1 '
# Pass 6 style
# DSTYP3 = 'EVENT_CLASS'
# DSUNI3 = 'dimensionless'
# DSVAL3 = '3:10 '
def __init__(self,d):
super(DSSBitMask,self).__init__(d)
t = self['TYP'][1:-1] #split off parens
self['lower'] = self['upper'] = int(t.split(',')[1])
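# Example (illustrative): a Pass 7 entry with TYP='BIT_MASK(EVENT_CLASS,2)'
# stores lower = upper = 2, i.e. the single masked bit described in the class docstring.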
def intersection(self,other):
""" A bit mask intersection at the level we require is trivial --
the masked bit MUST be the same."""
if (other['lower'] is None) or (other['upper'] is None):
raise ValueError('Not DSSBitMask format')
if (self['lower'] is None) or (self['upper'] is None):
raise ValueError('Not DSSBitMask format')
if self['lower']!=other['lower'] or self['upper']!=other['upper']:
raise ValueError('Bit masks are not compatible!')
return
def DSSFactory(keys,vals):
""" Return the correct type of DSS subclass for header entries."""
# first, parse the values present in the set of header entries
d = dict(TYP=None,UNI=None,VAL=None,REF=None)
for k,v in zip(keys,vals):
d['index'] = int(k[-1])
for l in d.keys():
if l in k : d[l] = v
assert d[l] is not None, 'DSS key {}, value {} not valid'.format(k,v)
# determine if cut is a bitmask
if 'BIT_MASK' in d['TYP']:
return DSSBitMask(d)
# determine if cut is of a simple type
assert d['VAL'] is not None, 'Bad DSS entry: keys {}, values {}'.format(keys,vals)
toks = d['VAL'].split(':')
if len(toks)==2:
a,b = toks
# conditions to accept: empty low and numeric high,
# complement, and both are numbers
if ((len(a)==0 and isnum(b)) or
(len(b)==0 and isnum(a)) or
(isnum(a) and isnum(b))):
d['lower'] = num(a); d['upper'] = num(b)
return DSSSimpleRange(d)
return DSSEntry(d)
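# DSSFactory() walk-through (illustrative): keys=['DSTYP1','DSUNI1','DSVAL1'],
# vals=['ENERGY','MeV','100:100000'] parses to a DSSSimpleRange with index=1,
# lower=100 and upper=100000, while a TYP of 'BIT_MASK(EVENT_CLASS,2)' yields
# a DSSBitMask instead.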
class DSSEntries(list):
# NB -- may need to handle duplicate entries?
def __init__(self,fits_name,header_key='EVENTS'):
try:
h = pyfits.getheader(fits_name,header_key)
except IOError:
print('Could not find file {0}'.format(fits_name))
return
except IndexError as msg:
print('Invalid header index for fits file %s: %s'% (fits_name,msg))
return
except KeyError as msg:
print('Invalid key for fits file %s: %s'% (fits_name,msg))
return
keys = [x for x in h.keys() if x.startswith('DS')]
if len(keys)==0: return
vals = [h[k] for k in keys]
# list of the indices, not necessarily in numeric order (THB change 02/23/16)
indeces = sorted(list(set([int(k[-1]) for k in keys])))
kdeque,vdeque = deque(),deque()
counter = 0 # index of list of DSS indeces
for i in xrange(len(keys)):
if int(keys[i][-1])!=indeces[counter]:
self.append(DSSFactory(kdeque,vdeque))
kdeque.clear(); vdeque.clear()
counter += 1
kdeque.append(keys[i]); vdeque.append(vals[i])
self.append(DSSFactory(kdeque,vdeque))
self._remove_duplicates()
def __str__(self):
return '\n'.join([str(x) for x in self])
def __eq__(self,other):
"""Test for equality of DSS entries, independent of order."""
return sorted(self)==sorted(other)
def __ne__(self,other):
return not self.__eq__(other)
def get_simple_dss(self,colname):
""" Return a DSS entry corresponding to a simple cut on colname."""
for idss,dss in enumerate(self):
if dss['TYP'] == colname:
return dss,idss
#Slight kludge to handle weird formatting of EVENT_CLASS bitmasks
if colname=='EVENT_CLASS' and (colname in dss['TYP']):
return dss,idss
return None,None
def delete(self,index):
""" Delete a DSS entry and re-index the remaining ones."""
ret = self.pop(index)
if index < len(self)-1:
for i in xrange(index,len(self)):
self[i]['index'] = i+1
return ret
def write(self,fits_name,header_key='EVENTS'):
f = pyfits.open(fits_name,uint=False)
h = f[header_key]._header
for d in self:
tup = d.to_header_entries()
for t in tup:
h[t[0]] = t[1]
# convert unsigned ints to ints -- this is a kluge but perhaps necessary
for hdu in f:
if not isinstance(hdu,pyfits.BinTableHDU): continue
for icol,col in enumerate(hdu.columns):
if col.format=='1J':
#print ('update %s'%col.name)
data = hdu.data.field(col.name).astype(np.int32) # apply transform
# not sure why above line must be done -- delayed, perhaps?
hdu.columns.change_attrib(col.name,'bzero',0)
# above line is crucial
f.writeto(fits_name,clobber=True)
#f.writeto('/tmp/test.fits',clobber=True)
def roi_info(self,tol=0.01,delete_duplicates=False):
""" Return any information about the extraction region.
Will check for duplicates, which can arise from a slight
mismatch of astroserver and gtselect coordinates, and
optionally delete them."""
roi_info = None
offset = 0
for i in xrange(len(self)):
i += offset
d = self[i]
r = d.roi_info()
if (r is not None):
if (roi_info is None):
roi_info = r
else:
# need to check for compatibility
agree = all((abs(x-y)<tol for x,y in zip(r,roi_info)))
if not agree:
raise ValueError('Found multiple DSS keywords for ROI differing by greater than {0} degrees'.format(tol))
elif delete_duplicates:
self.delete(i)
offset += 1
return roi_info
def _remove_duplicates(self):
duplicates = []
for i, keyword in enumerate(self):
if keyword in self[i+1:]: duplicates.append(i)
offset = 0
for dup in duplicates:
self.delete(dup - offset)
offset += 1
def make_simple_dss(colname,unit,low,high,index=1):
""" Return a DSSSimpleRange object with bounds specified by low/high.
index gives the order of the DSS keyword, e.g. DSVAL1, DSVAL2, etc. """
val = DSSSimpleRange.bounds_to_val(low,high)
if val is None:
raise ValueError('Could not interpret arguments as a simple cut.')
d = dict(REF=None,TYP=colname,UNI=unit,VAL=val,
lower=num(low),upper=num(high),index=index)
return DSSSimpleRange(d)
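# Example (illustrative): make_simple_dss('ZENITH_ANGLE', 'deg', 0, 100) returns a
# DSSSimpleRange with VAL='0:100', which to_header_entries() would write out as
# DSTYP1/DSUNI1/DSVAL1 header keywords.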
"""
def process_pixeldata(pd):
# for now, assume all FT1 files have same DSS keywords...
dsse = get_dss_entries(pd.ft1files[0])
colnames = ['ZENITH_ANGLE','THETA','ENERGY','ENERGY']
units = ['deg','deg','MeV','MeV']
ptlvars = ['zenithcut','thetacut','emin','emax']
indices = [1,1,0,1]
for i in xrange(len(ptlvars)):
# check the appropriate simple cut
ptl_var = pd.__dict__[ptlvars[i]]
dss = dsse.get_simple_entry(colnames[i])
if dss is None:
if not pd.quiet:
print ('DSS keywords specified no %s cut. Applying the specified pointlike cut %s = %.1f '%(colnames[i],ptlvars[i],ptl_var))
if indices[i]:
# NB only works for cut variables with range [0,max]...
# TODO -- make work for all cuts (necessary?)
dsse.append(make_simple_dss(colnames[i],units[i],0,ptl_var,index=len(dsse)+1))
continue
dss_var = dss.get_simple_bound(indices[i])
dss_more_stringent = \
(indices[i]==0 and dss_var>ptl_var) or \
(indices[i]==1 and dss_var<ptl_var)
ptl_more_stringent = (not dss_more_stringent) and (ptl_var != dss_var)
sign_string = 'upper bound' if indices[i] else 'lower bound'
if ptl_more_stringent:
dss.set_simple_bound(ptl_var,indices[i])
if not pd.quiet:
print ('Applying more stringent pointlike %s %s cut (%s=%.1f) over that found in DSS keywords (%.1f)'%(colnames[i],sign_string,ptlvars[i],ptl_var,dss_var))
elif dss_more_stringent:
pd.__dict__[ptlvars[i]] = dss_var
if not pd.quiet:
print ('Applying more stringent DSS %s %s cut (%.1f) over that found in pointlike (%s=%.1f)'%(colnames[i],sign_string,dss_var,ptlvars[i],ptl_var))
else:
if not pd.quiet:
print ('Identical cuts on %s %s (%s=%.1f)'%(colnames[i],sign_string,ptlvars[i],ptl_var))
pd.dsse = dsse
"""
class DummyPD(object):
""" For testing."""
def __init__(self):
self.ft1files = ['j1838m0536_p7_07062011-ft1_gtselect.fits']
self.quiet = False
self.zenithcut = 100.
self.thetacut = 66.4
self.emin = 1e2
self.emax = 1e6
|
export default {
route: {
dashboard: '首页',
introduction: '简述',
documentation: '文档',
guide: '引导页',
order: '订单管理',
userOrder: '用户订单',
afterSaleOrder: '售后订单',
sellerOrder: '商户订单',
avatarUpload: '头像上传',
dropzone: 'Dropzone',
sticky: 'Sticky',
countTo: 'CountTo',
statistics: '统计管理',
couponStatistics: '优惠券统计',
fightGroupStatistics: '拼团统计',
integralStatistics: '积分统计',
document: '文章管理',
goods: '商品管理',
goodsEdit: '商品编辑',
goodsManageList: '商品管理',
classification: '商品分类',
activity: '活动管理',
couponTemplate: '优惠券模版管理',
coupon: '优惠券管理',
userCoupon: '用户优惠券',
bannerManage: 'Banner 管理',
stationLetter: '站内信管理',
fightGroup: '拼团管理',
fightGroupActivity: '拼团活动',
fightGroupRule: '拼团规则',
fightGroupGoods: '拼团商品',
fightGroupRecord: '成团记录',
fullExchange: '满赠换购管理',
createArticle: '创建文章',
editArticle: '编辑文章',
articleList: '文章列表',
finance: '财务管理',
fundManage: '资金管理',
tradeManage: '交易流水',
riskManage: '风控管理',
riskRule: '规则配置',
riskBlackList: '黑名单管理',
system: '系统管理',
systemUser: '用户管理',
systemMenu: '菜单管理',
systemRole: '角色管理',
systemConfig: '配置管理',
monitor: '监控服务',
hystrix: 'Hystrix 监控',
interfaceService: '接口Swagger',
activityInterface: '活动中心模块',
adminInterface: '系统平台模块',
goodsInterface: '商品中心模块',
orderInterface: '订单中心模块',
settlementInterface: '结算中心模块',
userInterface: '用户中心模块',
databaseService: '数据库监控',
adminDatabase: '管理数据中心',
activityDatabase: '活动数据中心',
goodsDatabase: '商品数据中心',
orderDatabase: '订单数据中心',
settlementDatabase: '结算数据中心',
userDatabase: '用户中心',
generatorManage: '代码生成'
},
activity: {
banner: {
title: '活动标题',
bannerUrl: 'banner 图片地址',
bannerClickUrl: 'Banner跳转地址',
bannerType: '类型',
shareUrl: '分享地址',
shareTitle: '分享标题',
shareSubtitle: '分享副标题',
shareIcon: '分享图标',
shareChannel: '分享渠道',
createTime: '创建时间',
createUser: '创建人',
status: '状态'
},
coupon: {
templateName: '模板名称',
couponName: '优惠券活动名称',
templateType: '模板类型',
orderAmount: '订单满减金额',
couponAmount: '优惠券金额',
deleteStatus: '是否禁用',
discountType: '优惠券类型',
description: '优惠券说明',
discountStrength: '折扣力度',
phone: '手机号',
username: '用户名'
},
exchange: {
title: ' 活动标题'
},
fightGroup: {
activityName: '活动名称',
activityImg: '活动图片',
activityTime: '活动周期',
rule: '规则',
goods: '商品',
record: '记录',
purchaseLimit: '限购数量',
fightCount: '成团人数',
description: '说明',
time: '开团时间',
master: '团长'
},
station: {
content: '内容'
}
},
goods: {
goodsName: '商品名称',
goodsCode: '商品编码',
goodsType: '商品类型',
description: '商品简介',
goodsStatus: '状态',
goodsAmount: '商品价格',
goodsDiscountAmount: '折扣价格',
integral: '商品积分',
goodsInventory: '库存',
createTime: '创建时间',
classificationName: '分类名称',
classificationImg: '分类图片',
placeOrigin: '产地',
storage: '储存',
goodsImg: '商品LOGO',
bannerFirst: '商品banner第一页',
bannerSecond: '商品banner第二页',
bannerThird: '商品banner第三页',
detailsImgFirst: '商品详情图片 一',
detailsImgSecond: '商品详情图片 二',
detailsImgThird: '商品详情图片 三',
detailsImgFourth: '商品详情图片 四',
detailsImgFifth: '商品详情图片 五'
},
order: {
orderCode: '订单号',
orderStatus: '订单状态',
phone: '手机号码',
username: '用户名',
orderAmount: '订单金额',
integral: '积分',
orderType: '订单类型',
integralAmount: '积分减免金额',
discountAmount: '优惠券减免金额',
address: '送货地址',
createTime: '下单时间'
},
system: {
user: { // 系统管理 用户管理
sysName: '系统用户',
username: '登陆账号',
roleCode: '角色',
password: '密码',
status: '状态',
avatar: '用户头像'
},
role: {
roleName: '角色名称',
roleCode: '角色代码',
status: '状态'
},
config: {
module: '模块',
configName: '配置项名称',
configValue: '配置项值',
status: '状态',
description: '说明'
},
menu: {
menuName: '菜单名称',
permission: '权限',
type: '类型',
createTime: '创建时间'
}
},
navbar: {
logOut: '退出登录',
dashboard: '首页',
github: '项目地址',
screenfull: '全屏',
theme: '换肤',
size: '布局大小'
},
login: {
title: 'Jump 后台管理系统',
logIn: '登录',
username: '账号',
password: '密码',
any: '随便填'
},
permission: {
roles: '你的权限',
switchRoles: '切换权限'
},
guide: {
description: '引导页对于一些第一次进入项目的人很有用,你可以简单介绍下项目的功能。本 Demo 是基于',
button: '打开引导'
},
components: {
documentation: '文档',
tinymceTips: '富文本是管理后台一个核心的功能,但同时又是一个有很多坑的地方。在选择富文本的过程中我也走了不少的弯路,市面上常见的富文本都基本用过了,最终权衡了一下选择了Tinymce。更详细的富文本比较和介绍见',
dropzoneTips: '由于我司业务有特殊需求,而且要传七牛 所以没用第三方,选择了自己封装。代码非常的简单,具体代码你可以在这里看到 @/components/Dropzone',
stickyTips: '当页面滚动到预设的位置会吸附在顶部',
backToTopTips1: '页面滚动到指定位置会在右下角出现返回顶部按钮',
backToTopTips2: '可自定义按钮的样式、show/hide、出现的高度、返回的位置 如需文字提示,可在外部使用Element的el-tooltip元素',
imageUploadTips: '由于我在使用时它只有vue@1版本,而且和mockjs不兼容,所以自己改造了一下,如果大家要使用的话,优先还是使用官方版本。'
},
table: {
dynamicTips1: '固定表头, 按照表头顺序排序',
dynamicTips2: '不固定表头, 按照点击顺序排序',
dragTips1: '默认顺序',
dragTips2: '拖拽后顺序',
title: '标题',
permission: '授权',
type: '类型',
remark: '点评',
search: '搜索',
add: '添加',
export: '导出',
reviewer: '审核人',
id: '序号',
date: '时间',
author: '作者',
readings: '阅读数',
status: '状态',
actions: '操作',
edit: '编辑',
publish: '发布',
draft: '草稿',
delete: '删除',
cancel: '取 消',
confirm: '确 定',
close: '关 闭',
query: '查 看',
detail: '详情',
operationUser: '操作人',
reissueCoupon: '补发优惠券',
downloadTemplate: '下载模板'
},
errorLog: {
tips: '请点击右上角bug小图标',
description: '现在的管理后台基本都是spa的形式了,它增强了用户体验,但同时也会增加页面出问题的可能性,可能一个小小的疏忽就导致整个页面的死锁。好在 Vue 官网提供了一个方法来捕获处理异常,你可以在其中进行错误处理或者异常上报。',
documentation: '文档介绍'
},
excel: {
export: '导出',
selectedExport: '导出已选择项',
placeholder: '请输入文件名(默认excel-list)'
},
zip: {
export: '导出',
placeholder: '请输入文件名(默认file)'
},
theme: {
change: '换肤',
documentation: '换肤文档',
tips: 'Tips: 它区别于 navbar 上的 theme-pick, 是两种不同的换肤方法,各自有不同的应用场景,具体请参考文档。'
},
tagsView: {
refresh: '刷新',
close: '关闭',
closeOthers: '关闭其它',
closeAll: '关闭所有'
},
time: {
beginTime: '开始时间',
endTime: '结束时间'
},
common: {
createTime: '创建时间',
createUser: '创建人'
}
}
|
import requests
from bs4 import BeautifulSoup
from requests.exceptions import ConnectionError, TooManyRedirects
from raccoon_src.utils.web_server_validator import WebServerValidator
from raccoon_src.utils.request_handler import RequestHandler
from raccoon_src.utils.help_utils import HelpUtilities
from raccoon_src.utils.coloring import COLOR, COLORED_COMBOS
from raccoon_src.utils.exceptions import WebAppScannerException, WebServerValidatorException
from raccoon_src.utils.logger import Logger
class WebApplicationScanner:
def __init__(self, host):
self.host = host
self.request_handler = RequestHandler()
self.web_server_validator = WebServerValidator()
self.headers = None
self.robots = None
self.forms = None
self.fuzzable_urls = set()
self.emails = set()
log_file = HelpUtilities.get_output_path("{}/web_scan.txt".format(self.host.target))
self.target_dir = "/".join(log_file.split("/")[:-1])
self.logger = Logger(log_file)
def _detect_cms(self, tries=0):
"""
Detect CMS using whatcms.org.
Has a re-try mechanism because false negatives may occur
:param tries: Count of tries for CMS discovery
"""
# WhatCMS is under CloudFlare which detects and blocks proxied/Tor traffic, hence normal request.
page = requests.get(url="https://whatcms.org/?s={}".format(self.host.target))
soup = BeautifulSoup(page.text, "lxml")
found = soup.select(".panel.panel-success")
if found:
try:
cms = [a for a in soup.select("a") if "/c/" in a.get("href")][0]
self.logger.info("{} CMS detected: target is using {}{}{}".format(
COLORED_COMBOS.GOOD, COLOR.GREEN, cms.get("title"), COLOR.RESET))
except IndexError:
if tries >= 4:
return
else:
self._detect_cms(tries=tries + 1)
else:
if tries >= 4:
return
else:
self._detect_cms(tries=tries + 1)
def _cookie_info(self, jar):
for cookie in jar:
key = cookie.__dict__.get("name")
value = cookie.__dict__.get("value")
domain = cookie.__dict__.get("domain")
secure = cookie.__dict__.get("secure")
http_only = cookie.has_nonstandard_attr("HttpOnly")
try:
if domain in self.host.target or self.host.target in domain:
if not secure or not http_only:
current = "%s Cookie: {%s: %s} -" % (COLORED_COMBOS.GOOD, key, value)
if not secure and not http_only:
current += " both secure and HttpOnly flags are not set"
elif not secure:
current += " secure flag not set"
else:
current += " HttpOnly flag not set"
self.logger.info(current)
except TypeError:
continue
def _server_info(self):
if self.headers.get("server"):
self.logger.info("{} Web server detected: {}{}{}".format(
COLORED_COMBOS.GOOD, COLOR.GREEN, self.headers.get("server"), COLOR.RESET))
def _x_powered_by(self):
if self.headers.get("X-Powered-By"):
self.logger.info("{} X-Powered-By header detected: {}{}{}".format(
COLORED_COMBOS.GOOD, COLOR.GREEN, self.headers.get("X-Powered-By"), COLOR.RESET))
def _anti_clickjacking(self):
if not self.headers.get("X-Frame-Options"):
self.logger.info(
"{} X-Frame-Options header not detected - target might be vulnerable to clickjacking".format(
COLORED_COMBOS.GOOD)
)
def _xss_protection(self):
xss_header = self.headers.get("X-XSS-PROTECTION")
if xss_header and "1" in xss_header:
self.logger.info("{} Found X-XSS-PROTECTION header".format(COLORED_COMBOS.BAD))
def _cors_wildcard(self):
if self.headers.get("Access-Control-Allow-Origin") == "*":
self.logger.info("{} CORS wildcard detected".format(COLORED_COMBOS.GOOD))
def _robots(self):
res = self.request_handler.send(
"GET",
url="{}://{}:{}/robots.txt".format(
self.host.protocol,
self.host.target,
self.host.port
)
)
if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
self.logger.info("{} Found robots.txt".format(COLORED_COMBOS.GOOD))
with open("{}/robots.txt".format(self.target_dir), "w") as file:
file.write(res.text)
def _sitemap(self):
res = self.request_handler.send(
"GET",
url="{}://{}:{}/sitemap.xml".format(
self.host.protocol,
self.host.target,
self.host.port
)
)
if res.status_code != 404 and res.text and "<!DOCTYPE html>" not in res.text:
self.logger.info("{} Found sitemap.xml".format(COLORED_COMBOS.GOOD))
with open("{}/sitemap.xml".format(self.target_dir), "w") as file:
file.write(res.text)
def _analyze_hrefs(self, href):
if all(("?" in href, "=" in href, not href.startswith("mailto:"))):
if self.host.naked in href or self.host.target in href:
self.fuzzable_urls.add(href)
elif href.startswith("mailto:"):
self._add_to_emails(href)
def _log_fuzzable_urls(self):
base_target = "{}://{}:{}".format(self.host.protocol, self.host.target, self.host.port)
for url in self.fuzzable_urls:
if url.startswith("/"):
self.logger.debug("\t{}{}".format(base_target, url))
else:
self.logger.debug("\t{}".format(url))
def _log_emails(self):
for email in self.emails:
self.logger.debug("\t{}".format(email[7:]))
def _find_urls(self, soup):
urls = soup.select("a")
if urls:
for url in urls:
href = url.get("href")
if href:
self._analyze_hrefs(href)
if self.fuzzable_urls:
self.logger.info("{} {} fuzzable URLs discovered".format(
COLORED_COMBOS.NOTIFY, len(self.fuzzable_urls)))
self._log_fuzzable_urls()
if self.emails:
self.logger.info("{} {} email addresses discovered".format(
COLORED_COMBOS.NOTIFY, len(self.emails)))
self._log_emails()
def _find_forms(self, soup):
# TODO: Analyze interesting input names/ids/params
self.forms = soup.select("form")
if self.forms:
self.logger.info("{} {} HTML forms discovered".format(COLORED_COMBOS.NOTIFY, len(self.forms)))
for form in self.forms:
form_id = form.get("id")
form_class = form.get("class")
form_method = form.get("method")
form_action = form.get("action")
if form_action == "#":
continue
self.logger.debug("\tForm details: ID: {}, Class: {}, Method: {}, action: {}".format(
form_id, form_class, form_method, form_action
))
def _add_to_emails(self, href):
self.emails.add(href)
def get_web_application_info(self):
session = self.request_handler.get_new_session()
try:
with session:
# Test if target is serving HTTP requests
response = session.get(
timeout=20,
url="{}://{}:{}".format(
self.host.protocol,
self.host.target,
self.host.port
)
)
self.headers = response.headers
self._detect_cms()
self._robots()
self._sitemap()
self._server_info()
self._x_powered_by()
self._cors_wildcard()
self._xss_protection()
self._anti_clickjacking()
self._cookie_info(session.cookies)
soup = BeautifulSoup(response.text, "lxml")
self._find_urls(soup)
self._find_forms(soup)
except (ConnectionError, TooManyRedirects) as e:
raise WebAppScannerException("Couldn't get response from server.\n"
"Caused due to exception: {}".format(str(e)))
async def run_scan(self):
self.logger.info("{} Trying to collect {} web application data".format(COLORED_COMBOS.INFO, self.host))
try:
self.web_server_validator.validate_target_webserver(self.host)
self.get_web_application_info()
except WebServerValidatorException:
self.logger.info(
"{} Target does not seem to have an active web server on port: {}. "
"No web application data will be gathered.".format(COLORED_COMBOS.NOTIFY, self.host.port))
return
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import subprocess
import unittest
from pybsd import Executor
class TestExecutor(Executor):
ezjail_admin_list_output = (0,
"""STA JID IP Hostname Root Directory\n"""
"""--- ---- --------------- ------------------------------ ------------------------\n"""
"""ZR 1 10.0.1.41/24 system /usr/jails/system\n"""
""" 1 re0|2a01:4f8:210:41e6::1:41:1/100\n"""
""" 1 lo1|127.0.1.41/24\n"""
""" 1 lo1|::1:41/100\n""",
'')
def __call__(self, binary, subcommand, *cmd_args, **kwargs):
if 'ezjail-admin' in binary:
if subcommand == 'list':
return self.ezjail_admin_list_output
elif subcommand == 'console':
return (0,
'The output of command `{}` in jail `{}`'.format(cmd_args[1], cmd_args[2]),
'')
class TestExecutorUnknownHeaders(TestExecutor):
ezjail_admin_list_output = (0,
"""STA JOID IP Hostname Root Directory\n"""
"""--- ---- --------------- ------------------------------ ------------------------\n"""
"""ZR 1 10.0.1.41/24 system /usr/jails/system\n"""
""" 1 re0|2a01:4f8:210:41e6::1:41:1/100\n"""
""" 1 lo1|127.0.1.41/24\n"""
""" 1 lo1|::1:41/100\n""",
'')
class TestExecutorShortOutput(TestExecutor):
ezjail_admin_list_output = (0,
"""STA JID IP Hostname Root Directory""",
'')
class ExecutorTestCase(unittest.TestCase):
def test_ls_output(self):
executor = Executor()
rc, out, err = executor('ls', 'tests/test_executors')
self.assertEqual(rc, 0, 'incorrect executor return code')
self.assertEqual(out, 'readme\n', 'incorrect executor stdout')
self.assertEqual(err, '', 'incorrect executor stderr')
def test_ls_output_splitlines(self):
executor = Executor(splitlines=True)
rc, out, err = executor('ls', 'tests/test_executors')
self.assertEqual(rc, 0, 'incorrect executor return code')
self.assertEqual(out, ['readme'], 'incorrect executor stdout')
self.assertEqual(err, [], 'incorrect executor stderr')
def test_ls_file_not_found_output(self):
executor = Executor()
rc, out, err = executor('ls', 'i/do/not/exist')
self.assertEqual(rc, 2, 'incorrect executor return code')
self.assertEqual(out, '', 'incorrect executor stdout')
self.assertEqual(err, 'ls: cannot access i/do/not/exist: No such file or directory\n', 'incorrect executor stderr')
def test_ls_with_err_output(self):
executor = Executor()
rc, out = executor('ls', 'tests/test_executors', err='')
self.assertEqual(rc, 0, 'incorrect executor return code')
self.assertEqual(out, 'readme\n', 'incorrect executor stdout')
def test_ls_with_out_and_err_output(self):
executor = Executor()
rc = executor('ls', 'tests/test_executors', out='readme\n', err='')
self.assertEqual(rc, 0, 'incorrect executor return code')
def test_ls_with_rc_out_err_output(self):
executor = Executor()
rc = executor('ls', 'tests/test_executors', rc=0, out='readme\n', err='')
self.assertEqual(rc, None, 'incorrect executor return code')
def test_ls_calledprocesserror_1(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'tests/test_executors', rc=2, out='readme\n', err='')
self.assertEqual(context_manager.exception.returncode, 0, 'incorrect executor return code')
def test_ls_calledprocesserror_2(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'tests/test_executors', rc=0, out='readme\n', err='something')
self.assertEqual(context_manager.exception.returncode, 0, 'incorrect executor return code')
def test_ls_calledprocesserror_3(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'i/do/not/exist', rc=2, out='', err='something')
self.assertEqual(context_manager.exception.returncode, 2, 'incorrect executor return code')
def test_ls_calledprocesserror_4(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'tests/test_executors', rc=0, out='', err='something')
self.assertEqual(context_manager.exception.returncode, 0, 'incorrect executor return code')
def test_ls_calledprocesserror_5(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'i/do/not/exist', rc=2, out='readme\n', err='something')
self.assertEqual(context_manager.exception.returncode, 2, 'incorrect executor return code')
def test_ls_calledprocesserror_6(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'i/do/not/exist', rc=[1, 3], out='readme\n', err='something')
self.assertEqual(context_manager.exception.returncode, 2, 'incorrect executor return code')
def test_ls_correct_rc(self):
executor = Executor()
with self.assertRaises(subprocess.CalledProcessError) as context_manager:
executor('ls', 'i/do/not/exist', rc=[1, 2, 3], out='readme\n', err='something')
self.assertEqual(context_manager.exception.returncode, 2, 'incorrect executor return code')
|
"""Module for interacting with the InfluxDB database"""
from typing import List
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
from homeflux.utils import timer
from homeflux import environment, log
from homeflux.data import data_types
def write(values: List[data_types.AbstractRecord]) -> None:
"""Write the list of records to the database.
Args:
values (List[data_types.AbstractRecord]): List of records to insert into the database.
Returns:
None
"""
# Open connection to database
t = timer.Timer()
client = InfluxDBClient(url=environment.INFLUX_URL, token=environment.INFLUX_TOKEN, org=environment.INFLUX_ORG)
log.debug('Writing %s points to %s', len(values), environment.INFLUX_URL)
with client.write_api(write_options=SYNCHRONOUS) as api:
# Group items by bucket
out_dict = {}
for obj in values:
if obj.bucket not in out_dict:
out_dict[obj.bucket] = []
out_dict[obj.bucket].append(obj.as_influx_dict())
# Insert data into database
for bucket, data_list in out_dict.items():
log.info('Writing %s points to bucket: %s', len(data_list), bucket)
for data_to_write in data_list:
api.write(bucket, environment.INFLUX_ORG, data_to_write)
log.info('Took %s seconds to write %s points to %s', t.end(), len(values), environment.INFLUX_URL)
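# A minimal usage sketch (illustrative only; `PowerRecord` is a hypothetical subclass of
# data_types.AbstractRecord exposing `bucket` and `as_influx_dict()`):
#
#   records = [PowerRecord(bucket='energy', watts=412.0),
#              PowerRecord(bucket='energy', watts=398.5)]
#   write(records)  # groups the records by bucket and writes each point synchronously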
|
from __future__ import unicode_literals
from documents.models import Document
from documents.permissions import permission_document_create
from documents.tests import (
GenericDocumentViewTestCase, TEST_SMALL_DOCUMENT_PATH,
)
from sources.models import WebFormSource
from sources.tests.literals import (
TEST_SOURCE_LABEL, TEST_SOURCE_UNCOMPRESS_N
)
from sources.wizards import WizardStep
from ..models import Cabinet
from ..wizard_steps import WizardStepCabinets
from .literals import TEST_CABINET_LABEL
class CabinetDocumentUploadTestCase(GenericDocumentViewTestCase):
def setUp(self):
super(CabinetDocumentUploadTestCase, self).setUp()
self.login_user()
self.source = WebFormSource.objects.create(
enabled=True, label=TEST_SOURCE_LABEL,
uncompress=TEST_SOURCE_UNCOMPRESS_N
)
self.document.delete()
def tearDown(self):
super(CabinetDocumentUploadTestCase, self).tearDown()
WizardStep.reregister_all()
def _request_upload_interactive_document_create_view(self):
with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
return self.post(
viewname='sources:upload_interactive', args=(self.source.pk,),
data={
'document_type_id': self.document_type.pk,
'source-file': file_object,
'cabinets': self.cabinet.pk
}
)
def _create_cabinet(self):
self.cabinet = Cabinet.objects.create(label=TEST_CABINET_LABEL)
def test_upload_interactive_view_with_access(self):
self._create_cabinet()
self.grant_access(
permission=permission_document_create, obj=self.document_type
)
response = self._request_upload_interactive_document_create_view()
self.assertEqual(response.status_code, 302)
self.assertTrue(self.cabinet in Document.objects.first().cabinets.all())
def _request_wizard_view(self):
return self.get(viewname='sources:document_create_multiple')
def test_upload_interactive_cabinet_selection_view_with_access(self):
WizardStep.deregister_all()
WizardStep.reregister(name=WizardStepCabinets.name)
self._create_cabinet()
self.grant_access(
permission=permission_document_create, obj=self.document_type
)
response = self._request_wizard_view()
self.assertEqual(response.status_code, 200)
|
/**
* Copyright (c) 2006-2015, JGraph Ltd
* Copyright (c) 2006-2015, Gaudenz Alder
*/
/**
* Class: mxSwimlaneOrdering
*
* An implementation of the first stage of the Sugiyama layout. Straightforward
* longest path calculation of layer assignment
*
* Constructor: mxSwimlaneOrdering
*
* Creates a cycle remover for the given internal model.
*/
function mxSwimlaneOrdering(layout)
{
this.layout = layout;
};
/**
* Extends mxHierarchicalLayoutStage.
*/
mxSwimlaneOrdering.prototype = new mxHierarchicalLayoutStage();
mxSwimlaneOrdering.prototype.constructor = mxSwimlaneOrdering;
/**
* Variable: layout
*
* Reference to the enclosing <mxHierarchicalLayout>.
*/
mxSwimlaneOrdering.prototype.layout = null;
/**
* Function: execute
*
* Takes the graph detail and configuration information within the facade
* and creates the resulting laid out graph within that facade for further
* use.
*/
mxSwimlaneOrdering.prototype.execute = function(parent)
{
var model = this.layout.getModel();
var seenNodes = new Object();
var unseenNodes = mxUtils.clone(model.vertexMapper, null, true);
// Perform a dfs through the internal model. If a cycle is found,
// reverse it.
var rootsArray = null;
if (model.roots != null)
{
var modelRoots = model.roots;
rootsArray = [];
for (var i = 0; i < modelRoots.length; i++)
{
var nodeId = mxCellPath.create(modelRoots[i]);
rootsArray[i] = model.vertexMapper.get(modelRoots[i]);
}
}
model.visit(function(parent, node, connectingEdge, layer, seen)
{
// Check if the cell is in its own ancestor list, if so
// invert the connecting edge and reverse the target/source
// relationship to that edge in the parent and the cell
// Ancestor hashes only line up within a swimlane
var isAncestor = parent != null && parent.swimlaneIndex == node.swimlaneIndex && node.isAncestor(parent);
// If the source->target swimlane indices go from higher to
// lower, the edge is reversed
var reversedOverSwimlane = parent != null && connectingEdge != null &&
parent.swimlaneIndex < node.swimlaneIndex && connectingEdge.source == node;
if (isAncestor)
{
connectingEdge.invert();
mxUtils.remove(connectingEdge, parent.connectsAsSource);
node.connectsAsSource.push(connectingEdge);
parent.connectsAsTarget.push(connectingEdge);
mxUtils.remove(connectingEdge, node.connectsAsTarget);
}
else if (reversedOverSwimlane)
{
connectingEdge.invert();
mxUtils.remove(connectingEdge, parent.connectsAsTarget);
node.connectsAsTarget.push(connectingEdge);
parent.connectsAsSource.push(connectingEdge);
mxUtils.remove(connectingEdge, node.connectsAsSource);
}
var cellId = mxCellPath.create(node.cell);
seenNodes[cellId] = node;
delete unseenNodes[cellId];
}, rootsArray, true, null);
};
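/**
 * A minimal usage sketch (illustrative only). The stage is normally driven by the
 * enclosing hierarchical layout rather than called directly; assuming `layout` is a
 * swimlane-aware hierarchical layout whose internal model has already been built:
 *
 *   var ordering = new mxSwimlaneOrdering(layout);
 *   ordering.execute(parent); // inverts edges that would otherwise form cycles
 */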
|
const config = require('config');
const jwt = require('jsonwebtoken');
const _ = require('lodash');
const moment = require('moment');
const { cache, binance, mongo, PubSub, slack } = require('../../helpers');
const isValidCachedExchangeSymbols = exchangeSymbols =>
_.get(
exchangeSymbols,
[Object.keys(exchangeSymbols)[0], 'minNotional'],
null
) !== null;
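// A minimal sketch of the cached shape this helper validates (illustrative values only):
//   {
//     BTCUSDT: { symbol: 'BTCUSDT', quoteAsset: 'USDT', minNotional: 10 },
//     ETHUSDT: { symbol: 'ETHUSDT', quoteAsset: 'USDT', minNotional: 10 }
//   }
// Older cache entries missing `minNotional` on the first key are treated as invalid,
// which makes cacheExchangeSymbols below rebuild the cache.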
/**
* Retrieve cached exchange symbols.
* If not cached, retrieve exchange info from API and cache it.
*
* @param {*} logger
* @param {*} globalConfiguration
*/
const cacheExchangeSymbols = async (logger, _globalConfiguration) => {
const cachedExchangeSymbols =
JSON.parse(await cache.hget('trailing-trade-common', 'exchange-symbols')) ||
{};
// If there is already cached exchange symbols, don't need to cache again.
if (
_.isEmpty(cachedExchangeSymbols) === false &&
// For backward compatibility, verify the cached value is valid.
isValidCachedExchangeSymbols(cachedExchangeSymbols) === true
) {
return;
}
// Retrieve cached exchange information
const cachedExchangeInfo =
JSON.parse(await cache.hget('trailing-trade-common', 'exchange-info')) ||
{};
let exchangeInfo = cachedExchangeInfo;
if (_.isEmpty(cachedExchangeInfo) === true) {
logger.info(
{ function: 'exchangeInfo' },
'Retrieving exchange info from API'
);
exchangeInfo = await binance.client.exchangeInfo();
await cache.hset(
'trailing-trade-common',
'exchange-info',
JSON.stringify(exchangeInfo),
3600
);
}
logger.info('Retrieved exchange info from API');
const { symbols } = exchangeInfo;
const exchangeSymbols = symbols.reduce((acc, symbol) => {
const minNotionalFilter = _.find(symbol.filters, {
filterType: 'MIN_NOTIONAL'
});
acc[symbol.symbol] = {
symbol: symbol.symbol,
quoteAsset: symbol.quoteAsset,
minNotional: parseFloat(minNotionalFilter.minNotional)
};
return acc;
}, {});
await cache.hset(
'trailing-trade-common',
'exchange-symbols',
JSON.stringify(exchangeSymbols),
3600
);
logger.info({ exchangeSymbols }, 'Saved exchange symbols to cache');
};
/**
* Add estimatedBTC and canDustTransfer flags to balances
* - Leave this function for future reference
*
* @param {*} logger
* @param {*} accountInfo
* @returns
*/
const extendBalancesWithDustTransfer = async (_logger, rawAccountInfo) => {
const accountInfo = rawAccountInfo;
const ignoreAssets = ['BNB', 'BTC'];
const newBalances = await Promise.all(
accountInfo.balances.map(async b => {
const balance = b;
const symbol = `${b.asset}BTC`;
// Set default value
balance.estimatedBTC = -1;
balance.canDustTransfer = false;
// If asset can be ignored
if (ignoreAssets.includes(balance.asset)) {
return balance;
}
// Get latest candle for asset+BTC pair
const cachedLatestCandle =
JSON.parse(
await cache.hget('trailing-trade-symbols', `${symbol}-latest-candle`)
) || {};
// If the latest candle cannot be found, assume dust transfer is not possible
if (_.isEmpty(cachedLatestCandle)) {
return balance;
}
// https://academy.binance.com/en/articles/converting-dust-on-binance
// In order to qualify, the dust must be worth less than 0.001 BTC
balance.estimatedBTC = Number(
parseFloat(cachedLatestCandle.close) * parseFloat(balance.free)
).toFixed(8);
// If the estimated BTC is less than 0.001, then set dust transfer
if (balance.estimatedBTC <= 0.001) {
balance.canDustTransfer = true;
}
return balance;
})
);
accountInfo.balances = newBalances;
return accountInfo;
};
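// A minimal worked example of the dust check above (illustrative numbers only):
//   free = 120 XYZ, latest close = 0.0000055 BTC
//     estimatedBTC = 0.0000055 * 120 = 0.00066  -> below 0.001, so canDustTransfer = true
//   free = 2 XYZ, latest close = 0.0009 BTC
//     estimatedBTC = 0.0018                     -> above 0.001, so canDustTransfer = false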
/**
* Retrieve account information from API and filter balances
*
* @param {*} logger
*/
const getAccountInfoFromAPI = async logger => {
logger.info({ tag: 'get-account-info' }, 'Retrieving account info from API');
const accountInfo = await binance.client.accountInfo();
accountInfo.balances = accountInfo.balances.reduce((acc, b) => {
const balance = b;
if (+balance.free > 0 || +balance.locked > 0) {
acc.push(balance);
}
return acc;
}, []);
logger.info(
{ tag: 'get-account-info', accountInfo },
'Retrieved account information from API'
);
await cache.hset(
'trailing-trade-common',
'account-info',
JSON.stringify(accountInfo)
);
return accountInfo;
};
/**
* Retrieve account info from cache
* If empty, retrieve from API
*
* @param {*} logger
*/
const getAccountInfo = async logger => {
const accountInfo =
JSON.parse(
await cache.hgetWithoutLock('trailing-trade-common', 'account-info')
) || {};
if (_.isEmpty(accountInfo) === false) {
logger.info(
{ tag: 'get-account-info', accountInfo },
'Retrieved account info from cache'
);
return accountInfo;
}
logger.info(
{ tag: 'get-account-info' },
'Could not parse account information from cache; retrieving from API'
);
return getAccountInfoFromAPI(logger);
};
/**
* Get open orders
*
* @param {*} logger
*/
const getOpenOrdersFromAPI = async logger => {
logger.info(
{ debug: true, function: 'openOrders' },
'Retrieving open orders from API'
);
const openOrders = await binance.client.openOrders({
recvWindow: 10000
});
logger.info({ openOrders }, 'Retrieved open orders from API');
return openOrders;
};
/**
* Get open orders
*
* @param {*} logger
* @param {*} symbol
*/
const getOpenOrdersBySymbolFromAPI = async (logger, symbol) => {
logger.info(
{ debug: true, function: 'openOrders' },
'Retrieving open orders by symbol from API'
);
const openOrders = await binance.client.openOrders({
symbol,
recvWindow: 10000
});
logger.info({ openOrders }, 'Retrieved open orders by symbol from API');
return openOrders;
};
/**
* Refresh open orders for the symbol.
* Get cached open orders and merge them with the symbol's open orders.
* This is a necessary step to cover the 2-second gap.
* The open orders cache will be refreshed by the indicator job.
*
* @param {*} logger
* @param {*} symbol
*/
const getAndCacheOpenOrdersForSymbol = async (logger, symbol) => {
// Retrieve open orders from API first
const symbolOpenOrders = await getOpenOrdersBySymbolFromAPI(logger, symbol);
logger.info(
{
symbol,
symbolOpenOrders
},
'Open orders from API'
);
await cache.hset(
'trailing-trade-open-orders',
symbol,
JSON.stringify(symbolOpenOrders)
);
return symbolOpenOrders;
};
/**
* Get last buy price from mongodb
*
* @param {*} logger
* @param {*} symbol
*/
const getLastBuyPrice = async (logger, symbol) =>
mongo.findOne(logger, 'trailing-trade-symbols', {
key: `${symbol}-last-buy-price`
});
/**
* Save last buy price to mongodb
*
* @param {*} logger
* @param {*} symbol
* @param {*} param2
*/
const saveLastBuyPrice = async (logger, symbol, { lastBuyPrice, quantity }) => {
logger.info(
{ tag: 'save-last-buy-price', symbol, lastBuyPrice, quantity },
'Save last buy price'
);
const result = await mongo.upsertOne(
logger,
'trailing-trade-symbols',
{ key: `${symbol}-last-buy-price` },
{
key: `${symbol}-last-buy-price`,
lastBuyPrice,
quantity
}
);
// Refresh configuration
await cache.hdel('trailing-trade-configurations', symbol);
return result;
};
const removeLastBuyPrice = async (logger, symbol) => {
logger.info(
{ tag: 'remove-last-buy-price', symbol },
'Remove last buy price'
);
const result = await mongo.deleteOne(logger, 'trailing-trade-symbols', {
key: `${symbol}-last-buy-price`
});
// Refresh configuration
await cache.hdel('trailing-trade-configurations', symbol);
return result;
};
/**
* Lock symbol
*
* @param {*} logger
* @param {*} symbol
* @param {*} ttl
*
* @returns
*/
const lockSymbol = async (logger, symbol, ttl = 5) => {
logger.info({ debug: true, symbol }, `Lock ${symbol} for ${ttl} seconds`);
return cache.hset('bot-lock', symbol, true, ttl);
};
/**
* Check if symbol is locked
*
* @param {*} logger
* @param {*} symbol
* @returns
*/
const isSymbolLocked = async (logger, symbol) => {
const isLocked = (await cache.hget('bot-lock', symbol)) === 'true';
if (isLocked === true) {
logger.info(
{ debug: true, symbol, isLocked },
`🔒 Symbol is locked - ${symbol}`
);
} else {
logger.info(
{ debug: true, symbol, isLocked },
`🔓 Symbol is not locked - ${symbol} `
);
}
return isLocked;
};
/**
* Unlock symbol
*
* @param {*} logger
* @param {*} symbol
* @returns
*/
const unlockSymbol = async (logger, symbol) => {
logger.info({ debug: true, symbol }, `Unlock ${symbol}`);
return cache.hdel('bot-lock', symbol);
};
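// A minimal usage sketch of the locking helpers above (illustrative only):
//   if (!(await isSymbolLocked(logger, 'BTCUSDT'))) {
//     await lockSymbol(logger, 'BTCUSDT', 10); // hold the lock for up to 10 seconds
//     try {
//       // ... perform symbol-specific work here ...
//     } finally {
//       await unlockSymbol(logger, 'BTCUSDT');
//     }
//   }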
/**
* Disable action
*
* @param {*} symbol
* @param {*} reason
* @param {*} ttl
*
* @returns
*/
const disableAction = async (symbol, reason, ttl) =>
cache.set(`${symbol}-disable-action`, JSON.stringify(reason), ttl);
/**
* Check if the action is disabled.
*
* @param {*} symbol
* @returns
*/
const isActionDisabled = async symbol => {
const result = await cache.getWithTTL(`${symbol}-disable-action`);
if (result === null) {
return { isDisabled: false, ttl: -2 };
}
const ttl = result[0][1];
const reason = JSON.parse(result[1][1]) || {};
return { isDisabled: ttl > 0, ttl, ...reason };
};
/**
* Re-enable action stopped by stop loss
*
* @param {*} logger
* @param {*} symbol
* @returns
*/
const deleteDisableAction = async (logger, symbol) => {
logger.info({ debug: true, symbol }, `Enable action for ${symbol}`);
return cache.del(`${symbol}-disable-action`);
};
/**
* Get API limit
*
* @param {*} logger
* @returns
*/
const getAPILimit = logger => {
const apiInfo = binance.client.getInfo();
logger.info({ apiInfo }, 'API info');
return parseInt(apiInfo.spot?.usedWeight1m || 0, 10);
};
/**
* Check if API limit is over
*
* @param {*} logger
* @returns
*/
const isExceedAPILimit = logger => {
const usedWeight1m = getAPILimit(logger);
return usedWeight1m > 1180;
};
/**
* Get override data for Symbol
*
* @param {*} logger
* @param {*} symbol
* @returns
*/
const getOverrideDataForSymbol = async (_logger, symbol) => {
const overrideData = await cache.hget('trailing-trade-override', symbol);
if (!overrideData) {
return null;
}
return JSON.parse(overrideData);
};
/**
* Remove override data for Symbol
*
* @param {*} _logger
* @param {*} symbol
* @returns
*/
const removeOverrideDataForSymbol = async (_logger, symbol) =>
cache.hdel('trailing-trade-override', symbol);
/**
* Get override data for Indicator
*
* @param {*} logger
* @param {*} key
* @returns
*/
const getOverrideDataForIndicator = async (_logger, key) => {
const overrideData = await cache.hget(
'trailing-trade-indicator-override',
key
);
if (!overrideData) {
return null;
}
return JSON.parse(overrideData);
};
/**
* Remove override data for Indicator
*
* @param {*} _logger
* @param {*} key
* @returns
*/
const removeOverrideDataForIndicator = async (_logger, key) =>
cache.hdel('trailing-trade-indicator-override', key);
/**
* Retrieve last buy price and recalculate new last buy price
*
* @param {*} logger
* @param {*} symbol
* @param {*} order
*/
const calculateLastBuyPrice = async (logger, symbol, order) => {
const { type, executedQty, cummulativeQuoteQty } = order;
const lastBuyPriceDoc = await getLastBuyPrice(logger, symbol);
const orgLastBuyPrice = _.get(lastBuyPriceDoc, 'lastBuyPrice', 0);
const orgQuantity = _.get(lastBuyPriceDoc, 'quantity', 0);
const orgTotalAmount = orgLastBuyPrice * orgQuantity;
logger.info(
{ orgLastBuyPrice, orgQuantity, orgTotalAmount },
'Existing last buy price'
);
const filledQuoteQty = parseFloat(cummulativeQuoteQty);
const filledQuantity = parseFloat(executedQty);
const newQuantity = orgQuantity + filledQuantity;
const newTotalAmount = orgTotalAmount + filledQuoteQty;
const newLastBuyPrice = newTotalAmount / newQuantity;
logger.info(
{ newLastBuyPrice, newTotalAmount, newQuantity },
'New last buy price'
);
await saveLastBuyPrice(logger, symbol, {
lastBuyPrice: newLastBuyPrice,
quantity: newQuantity
});
PubSub.publish('frontend-notification', {
type: 'success',
title: `New last buy price for ${symbol} has been updated.`
});
slack.sendMessage(
`${symbol} Last buy price Updated (${moment().format(
'HH:mm:ss.SSS'
)}): *${type}*\n` +
`- Order Result: \`\`\`${JSON.stringify(
{
orgLastBuyPrice,
orgQuantity,
orgTotalAmount,
newLastBuyPrice,
newQuantity,
newTotalAmount
},
undefined,
2
)}\`\`\`\n` +
`- Current API Usage: ${getAPILimit(logger)}`
);
};
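// A minimal worked example of the recalculation above (illustrative numbers only):
//   existing: lastBuyPrice = 100, quantity = 1      -> orgTotalAmount = 100
//   new fill: executedQty = 0.5, cummulativeQuoteQty = 55
//   newQuantity = 1.5, newTotalAmount = 155, newLastBuyPrice = 155 / 1.5 ≈ 103.33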
/**
* Get symbol information
*
* @param {*} logger
* @param {*} symbol
*/
const getSymbolInfo = async (logger, symbol) => {
const cachedSymbolInfo =
JSON.parse(
await cache.hget('trailing-trade-symbols', `${symbol}-symbol-info`)
) || {};
if (_.isEmpty(cachedSymbolInfo) === false) {
logger.info({ cachedSymbolInfo }, 'Retrieved symbol info from the cache.');
return cachedSymbolInfo;
}
const cachedExchangeInfo =
JSON.parse(await cache.hget('trailing-trade-common', 'exchange-info')) ||
{};
let exchangeInfo = cachedExchangeInfo;
if (_.isEmpty(cachedExchangeInfo) === true) {
logger.info(
{ debug: true, function: 'exchangeInfo' },
'Request exchange info from Binance.'
);
exchangeInfo = await binance.client.exchangeInfo();
await cache.hset(
'trailing-trade-common',
'exchange-info',
JSON.stringify(exchangeInfo),
3600
);
}
logger.info({}, 'Retrieved exchange info.');
const symbolInfo = _.filter(
exchangeInfo.symbols,
s => s.symbol === symbol
)[0];
// eslint-disable-next-line prefer-destructuring
symbolInfo.filterLotSize = _.filter(
symbolInfo.filters,
f => f.filterType === 'LOT_SIZE'
)[0];
// eslint-disable-next-line prefer-destructuring
symbolInfo.filterPrice = _.filter(
symbolInfo.filters,
f => f.filterType === 'PRICE_FILTER'
)[0];
// eslint-disable-next-line prefer-destructuring
symbolInfo.filterMinNotional = _.filter(
symbolInfo.filters,
f => f.filterType === 'MIN_NOTIONAL'
)[0];
logger.info({ symbolInfo }, 'Retrieved symbol info from Binance.');
const finalSymbolInfo = _.pick(symbolInfo, [
'symbol',
'status',
'baseAsset',
'baseAssetPrecision',
'quoteAsset',
'quotePrecision',
'filterLotSize',
'filterPrice',
'filterMinNotional'
]);
cache.hset(
'trailing-trade-symbols',
`${symbol}-symbol-info`,
JSON.stringify(finalSymbolInfo),
3600
);
return finalSymbolInfo;
};
/**
* Verify authentication
*
* @param {*} funcLogger
* @param {*} authToken
* @returns
*/
const verifyAuthenticated = async (funcLogger, authToken) => {
const logger = funcLogger.child({ tag: 'verifyAuthenticated' });
const authenticationEnabled = config.get('authentication.enabled');
if (authenticationEnabled === false) {
logger.info('Authentication is not enabled.');
return true;
}
const jwtSecret = await cache.get('auth-jwt-secret');
logger.info({ authToken, jwtSecret }, 'Verifying authentication');
let data = null;
try {
data = jwt.verify(authToken, jwtSecret, { algorithms: ['HS256'] });
} catch (err) {
logger.info({ err }, 'Failed authentication');
return false;
}
logger.info({ data }, 'Success authentication');
return true;
};
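// A minimal sketch of issuing a token this check would accept (illustrative only;
// assumes the same secret is stored under the 'auth-jwt-secret' cache key):
//   const jwtSecret = await cache.get('auth-jwt-secret');
//   const authToken = jwt.sign({ user: 'admin' }, jwtSecret, { algorithm: 'HS256', expiresIn: '12h' });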
/**
* Save number of buy open orders
*
* @param {*} logger
* @param {*} symbols
*/
const saveNumberOfBuyOpenOrders = async (logger, symbols) => {
const numberOfBuyOpenOrders = await mongo.count(
logger,
'trailing-trade-grid-trade-orders',
{
key: {
$regex: `(${symbols.join('|')})-grid-trade-last-buy-order`
}
}
);
await cache.hset(
'trailing-trade-common',
'number-of-buy-open-orders',
numberOfBuyOpenOrders
);
};
/**
* Get number of buy open orders
*
* @param {*} _logger
* @returns
*/
const getNumberOfBuyOpenOrders = async _logger =>
parseInt(
(await cache.hget('trailing-trade-common', 'number-of-buy-open-orders')) ||
0,
10
);
/**
* Save number of open trades
*
* @param {*} logger
* @param {*} symbols
*/
const saveNumberOfOpenTrades = async (logger, symbols) => {
const numberOfOpenTrades = await mongo.count(
logger,
'trailing-trade-symbols',
{
key: {
$regex: `(${symbols.join('|')})-last-buy-price`
}
}
);
await cache.hset(
'trailing-trade-common',
'number-of-open-trades',
numberOfOpenTrades
);
};
/**
* Get number of open trades
*
* @param {*} _logger
* @returns
*/
const getNumberOfOpenTrades = async _logger =>
parseInt(
(await cache.hget('trailing-trade-common', 'number-of-open-trades')) || 0,
10
);
/**
* Save order statistics
*
* @param {*} logger
* @param {*} symbols
* @returns
*/
const saveOrderStats = async (logger, symbols) =>
Promise.all([
saveNumberOfBuyOpenOrders(logger, symbols),
saveNumberOfOpenTrades(logger, symbols)
]);
/**
* Save override action
*
* @param {*} logger
* @param {*} symbol
* @param {*} overrideData
* @param {*} overrideReason
*/
const saveOverrideAction = async (
logger,
symbol,
overrideData,
overrideReason
) => {
await cache.hset(
'trailing-trade-override',
`${symbol}`,
JSON.stringify(overrideData)
);
slack.sendMessage(
`${symbol} Action (${moment().format('HH:mm:ss.SSS')}): Queued action: ${
overrideData.action
}\n` +
`- Message: ${overrideReason}\n` +
`- Current API Usage: ${getAPILimit(logger)}`
);
PubSub.publish('frontend-notification', {
type: 'info',
title: overrideReason
});
};
/**
* Save override action for indicator
*
* @param {*} logger
* @param {*} type
* @param {*} overrideData
* @param {*} overrideReason
*/
const saveOverrideIndicatorAction = async (
logger,
type,
overrideData,
overrideReason
) => {
await cache.hset(
'trailing-trade-indicator-override',
type,
JSON.stringify(overrideData)
);
slack.sendMessage(
`Action (${moment().format('HH:mm:ss.SSS')}): Queued action: ${
overrideData.action
}\n` +
`- Message: ${overrideReason}\n` +
`- Current API Usage: ${getAPILimit(logger)}`
);
PubSub.publish('frontend-notification', {
type: 'info',
title: overrideReason
});
};
module.exports = {
cacheExchangeSymbols,
getAccountInfoFromAPI,
getAccountInfo,
extendBalancesWithDustTransfer,
getOpenOrdersFromAPI,
getOpenOrdersBySymbolFromAPI,
getAndCacheOpenOrdersForSymbol,
getLastBuyPrice,
saveLastBuyPrice,
removeLastBuyPrice,
lockSymbol,
isSymbolLocked,
unlockSymbol,
disableAction,
isActionDisabled,
deleteDisableAction,
getAPILimit,
isExceedAPILimit,
getOverrideDataForSymbol,
removeOverrideDataForSymbol,
getOverrideDataForIndicator,
removeOverrideDataForIndicator,
calculateLastBuyPrice,
getSymbolInfo,
verifyAuthenticated,
saveNumberOfBuyOpenOrders,
getNumberOfBuyOpenOrders,
saveNumberOfOpenTrades,
getNumberOfOpenTrades,
saveOrderStats,
saveOverrideAction,
saveOverrideIndicatorAction
};
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{"0427f930b2f19a25b61d":function(t,e,r){"use strict";r.d(e,"a",function(){return a});var i,n=r("e8e740b3fab0642e6a30"),s=r("b82d2ea0c8dcd2b25103");function a(t,e){this.center=void 0!==t?t:new s.a,this.radius=void 0!==e?e:0}Object.assign(a.prototype,{set:function(t,e){return this.center.copy(t),this.radius=e,this},setFromPoints:(i=new n.a,function(t,e){var r=this.center;void 0!==e?r.copy(e):i.setFromPoints(t).getCenter(r);for(var n=0,s=0,a=t.length;s<a;s++)n=Math.max(n,r.distanceToSquared(t[s]));return this.radius=Math.sqrt(n),this}),clone:function(){return(new this.constructor).copy(this)},copy:function(t){return this.center.copy(t.center),this.radius=t.radius,this},empty:function(){return this.radius<=0},containsPoint:function(t){return t.distanceToSquared(this.center)<=this.radius*this.radius},distanceToPoint:function(t){return t.distanceTo(this.center)-this.radius},intersectsSphere:function(t){var e=this.radius+t.radius;return t.center.distanceToSquared(this.center)<=e*e},intersectsBox:function(t){return t.intersectsSphere(this)},intersectsPlane:function(t){return Math.abs(t.distanceToPoint(this.center))<=this.radius},clampPoint:function(t,e){var r=this.center.distanceToSquared(t);return void 0===e&&(console.warn("THREE.Sphere: .clampPoint() target is now required"),e=new s.a),e.copy(t),r>this.radius*this.radius&&(e.sub(this.center).normalize(),e.multiplyScalar(this.radius).add(this.center)),e},getBoundingBox:function(t){return void 0===t&&(console.warn("THREE.Sphere: .getBoundingBox() target is now required"),t=new n.a),t.set(this.center,this.center),t.expandByScalar(this.radius),t},applyMatrix4:function(t){return this.center.applyMatrix4(t),this.radius=this.radius*t.getMaxScaleOnAxis(),this},translate:function(t){return this.center.add(t),this},equals:function(t){return t.center.equals(this.center)&&t.radius===this.radius}})},"431d73f7466491f671ed":function(t,e,r){"use strict";function i(t){if(0===t.length)return-1/0;for(var e=t[0],r=1,i=t.length;r<i;++r)t[r]>e&&(e=t[r]);return e}r.d(e,"a",function(){return i})},"4f02046caaa3f6d5dbf3":function(t,e,r){"use strict";r.r(e);var i=r("b82d2ea0c8dcd2b25103"),n=r("e8e740b3fab0642e6a30"),s=r("550464552fd011906622"),a=r("9d3ccbbdd0ff23bbfd32"),o=r("0427f930b2f19a25b61d"),u=r("022920c09d348f259d0d");function h(){this.vertices=[],this.normals=[],this.colors=[],this.uvs=[],this.uvs2=[],this.groups=[],this.morphTargets={},this.skinWeights=[],this.skinIndices=[],this.boundingBox=null,this.boundingSphere=null,this.verticesNeedUpdate=!1,this.normalsNeedUpdate=!1,this.colorsNeedUpdate=!1,this.uvsNeedUpdate=!1,this.groupsNeedUpdate=!1}Object.assign(h.prototype,{computeGroups:function(t){for(var e,r=[],i=void 0,n=t.faces,s=0;s<n.length;s++){var a=n[s];a.materialIndex!==i&&(i=a.materialIndex,void 0!==e&&(e.count=3*s-e.start,r.push(e)),e={start:3*s,materialIndex:i})}void 0!==e&&(e.count=3*s-e.start,r.push(e)),this.groups=r},fromGeometry:function(t){var e,r=t.faces,i=t.vertices,n=t.faceVertexUvs,s=n[0]&&n[0].length>0,a=n[1]&&n[1].length>0,o=t.morphTargets,h=o.length;if(h>0){e=[];for(var d=0;d<h;d++)e[d]={name:o[d].name,data:[]};this.morphTargets.position=e}var c,m=t.morphNormals,l=m.length;if(l>0){c=[];for(d=0;d<l;d++)c[d]={name:m[d].name,data:[]};this.morphTargets.normal=c}var p=t.skinIndices,f=t.skinWeights,y=p.length===i.length,x=f.length===i.length;i.length>0&&0===r.length&&console.error("THREE.DirectGeometry: Faceless geometries are not supported.");for(d=0;d<r.length;d++){var 
g=r[d];this.vertices.push(i[g.a],i[g.b],i[g.c]);var v=g.vertexNormals;if(3===v.length)this.normals.push(v[0],v[1],v[2]);else{var b=g.normal;this.normals.push(b,b,b)}var w,B=g.vertexColors;if(3===B.length)this.colors.push(B[0],B[1],B[2]);else{var A=g.color;this.colors.push(A,A,A)}if(!0===s)void 0!==(w=n[0][d])?this.uvs.push(w[0],w[1],w[2]):(console.warn("THREE.DirectGeometry.fromGeometry(): Undefined vertexUv ",d),this.uvs.push(new u.a,new u.a,new u.a));if(!0===a)void 0!==(w=n[1][d])?this.uvs2.push(w[0],w[1],w[2]):(console.warn("THREE.DirectGeometry.fromGeometry(): Undefined vertexUv2 ",d),this.uvs2.push(new u.a,new u.a,new u.a));for(var N=0;N<h;N++){var z=o[N].vertices;e[N].data.push(z[g.a],z[g.b],z[g.c])}for(N=0;N<l;N++){var U=m[N].vertexNormals[d];c[N].data.push(U.a,U.b,U.c)}y&&this.skinIndices.push(p[g.a],p[g.b],p[g.c]),x&&this.skinWeights.push(f[g.a],f[g.b],f[g.c])}return this.computeGroups(t),this.verticesNeedUpdate=t.verticesNeedUpdate,this.normalsNeedUpdate=t.normalsNeedUpdate,this.colorsNeedUpdate=t.colorsNeedUpdate,this.uvsNeedUpdate=t.uvsNeedUpdate,this.groupsNeedUpdate=t.groupsNeedUpdate,this}});var d=r("3fcced0dfecbf072faf3"),c=r("a4f0ef367f0a790cdce8"),m=r("9fdadb962569641cba1f"),l=r("d1886278d8ad2602c715"),p=r("431d73f7466491f671ed");r.d(e,"BufferGeometry",function(){return w});var f,y,x,g,v,b=1;function w(){Object.defineProperty(this,"id",{value:b+=2}),this.uuid=l.a.generateUUID(),this.name="",this.type="BufferGeometry",this.index=null,this.attributes={},this.morphAttributes={},this.groups=[],this.boundingBox=null,this.boundingSphere=null,this.drawRange={start:0,count:1/0},this.userData={}}w.prototype=Object.assign(Object.create(s.a.prototype),{constructor:w,isBufferGeometry:!0,getIndex:function(){return this.index},setIndex:function(t){Array.isArray(t)?this.index=new(Object(p.a)(t)>65535?a.Uint32BufferAttribute:a.Uint16BufferAttribute)(t,1):this.index=t},addAttribute:function(t,e){return e&&e.isBufferAttribute||e&&e.isInterleavedBufferAttribute?"index"===t?(console.warn("THREE.BufferGeometry.addAttribute: Use .setIndex() for index attribute."),this.setIndex(e),this):(this.attributes[t]=e,this):(console.warn("THREE.BufferGeometry: .addAttribute() now expects ( name, attribute )."),this.addAttribute(t,new a.BufferAttribute(arguments[1],arguments[2])))},getAttribute:function(t){return this.attributes[t]},removeAttribute:function(t){return delete this.attributes[t],this},addGroup:function(t,e,r){this.groups.push({start:t,count:e,materialIndex:void 0!==r?r:0})},clearGroups:function(){this.groups=[]},setDrawRange:function(t,e){this.drawRange.start=t,this.drawRange.count=e},applyMatrix:function(t){var e=this.attributes.position;void 0!==e&&(t.applyToBufferAttribute(e),e.needsUpdate=!0);var r=this.attributes.normal;void 0!==r&&((new m.a).getNormalMatrix(t).applyToBufferAttribute(r),r.needsUpdate=!0);return null!==this.boundingBox&&this.computeBoundingBox(),null!==this.boundingSphere&&this.computeBoundingSphere(),this},rotateX:(v=new c.a,function(t){return v.makeRotationX(t),this.applyMatrix(v),this}),rotateY:function(){var t=new c.a;return function(e){return t.makeRotationY(e),this.applyMatrix(t),this}}(),rotateZ:function(){var t=new c.a;return function(e){return t.makeRotationZ(e),this.applyMatrix(t),this}}(),translate:function(){var t=new c.a;return function(e,r,i){return t.makeTranslation(e,r,i),this.applyMatrix(t),this}}(),scale:function(){var t=new c.a;return function(e,r,i){return t.makeScale(e,r,i),this.applyMatrix(t),this}}(),lookAt:(g=new 
d.a,function(t){g.lookAt(t),g.updateMatrix(),this.applyMatrix(g.matrix)}),center:(x=new i.a,function(){return this.computeBoundingBox(),this.boundingBox.getCenter(x).negate(),this.translate(x.x,x.y,x.z),this}),setFromObject:function(t){var e=t.geometry;if(t.isPoints||t.isLine){var r=new a.Float32BufferAttribute(3*e.vertices.length,3),i=new a.Float32BufferAttribute(3*e.colors.length,3);if(this.addAttribute("position",r.copyVector3sArray(e.vertices)),this.addAttribute("color",i.copyColorsArray(e.colors)),e.lineDistances&&e.lineDistances.length===e.vertices.length){var n=new a.Float32BufferAttribute(e.lineDistances.length,1);this.addAttribute("lineDistance",n.copyArray(e.lineDistances))}null!==e.boundingSphere&&(this.boundingSphere=e.boundingSphere.clone()),null!==e.boundingBox&&(this.boundingBox=e.boundingBox.clone())}else t.isMesh&&e&&e.isGeometry&&this.fromGeometry(e);return this},setFromPoints:function(t){for(var e=[],r=0,i=t.length;r<i;r++){var n=t[r];e.push(n.x,n.y,n.z||0)}return this.addAttribute("position",new a.Float32BufferAttribute(e,3)),this},updateFromObject:function(t){var e,r=t.geometry;if(t.isMesh){var i=r.__directGeometry;if(!0===r.elementsNeedUpdate&&(i=void 0,r.elementsNeedUpdate=!1),void 0===i)return this.fromGeometry(r);i.verticesNeedUpdate=r.verticesNeedUpdate,i.normalsNeedUpdate=r.normalsNeedUpdate,i.colorsNeedUpdate=r.colorsNeedUpdate,i.uvsNeedUpdate=r.uvsNeedUpdate,i.groupsNeedUpdate=r.groupsNeedUpdate,r.verticesNeedUpdate=!1,r.normalsNeedUpdate=!1,r.colorsNeedUpdate=!1,r.uvsNeedUpdate=!1,r.groupsNeedUpdate=!1,r=i}return!0===r.verticesNeedUpdate&&(void 0!==(e=this.attributes.position)&&(e.copyVector3sArray(r.vertices),e.needsUpdate=!0),r.verticesNeedUpdate=!1),!0===r.normalsNeedUpdate&&(void 0!==(e=this.attributes.normal)&&(e.copyVector3sArray(r.normals),e.needsUpdate=!0),r.normalsNeedUpdate=!1),!0===r.colorsNeedUpdate&&(void 0!==(e=this.attributes.color)&&(e.copyColorsArray(r.colors),e.needsUpdate=!0),r.colorsNeedUpdate=!1),r.uvsNeedUpdate&&(void 0!==(e=this.attributes.uv)&&(e.copyVector2sArray(r.uvs),e.needsUpdate=!0),r.uvsNeedUpdate=!1),r.lineDistancesNeedUpdate&&(void 0!==(e=this.attributes.lineDistance)&&(e.copyArray(r.lineDistances),e.needsUpdate=!0),r.lineDistancesNeedUpdate=!1),r.groupsNeedUpdate&&(r.computeGroups(t.geometry),this.groups=r.groups,r.groupsNeedUpdate=!1),this},fromGeometry:function(t){return t.__directGeometry=(new h).fromGeometry(t),this.fromDirectGeometry(t.__directGeometry)},fromDirectGeometry:function(t){var e=new Float32Array(3*t.vertices.length);if(this.addAttribute("position",new a.BufferAttribute(e,3).copyVector3sArray(t.vertices)),t.normals.length>0){var r=new Float32Array(3*t.normals.length);this.addAttribute("normal",new a.BufferAttribute(r,3).copyVector3sArray(t.normals))}if(t.colors.length>0){var i=new Float32Array(3*t.colors.length);this.addAttribute("color",new a.BufferAttribute(i,3).copyColorsArray(t.colors))}if(t.uvs.length>0){var n=new Float32Array(2*t.uvs.length);this.addAttribute("uv",new a.BufferAttribute(n,2).copyVector2sArray(t.uvs))}if(t.uvs2.length>0){var s=new Float32Array(2*t.uvs2.length);this.addAttribute("uv2",new a.BufferAttribute(s,2).copyVector2sArray(t.uvs2))}for(var o in this.groups=t.groups,t.morphTargets){for(var u=[],h=t.morphTargets[o],d=0,c=h.length;d<c;d++){var m=h[d],l=new a.Float32BufferAttribute(3*m.data.length,3);l.name=m.name,u.push(l.copyVector3sArray(m.data))}this.morphAttributes[o]=u}if(t.skinIndices.length>0){var p=new 
a.Float32BufferAttribute(4*t.skinIndices.length,4);this.addAttribute("skinIndex",p.copyVector4sArray(t.skinIndices))}if(t.skinWeights.length>0){var f=new a.Float32BufferAttribute(4*t.skinWeights.length,4);this.addAttribute("skinWeight",f.copyVector4sArray(t.skinWeights))}return null!==t.boundingSphere&&(this.boundingSphere=t.boundingSphere.clone()),null!==t.boundingBox&&(this.boundingBox=t.boundingBox.clone()),this},computeBoundingBox:function(){null===this.boundingBox&&(this.boundingBox=new n.a);var t=this.attributes.position;void 0!==t?this.boundingBox.setFromBufferAttribute(t):this.boundingBox.makeEmpty(),(isNaN(this.boundingBox.min.x)||isNaN(this.boundingBox.min.y)||isNaN(this.boundingBox.min.z))&&console.error('THREE.BufferGeometry.computeBoundingBox: Computed min/max have NaN values. The "position" attribute is likely to have NaN values.',this)},computeBoundingSphere:(f=new n.a,y=new i.a,function(){null===this.boundingSphere&&(this.boundingSphere=new o.a);var t=this.attributes.position;if(t){var e=this.boundingSphere.center;f.setFromBufferAttribute(t),f.getCenter(e);for(var r=0,i=0,n=t.count;i<n;i++)y.x=t.getX(i),y.y=t.getY(i),y.z=t.getZ(i),r=Math.max(r,e.distanceToSquared(y));this.boundingSphere.radius=Math.sqrt(r),isNaN(this.boundingSphere.radius)&&console.error('THREE.BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.',this)}}),computeFaceNormals:function(){},computeVertexNormals:function(){var t=this.index,e=this.attributes;if(e.position){var r=e.position.array;if(void 0===e.normal)this.addAttribute("normal",new a.BufferAttribute(new Float32Array(r.length),3));else for(var n=e.normal.array,s=0,o=n.length;s<o;s++)n[s]=0;var u,h,d,c=e.normal.array,m=new i.a,l=new i.a,p=new i.a,f=new i.a,y=new i.a;if(t){var x=t.array;for(s=0,o=t.count;s<o;s+=3)u=3*x[s+0],h=3*x[s+1],d=3*x[s+2],m.fromArray(r,u),l.fromArray(r,h),p.fromArray(r,d),f.subVectors(p,l),y.subVectors(m,l),f.cross(y),c[u]+=f.x,c[u+1]+=f.y,c[u+2]+=f.z,c[h]+=f.x,c[h+1]+=f.y,c[h+2]+=f.z,c[d]+=f.x,c[d+1]+=f.y,c[d+2]+=f.z}else for(s=0,o=r.length;s<o;s+=9)m.fromArray(r,s),l.fromArray(r,s+3),p.fromArray(r,s+6),f.subVectors(p,l),y.subVectors(m,l),f.cross(y),c[s]=f.x,c[s+1]=f.y,c[s+2]=f.z,c[s+3]=f.x,c[s+4]=f.y,c[s+5]=f.z,c[s+6]=f.x,c[s+7]=f.y,c[s+8]=f.z;this.normalizeNormals(),e.normal.needsUpdate=!0}},merge:function(t,e){if(t&&t.isBufferGeometry){void 0===e&&(e=0,console.warn("THREE.BufferGeometry.merge(): Overwriting original geometry, starting at offset=0. 
Use BufferGeometryUtils.mergeBufferGeometries() for lossless merge."));var r=this.attributes;for(var i in r)if(void 0!==t.attributes[i])for(var n=r[i].array,s=t.attributes[i],a=s.array,o=0,u=s.itemSize*e;o<a.length;o++,u++)n[u]=a[o];return this}console.error("THREE.BufferGeometry.merge(): geometry not an instance of THREE.BufferGeometry.",t)},normalizeNormals:function(){var t=new i.a;return function(){for(var e=this.attributes.normal,r=0,i=e.count;r<i;r++)t.x=e.getX(r),t.y=e.getY(r),t.z=e.getZ(r),t.normalize(),e.setXYZ(r,t.x,t.y,t.z)}}(),toNonIndexed:function(){function t(t,e){for(var r=t.array,i=t.itemSize,n=new r.constructor(e.length*i),s=0,o=0,u=0,h=e.length;u<h;u++){s=e[u]*i;for(var d=0;d<i;d++)n[o++]=r[s++]}return new a.BufferAttribute(n,i)}if(null===this.index)return console.warn("THREE.BufferGeometry.toNonIndexed(): Geometry is already non-indexed."),this;var e=new w,r=this.index.array,i=this.attributes;for(var n in i){var s=t(i[n],r);e.addAttribute(n,s)}var o=this.morphAttributes;for(n in o){for(var u=[],h=o[n],d=0,c=h.length;d<c;d++){s=t(h[d],r);u.push(s)}e.morphAttributes[n]=u}for(var m=this.groups,l=(d=0,m.length);d<l;d++){var p=m[d];e.addGroup(p.start,p.count,p.materialIndex)}return e},toJSON:function(){var t={metadata:{version:4.5,type:"BufferGeometry",generator:"BufferGeometry.toJSON"}};if(t.uuid=this.uuid,t.type=this.type,""!==this.name&&(t.name=this.name),Object.keys(this.userData).length>0&&(t.userData=this.userData),void 0!==this.parameters){var e=this.parameters;for(var r in e)void 0!==e[r]&&(t[r]=e[r]);return t}t.data={attributes:{}};var i=this.index;if(null!==i){var n=Array.prototype.slice.call(i.array);t.data.index={type:i.array.constructor.name,array:n}}var s=this.attributes;for(var r in s){var a=s[r];n=Array.prototype.slice.call(a.array);t.data.attributes[r]={itemSize:a.itemSize,type:a.array.constructor.name,array:n,normalized:a.normalized}}var o=this.groups;o.length>0&&(t.data.groups=JSON.parse(JSON.stringify(o)));var u=this.boundingSphere;return null!==u&&(t.data.boundingSphere={center:u.center.toArray(),radius:u.radius}),t},clone:function(){return(new w).copy(this)},copy:function(t){var e,r,i;this.index=null,this.attributes={},this.morphAttributes={},this.groups=[],this.boundingBox=null,this.boundingSphere=null,this.name=t.name;var n=t.index;null!==n&&this.setIndex(n.clone());var s=t.attributes;for(e in s){var a=s[e];this.addAttribute(e,a.clone())}var o=t.morphAttributes;for(e in o){var u=[],h=o[e];for(r=0,i=h.length;r<i;r++)u.push(h[r].clone());this.morphAttributes[e]=u}var d=t.groups;for(r=0,i=d.length;r<i;r++){var c=d[r];this.addGroup(c.start,c.count,c.materialIndex)}var m=t.boundingBox;null!==m&&(this.boundingBox=m.clone());var l=t.boundingSphere;return null!==l&&(this.boundingSphere=l.clone()),this.drawRange.start=t.drawRange.start,this.drawRange.count=t.drawRange.count,this.userData=t.userData,this},dispose:function(){this.dispatchEvent({type:"dispose"})}})},e8e740b3fab0642e6a30:function(t,e,r){"use strict";r.d(e,"a",function(){return u});var i,n,s,a=r("b82d2ea0c8dcd2b25103"),o=r("0427f930b2f19a25b61d");function u(t,e){this.min=void 0!==t?t:new a.a(1/0,1/0,1/0),this.max=void 0!==e?e:new a.a(-1/0,-1/0,-1/0)}Object.assign(u.prototype,{isBox3:!0,set:function(t,e){return this.min.copy(t),this.max.copy(e),this},setFromArray:function(t){for(var e=1/0,r=1/0,i=1/0,n=-1/0,s=-1/0,a=-1/0,o=0,u=t.length;o<u;o+=3){var h=t[o],d=t[o+1],c=t[o+2];h<e&&(e=h),d<r&&(r=d),c<i&&(i=c),h>n&&(n=h),d>s&&(s=d),c>a&&(a=c)}return 
this.min.set(e,r,i),this.max.set(n,s,a),this},setFromBufferAttribute:function(t){for(var e=1/0,r=1/0,i=1/0,n=-1/0,s=-1/0,a=-1/0,o=0,u=t.count;o<u;o++){var h=t.getX(o),d=t.getY(o),c=t.getZ(o);h<e&&(e=h),d<r&&(r=d),c<i&&(i=c),h>n&&(n=h),d>s&&(s=d),c>a&&(a=c)}return this.min.set(e,r,i),this.max.set(n,s,a),this},setFromPoints:function(t){this.makeEmpty();for(var e=0,r=t.length;e<r;e++)this.expandByPoint(t[e]);return this},setFromCenterAndSize:(s=new a.a,function(t,e){var r=s.copy(e).multiplyScalar(.5);return this.min.copy(t).sub(r),this.max.copy(t).add(r),this}),setFromObject:function(t){return this.makeEmpty(),this.expandByObject(t)},clone:function(){return(new this.constructor).copy(this)},copy:function(t){return this.min.copy(t.min),this.max.copy(t.max),this},makeEmpty:function(){return this.min.x=this.min.y=this.min.z=1/0,this.max.x=this.max.y=this.max.z=-1/0,this},isEmpty:function(){return this.max.x<this.min.x||this.max.y<this.min.y||this.max.z<this.min.z},getCenter:function(t){return void 0===t&&(console.warn("THREE.Box3: .getCenter() target is now required"),t=new a.a),this.isEmpty()?t.set(0,0,0):t.addVectors(this.min,this.max).multiplyScalar(.5)},getSize:function(t){return void 0===t&&(console.warn("THREE.Box3: .getSize() target is now required"),t=new a.a),this.isEmpty()?t.set(0,0,0):t.subVectors(this.max,this.min)},expandByPoint:function(t){return this.min.min(t),this.max.max(t),this},expandByVector:function(t){return this.min.sub(t),this.max.add(t),this},expandByScalar:function(t){return this.min.addScalar(-t),this.max.addScalar(t),this},expandByObject:function(){var t,e,r,i=new a.a;function n(n){var s=n.geometry;if(void 0!==s)if(s.isGeometry){var a=s.vertices;for(e=0,r=a.length;e<r;e++)i.copy(a[e]),i.applyMatrix4(n.matrixWorld),t.expandByPoint(i)}else if(s.isBufferGeometry){var o=s.attributes.position;if(void 0!==o)for(e=0,r=o.count;e<r;e++)i.fromBufferAttribute(o,e).applyMatrix4(n.matrixWorld),t.expandByPoint(i)}}return function(e){return t=this,e.updateMatrixWorld(!0),e.traverse(n),this}}(),containsPoint:function(t){return!(t.x<this.min.x||t.x>this.max.x||t.y<this.min.y||t.y>this.max.y||t.z<this.min.z||t.z>this.max.z)},containsBox:function(t){return this.min.x<=t.min.x&&t.max.x<=this.max.x&&this.min.y<=t.min.y&&t.max.y<=this.max.y&&this.min.z<=t.min.z&&t.max.z<=this.max.z},getParameter:function(t,e){return void 0===e&&(console.warn("THREE.Box3: .getParameter() target is now required"),e=new a.a),e.set((t.x-this.min.x)/(this.max.x-this.min.x),(t.y-this.min.y)/(this.max.y-this.min.y),(t.z-this.min.z)/(this.max.z-this.min.z))},intersectsBox:function(t){return!(t.max.x<this.min.x||t.min.x>this.max.x||t.max.y<this.min.y||t.min.y>this.max.y||t.max.z<this.min.z||t.min.z>this.max.z)},intersectsSphere:(n=new a.a,function(t){return this.clampPoint(t.center,n),n.distanceToSquared(t.center)<=t.radius*t.radius}),intersectsPlane:function(t){var e,r;return t.normal.x>0?(e=t.normal.x*this.min.x,r=t.normal.x*this.max.x):(e=t.normal.x*this.max.x,r=t.normal.x*this.min.x),t.normal.y>0?(e+=t.normal.y*this.min.y,r+=t.normal.y*this.max.y):(e+=t.normal.y*this.max.y,r+=t.normal.y*this.min.y),t.normal.z>0?(e+=t.normal.z*this.min.z,r+=t.normal.z*this.max.z):(e+=t.normal.z*this.max.z,r+=t.normal.z*this.min.z),e<=-t.constant&&r>=-t.constant},intersectsTriangle:function(){var t=new a.a,e=new a.a,r=new a.a,i=new a.a,n=new a.a,s=new a.a,o=new a.a,u=new a.a,h=new a.a,d=new a.a;function c(i){var n,s;for(n=0,s=i.length-3;n<=s;n+=3){o.fromArray(i,n);var 
a=h.x*Math.abs(o.x)+h.y*Math.abs(o.y)+h.z*Math.abs(o.z),u=t.dot(o),d=e.dot(o),c=r.dot(o);if(Math.max(-Math.max(u,d,c),Math.min(u,d,c))>a)return!1}return!0}return function(a){if(this.isEmpty())return!1;this.getCenter(u),h.subVectors(this.max,u),t.subVectors(a.a,u),e.subVectors(a.b,u),r.subVectors(a.c,u),i.subVectors(e,t),n.subVectors(r,e),s.subVectors(t,r);var o=[0,-i.z,i.y,0,-n.z,n.y,0,-s.z,s.y,i.z,0,-i.x,n.z,0,-n.x,s.z,0,-s.x,-i.y,i.x,0,-n.y,n.x,0,-s.y,s.x,0];return!!c(o)&&(!!c(o=[1,0,0,0,1,0,0,0,1])&&(d.crossVectors(i,n),c(o=[d.x,d.y,d.z])))}}(),clampPoint:function(t,e){return void 0===e&&(console.warn("THREE.Box3: .clampPoint() target is now required"),e=new a.a),e.copy(t).clamp(this.min,this.max)},distanceToPoint:function(){var t=new a.a;return function(e){return t.copy(e).clamp(this.min,this.max).sub(e).length()}}(),getBoundingSphere:function(){var t=new a.a;return function(e){return void 0===e&&(console.warn("THREE.Box3: .getBoundingSphere() target is now required"),e=new o.a),this.getCenter(e.center),e.radius=.5*this.getSize(t).length(),e}}(),intersect:function(t){return this.min.max(t.min),this.max.min(t.max),this.isEmpty()&&this.makeEmpty(),this},union:function(t){return this.min.min(t.min),this.max.max(t.max),this},applyMatrix4:(i=[new a.a,new a.a,new a.a,new a.a,new a.a,new a.a,new a.a,new a.a],function(t){return this.isEmpty()?this:(i[0].set(this.min.x,this.min.y,this.min.z).applyMatrix4(t),i[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(t),i[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(t),i[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(t),i[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(t),i[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(t),i[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(t),i[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(t),this.setFromPoints(i),this)}),translate:function(t){return this.min.add(t),this.max.add(t),this},equals:function(t){return t.min.equals(this.min)&&t.max.equals(this.max)}})}}]); |
const config = require("../../config.json"), { app, oauth, guilds, channels, emojis } = require("../constants"), { appeals, appealbanned, tokenToUser } = require("../database"), fs = require("fs"), { join } = require("path");
const
formFile = fs.readFileSync(join(__dirname, "../web/appeals/form.html"), "utf8").replace(/{{PATH}}/g, config.appeal.path),
alertFile = fs.readFileSync(join(__dirname, "../web/appeals/alert.html"), "utf8").replace(/{{PATH}}/g, config.appeal.path);
const
caseEmojis = {
accept: "✅",
claim: "🎗",
reject: "❌",
reset: "♻",
trash: "🗑",
ban: "🔨"
}, caseColors = {
accepted: 0x2ecc71,
rejected: 0xe74c3c,
default: 0xf1c40f
};
module.exports = async client => {
const guild = client.guilds.cache.get(guilds.main), appealChannel = guild.channels.cache.get(channels.appeals);
app.get(config.appeal.path, async (req, res) => {
if (req.query.code) {
const { access_token } = await oauth.tokenRequest({
code: req.query.code,
scope: "identify email",
grantType: "authorization_code",
redirectUri: config.appeal.link
}).catch(e => { console.log(e); return {}; });
if (!access_token) return res.status(500).send(alertFile
.replace(/{{TITLE}}/g, "Failure")
.replace(/{{MESSAGE}}/g, "We could not verify you. Please try again.")
);
const user = await oauth.getUser(access_token), avatar = getAvatar(user, client);
if (await appealbanned.get(user.id)) return res.status(403).send(alertFile
.replace(/{{TITLE}}/g, "Forbidden")
.replace(/{{MESSAGE}}/g, "You have been banned from using the appeal system. Please contact BlurpleMail or [email protected] if you feel this was an error.")
);
tokenToUser.set(req.query.code, user);
return res.send(formFile
.replace(/{{ICON}}/g, guild.iconURL({ dynamic: false, format: "png", size: 64 }))
.replace(/{{AVATAR}}/g, avatar)
.replace(/{{TOKEN}}/g, req.query.code)
);
}
else if (req.query.token) {
console.log(req.query);
const user = await tokenToUser.get(req.query.token);
const embed = {
title: `${req.query.casetype.toUpperCase()} - #${req.query.caseid || "???"}`,
author: {
name: user ? `${user.username}#${user.discriminator} (${user.id})` : "Unknown User",
icon_url: user ? getAvatar(user, client) : null
},
fields: [
...[
req.query.caseid ? { name: "See case info", value: `\`!case ${req.query.caseid}\``, inline: true } : null,
user && user.id ? { name: "See all cases", value: `\`!cases ${user.id}\``, inline: true } : null,
user && user.email ? { name: "Email", value: user.email + "\n" + emojis.blank, inline: true } : null
].filter(f => f),
{
name: "User Statement",
value: req.query.statement + "\n" + emojis.blank
},
{
name: "Why should we appeal your punishment?",
value: req.query.reason + "\n" + emojis.blank
},
{
name: "Appeal log",
value: "No logs yet"
}
],
color: caseColors.default
};
const m = await appealChannel.send({ embed });
appeals.set(m.id, { user: user && user.id ? user.id : null, embed, content: "", log: [] });
if (!user) res.redirect(config.appeal.link + "?failure=1");
else {
tokenToUser.unset(req.query.token);
res.redirect(config.appeal.link + "?failure=0");
}
for (const emoji of Object.values(caseEmojis)) await m.react(emoji);
} else if (req.query.failure == "0") {
return res.send(alertFile
.replace(/{{TITLE}}/g, "Success!")
.replace(/{{MESSAGE}}/g, "We have received your appeal. If you're available on DMs thorugh the server, we will contact you via BlurpleMail. Otherwise, we will contact you via e-mail.")
);
} else if (req.query.failure) {
return res.send(alertFile
.replace(/{{TITLE}}/g, "Failure")
.replace(/{{MESSAGE}}/g, "An unknown error occurred from our end. Please contact BlurpleMail. If you're banned, contact [email protected] from the mail you tried to appeal with, and we will sort it out with you :)")
);
} else return res.redirect(`${client.options.http.api}/oauth2/authorize?client_id=${client.user.id}&redirect_uri=${encodeURI(config.appeal.link)}&response_type=code&scope=identify%20email`);
});
app.get("/appeal.css", (_, res) => res.sendFile(join(__dirname, "../web/appeals/appeal.css")));
client.on("messageReactionAdd", async (reaction, user) => {
if (reaction.message.partial) await reaction.message.fetch();
if (
reaction.message.channel.id == channels.appeals &&
!user.bot &&
reaction.message.author.id == client.user.id
) {
const appeal = await appeals.get(reaction.message.id);
if (appeal) {
let update = true;
appeal.content = "";
if (reaction.emoji.name == caseEmojis.accept) {
appeal.embed.color = caseColors.accepted;
appeal.log.push(`Accepted by ${user} (${user.id})`);
} else if (reaction.emoji.name == caseEmojis.claim) {
appeal.embed.color = caseColors.default;
appeal.content = user.toString();
appeal.log.push(`Claimed by ${user} (${user.id})`);
} else if (reaction.emoji.name == caseEmojis.reject) {
appeal.embed.color = caseColors.rejected;
appeal.log.push(`Rejected by ${user} (${user.id})`);
} else if (reaction.emoji.name == caseEmojis.reset) {
appeal.embed.color = caseColors.default;
appeal.log.push(`Reset by ${user} (${user.id})`);
} else if (reaction.emoji.name == caseEmojis.trash) {
update = false;
reaction.message.delete();
appeal.log.push(`Deleted by ${user} (${user.id})`);
} else if (reaction.emoji.name == caseEmojis.ban) {
update = false;
reaction.message.delete();
appeal.log.push(`Banned from appeals by ${user} (${user.id})`);
appealbanned.set(appeal.user, true);
}
appeals.set(reaction.message.id, appeal);
if (update) {
const e = JSON.parse(JSON.stringify(appeal.embed));
e.fields.find(f => f.name == "Appeal log").value = appeal.log.map(l => `• ${l}`).join("\n");
reaction.message.edit(appeal.content, { embed: e });
reaction.users.remove(user);
}
}
}
});
};
const getAvatar = (user, client) =>
user.avatar ?
`${client.options.http.cdn}/avatars/${user.id}/${user.avatar}.${user.avatar.startsWith("a_") ? "gif" : "png"}?size=64` :
`${client.options.http.cdn}/embed/avatars/${user.discriminator % 5}.png?size=64`; // user has no avatar |
'use strict';
var ctrlFactory = require('restitute').controller;
function listController() {
ctrlFactory.list.call(this, '/rest/admin/types');
this.controllerAction = function() {
this.jsonService(this.service('admin/types/list'));
};
}
listController.prototype = new ctrlFactory.list();
function getController() {
ctrlFactory.get.call(this, '/rest/admin/types/:id');
this.controllerAction = function() {
this.jsonService(this.service('admin/types/get'));
};
}
getController.prototype = new ctrlFactory.get();
function save() {
this.jsonService(this.service('admin/types/save'));
}
function createController() {
ctrlFactory.create.call(this, '/rest/admin/types');
this.controllerAction = save;
}
createController.prototype = new ctrlFactory.create();
function updateController() {
ctrlFactory.update.call(this, '/rest/admin/types/:id');
this.controllerAction = save;
}
updateController.prototype = new ctrlFactory.update();
function deleteController() {
ctrlFactory.delete.call(this, '/rest/admin/types/:id');
this.controllerAction = function() {
this.jsonService(this.service('admin/types/delete'));
};
}
deleteController.prototype = new ctrlFactory.delete();
exports = module.exports = [
listController,
getController,
createController,
updateController,
deleteController
];
|
/**
* Extend module's NODE_PATH
* HACK: temporary solution
*/
require('node-path')(module);
/**
* Module dependencies.
*/
var mongoose = require('mongoose');
var Comment = mongoose.model('Comment');
var utils = require('lib/utils');
var pluck = utils.pluck;
var config = require('lib/config');
var t = require('t-component');
var log = require('debug')('democracyos:db-api:comment');
var notifier = require('notifier-client')(config.notifications);
var url = require('url');
/**
* Get all comments
*
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @return {Module} `comment` module
* @api public
*/
exports.all = function all(fn) {
log('Looking for all comments.')
Comment.find(function (err, comments) {
if (err) {
log('Found error %j', err);
return fn(err);
};
log('Delivering comments %j', pluck(comments, 'id'));
fn(null, comments);
});
return this;
};
/**
 * Create comment with `text` by `author`
 * for the given `context` and `reference`
 *
 * @param {Object} comment comment vars like `text`, `author`, `context` and `reference`
 * @param {Function} fn callback function
 * - 'err' error found while process or `null`
 * - 'comment' the created comment or `undefined`
* @api public
*/
exports.create = function create(comment, fn) {
log('Creating new comment %j for %s %s', comment.text, comment.context, comment.reference);
var comment = new Comment(comment);
comment.save(function (err) {
if (err) {
log('Found error %s', err);
return fn(err);
};
comment.populate('author', function(err) {
if (err) {
log('Found error %s', err)
return fn(err);
};
log('Delivering comment %j', comment.id);
fn(null, comment);
});
});
};
/**
 * Get comments matching `query`, with paging
 *
 * @param {Object} query mongo query with `context` and `reference`
 * @param {Object} paging paging options like `page`, `limit`, `sort` and `exclude_user`
 * @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.getFor = function getFor(query, paging, fn) {
log('Looking for comments for %s %s', query.context, query.reference);
paging = paging || { page: 0, limit: config('comments per page'), sort: 'score', exclude_user: null };
Comment
.find(query)
.populate('author', 'id firstName lastName fullName email profilePictureUrl')
.sort(paging.sort || 'score')
.skip(paging.page * paging.limit)
.limit(paging.limit)
.exec(function(err, comments) {
if (err) {
log('Found error %j', err);
return fn(err);
};
log('Delivering comments %j', pluck(comments, 'id'));
fn(null, comments);
});
};
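/**
 * Illustrative usage only (not part of this module); assumes it is required as
 * `comment` and that `lawId` is a placeholder for an existing law id. The field
 * names mirror the `query` and `paging` objects consumed by `getFor` above.
 *
 *   comment.getFor(
 *     { context: 'law', reference: lawId },
 *     { page: 1, limit: 10, sort: '-createdAt' },
 *     function (err, comments) {
 *       if (err) return console.error(err);
 *       console.log('Got %d comments', comments.length);
 *     }
 *   );
 */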
/**
* Get replies for comment
*
* @param {String} id
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.replies = function replies(id, fn) {
log('Looking for replies for comment %s', id);
Comment
.findOne({ _id: id })
.populate('replies.author', 'id firstName lastName fullName email profilePictureUrl')
.exec(function(err, comment) {
if (err) {
log('Found error %j', err);
return fn(err);
};
var replies = comment && comment.replies ? comment.replies : [];
log('Delivering replies %j', pluck(replies, 'id'));
fn(null, replies);
});
};
/**
* Reply to comment
*
* @param {String} commentId to attach reply
* @param {Object} reply object with params
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.reply = function reply(commentId, reply, fn) {
log('Looking for comment %s to reply with %j', commentId, reply);
Comment.findById(commentId, function(err, comment) {
if (err) {
log('Found error %j', err);
return fn(err);
};
log('Creating reply %j for comment %j', reply, comment);
var doc = comment.replies.create(reply);
comment.replies.push(doc);
comment.save(function(err, saved) {
if (err) {
log('Found error %j', err);
return fn(err);
};
comment
.populate('replies.author', 'id firstName lastName fullName email profilePictureUrl', function(err) {
if (err) {
log('Found error %j', err);
return fn(err);
};
if (notifier.enabled() && comment.author != reply.author.id) {
var lawUrl = url.format({
protocol: config('protocol')
, hostname: config('host')
, port: config('publicPort')
, pathname: '/law/' + comment.reference
});
var r = {
id: doc.id,
author: { id: reply.author.id },
text: reply.text
}
var c = {
id: comment.id,
author: { id: comment.author }
}
var event = 'reply-argument'
notifier.notify(event)
.to(reply.author.email)
.withData( { reply: r, comment: c, url: lawUrl} )
.send(function (err, data) {
if (err) {
log('Error when sending notification for event %s: %j', event, err);
return fn(err);
}
log('Delivering reply %s', doc.id);
return fn(null, doc);
})
} else {
log('Notifier is disabled: unable to send comment reply notification mail to author');
return fn(null, doc);
}
});
});
});
};
/**
* Edit a reply
*
* @param {Object} comment to attach reply
* @param {Object} reply object with params
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.editReply = function editReply(comment, reply, fn) {
log('Looking for comment %s to reply with %s', comment.id, reply.id);
reply.editedAt = Date.now();
Comment.update(
{ _id: comment.id, 'replies._id': reply.id },
{ $set: { 'replies.$.text': reply.text, 'replies.$.editedAt': reply.editedAt } },
function (err) {
if (err) {
log('Found error %j', err);
return fn(err);
}
log('Delivering reply %s', reply.id);
fn(null, reply);
});
};
/**
* Upvote comment
*
* @param {String} id
* @param {Citizen|ObjectId|String} citizen
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comment' list items found or `undefined`
* @api public
*/
exports.upvote = function upvote(id, citizen, fn) {
Comment.findById(id).populate('author').exec(function(err, comment) {
if (err) return log('Found error %s', err), fn(err);
if (comment.author.id == citizen.id) {
log('Author %s tried to vote their own comment %s', citizen.id, comment.id);
return fn(t('comments.score.not-allowed'), comment);
}
log('Upvoting comment %s', comment.id);
comment.vote(citizen, 'positive', function(err) {
if (err) return log('Found error %s', err), fn(err);
log('Delivering comment %s', comment.id);
fn(null, comment);
});
});
};
/**
* Downvote comment
*
* @param {String} id
* @param {Citizen|ObjectId|String} citizen
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.downvote = function downvote(id, citizen, fn) {
Comment.findById(id).populate('author').exec(function(err, comment) {
if (err) return log('Found error %s', err), fn(err);
if (comment.author.id == citizen.id) {
log('Author %s tried to vote their own comment %s', citizen.id, comment.id);
return fn(t('comments.score.not-allowed'), comment);
}
log('Downvoting comment %s', comment.id);
comment.vote(citizen, 'negative', function(err) {
if (err) return log('Found error %s', err), fn(err);
log('Delivering comment %s', comment.id);
fn(null, comment);
});
});
};
/**
* Flag comment as spam
*
* @param {String} id
* @param {Citizen|ObjectId|String} citizen
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comment' list items found or `undefined`
* @api public
*/
exports.flag = function flag(id, citizen, fn) {
Comment.findById(id, function(err, comment) {
if (err) return log('Found error %s', err), fn(err);
    log('Flagging comment %s as spam', comment.id);
comment.flag(citizen, 'spam', function(err) {
if (err) return log('Found error %s', err), fn(err);
log('Delivering comment %s', comment.id);
fn(null, comment);
});
});
};
/**
* Unflag comment as spam
*
* @param {String} id
* @param {Citizen|ObjectId|String} citizen
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'comments' list items found or `undefined`
* @api public
*/
exports.unflag = function unflag(id, citizen, fn) {
Comment.findById(id, function(err, comment) {
if (err) return log('Found error %s', err), fn(err);
    log('Unflagging comment %s', comment.id);
comment.unflag(citizen, function(err) {
if (err) return log('Found error %s', err), fn(err);
log('Delivering comment %s', comment.id);
fn(null, comment);
});
});
};
/**
* Edit comment
*
 * @param {Comment} comment comment document to save
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* @api public
*/
exports.edit = function edit(comment, fn) {
log('Updating comment %s', comment.id);
comment.save(function (err, comment) {
if (!err) return log('Updated comment %s', comment.id), fn(null, comment);
return log('Found error %s', err), fn(err);
});
return this;
};
/**
* Remove comment
*
 * @param {Comment} comment comment document to remove
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* @api public
*/
exports.remove = function remove(comment, fn) {
comment.remove(function(err) {
if (err) return log('Found error %s', err), fn(err);
log('Comment %s removed', comment.id);
fn(null);
});
};
/**
* Search comment ratings
*
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'ratings', total rated comments or `undefined`
 * @return {Module} `comment` module
* @api public
*/
exports.ratings = function ratings(fn) {
log('Counting total rated comments');
Comment
.aggregate(
{ $unwind : "$votes" },
{ $group: { _id: "#votes", total: { $sum: 1 } } },
function (err, res) {
if (err) {
log('Found error: %j', err);
return fn(err);
}
if (!res[0]) return fn(null, 0);
var rated = res[0].total;
log('Found %d rated comments', rated);
fn(null, rated);
}
);
return this;
};
/**
* Total replies
*
* @param {Function} fn callback function
* - 'err' error found while process or `null`
* - 'replies', total comment replies or `undefined`
 * @return {Module} `comment` module
* @api public
*/
exports.totalReplies = function totalReplies(fn) {
log('Counting total comment replies');
Comment
.aggregate(
{ $unwind : "$replies" },
{ $group: { _id: "#replies", total: { $sum: 1 } } },
function (err, res) {
if (err) {
log('Found error: %j', err);
return fn(err);
}
if (!res[0]) return fn(null, 0);
var replies = res[0].total;
log('Found %d comment replies', replies);
fn(null, replies);
}
)
return this;
}; |
'use strict';
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var React = _interopDefault(require('react'));
var _extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
var objectWithoutProperties = function (obj, keys) {
var target = {};
for (var i in obj) {
if (keys.indexOf(i) >= 0) continue;
if (!Object.prototype.hasOwnProperty.call(obj, i)) continue;
target[i] = obj[i];
}
return target;
};
var ShieldHomeIcon = function ShieldHomeIcon(_ref) {
var _ref$color = _ref.color,
color = _ref$color === undefined ? 'currentColor' : _ref$color,
_ref$size = _ref.size,
size = _ref$size === undefined ? 24 : _ref$size,
children = _ref.children,
props = objectWithoutProperties(_ref, ['color', 'size', 'children']);
var className = 'mdi-icon ' + (props.className || '');
return React.createElement(
'svg',
_extends({}, props, { className: className, width: size, height: size, fill: color, viewBox: '0 0 24 24' }),
React.createElement('path', { d: 'M11,13H13V16H16V11H18L12,6L6,11H8V16H11V13M12,1L21,5V11C21,16.55 17.16,21.74 12,23C6.84,21.74 3,16.55 3,11V5L12,1Z' })
);
};
var ShieldHomeIcon$1 = React.memo ? React.memo(ShieldHomeIcon) : ShieldHomeIcon;
module.exports = ShieldHomeIcon$1;
|
import ethUtil from 'ethereumjs-util'
import React, { Component } from 'react'
import PropTypes from 'prop-types'
import { ENVIRONMENT_TYPE_NOTIFICATION } from '../../../../app/scripts/lib/enums'
import { getEnvironmentType } from '../../../../app/scripts/lib/util'
import ConfirmPageContainer, { ConfirmDetailRow } from '../../components/app/confirm-page-container'
import { isBalanceSufficient } from '../send/send.utils'
import { DEFAULT_ROUTE, CONFIRM_TRANSACTION_ROUTE } from '../../helpers/constants/routes'
import {
INSUFFICIENT_FUNDS_ERROR_KEY,
TRANSACTION_ERROR_KEY,
GAS_LIMIT_TOO_LOW_ERROR_KEY,
} from '../../helpers/constants/error-keys'
import { CONFIRMED_STATUS, DROPPED_STATUS } from '../../helpers/constants/transactions'
import UserPreferencedCurrencyDisplay from '../../components/app/user-preferenced-currency-display'
import { PRIMARY, SECONDARY } from '../../helpers/constants/common'
import { hexToDecimal } from '../../helpers/utils/conversions.util'
import AdvancedGasInputs from '../../components/app/gas-customization/advanced-gas-inputs'
import TextField from '../../components/ui/text-field'
export default class ConfirmTransactionBase extends Component {
static contextTypes = {
t: PropTypes.func,
metricsEvent: PropTypes.func,
}
static propTypes = {
// react-router props
history: PropTypes.object,
// Redux props
balance: PropTypes.string,
cancelTransaction: PropTypes.func,
cancelAllTransactions: PropTypes.func,
clearConfirmTransaction: PropTypes.func,
conversionRate: PropTypes.number,
fromAddress: PropTypes.string,
fromName: PropTypes.string,
hexTransactionAmount: PropTypes.string,
hexTransactionFee: PropTypes.string,
hexTransactionTotal: PropTypes.string,
isTxReprice: PropTypes.bool,
methodData: PropTypes.object,
nonce: PropTypes.string,
useNonceField: PropTypes.bool,
customNonceValue: PropTypes.string,
updateCustomNonce: PropTypes.func,
assetImage: PropTypes.string,
sendTransaction: PropTypes.func,
showCustomizeGasModal: PropTypes.func,
showTransactionConfirmedModal: PropTypes.func,
showRejectTransactionsConfirmationModal: PropTypes.func,
toAddress: PropTypes.string,
tokenData: PropTypes.object,
tokenProps: PropTypes.object,
toName: PropTypes.string,
toEns: PropTypes.string,
toNickname: PropTypes.string,
transactionStatus: PropTypes.string,
txData: PropTypes.object,
unapprovedTxCount: PropTypes.number,
currentNetworkUnapprovedTxs: PropTypes.object,
updateGasAndCalculate: PropTypes.func,
customGas: PropTypes.object,
// Component props
actionKey: PropTypes.string,
contentComponent: PropTypes.node,
dataComponent: PropTypes.node,
detailsComponent: PropTypes.node,
errorKey: PropTypes.string,
errorMessage: PropTypes.string,
primaryTotalTextOverride: PropTypes.oneOfType([PropTypes.string, PropTypes.node]),
secondaryTotalTextOverride: PropTypes.string,
hideData: PropTypes.bool,
hideDetails: PropTypes.bool,
hideSubtitle: PropTypes.bool,
identiconAddress: PropTypes.string,
onCancel: PropTypes.func,
onEdit: PropTypes.func,
onEditGas: PropTypes.func,
onSubmit: PropTypes.func,
setMetaMetricsSendCount: PropTypes.func,
metaMetricsSendCount: PropTypes.number,
subtitle: PropTypes.string,
subtitleComponent: PropTypes.node,
summaryComponent: PropTypes.node,
title: PropTypes.string,
titleComponent: PropTypes.node,
valid: PropTypes.bool,
warning: PropTypes.string,
advancedInlineGasShown: PropTypes.bool,
insufficientBalance: PropTypes.bool,
hideFiatConversion: PropTypes.bool,
transactionCategory: PropTypes.string,
getNextNonce: PropTypes.func,
nextNonce: PropTypes.number,
tryReverseResolveAddress: PropTypes.func.isRequired,
hideSenderToRecipient: PropTypes.bool,
showAccountInHeader: PropTypes.bool,
}
state = {
submitting: false,
submitError: null,
submitWarning: '',
}
componentDidUpdate (prevProps) {
const {
transactionStatus,
showTransactionConfirmedModal,
history,
clearConfirmTransaction,
nextNonce,
customNonceValue,
toAddress,
tryReverseResolveAddress,
} = this.props
const {
customNonceValue: prevCustomNonceValue,
nextNonce: prevNextNonce,
toAddress: prevToAddress,
transactionStatus: prevTxStatus,
} = prevProps
const statusUpdated = transactionStatus !== prevTxStatus
const txDroppedOrConfirmed = transactionStatus === DROPPED_STATUS || transactionStatus === CONFIRMED_STATUS
if (nextNonce !== prevNextNonce || customNonceValue !== prevCustomNonceValue) {
if (customNonceValue > nextNonce) {
this.setState({ submitWarning: this.context.t('nextNonceWarning', [nextNonce]) })
} else {
this.setState({ submitWarning: '' })
}
}
if (statusUpdated && txDroppedOrConfirmed) {
showTransactionConfirmedModal({
onSubmit: () => {
clearConfirmTransaction()
history.push(DEFAULT_ROUTE)
},
})
}
if (toAddress && toAddress !== prevToAddress) {
tryReverseResolveAddress(toAddress)
}
}
getErrorKey () {
const {
balance,
conversionRate,
hexTransactionFee,
txData: {
simulationFails,
txParams: {
value: amount,
} = {},
} = {},
customGas,
} = this.props
const insufficientBalance = balance && !isBalanceSufficient({
amount,
gasTotal: hexTransactionFee || '0x0',
balance,
conversionRate,
})
if (insufficientBalance) {
return {
valid: false,
errorKey: INSUFFICIENT_FUNDS_ERROR_KEY,
}
}
if (hexToDecimal(customGas.gasLimit) < 21000) {
return {
valid: false,
errorKey: GAS_LIMIT_TOO_LOW_ERROR_KEY,
}
}
if (simulationFails) {
return {
valid: true,
errorKey: simulationFails.errorKey ? simulationFails.errorKey : TRANSACTION_ERROR_KEY,
}
}
return {
valid: true,
}
}
handleEditGas () {
const { onEditGas, showCustomizeGasModal, actionKey, txData: { origin }, methodData = {} } = this.props
this.context.metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'User clicks "Edit" on gas',
},
customVariables: {
recipientKnown: null,
functionType: actionKey || getMethodName(methodData.name) || 'contractInteraction',
origin,
},
})
if (onEditGas) {
onEditGas()
} else {
showCustomizeGasModal()
}
}
renderDetails () {
const {
detailsComponent,
primaryTotalTextOverride,
secondaryTotalTextOverride,
hexTransactionFee,
hexTransactionTotal,
hideDetails,
useNonceField,
customNonceValue,
updateCustomNonce,
advancedInlineGasShown,
customGas,
insufficientBalance,
updateGasAndCalculate,
hideFiatConversion,
nextNonce,
getNextNonce,
} = this.props
if (hideDetails) {
return null
}
return (
detailsComponent || (
<div className="confirm-page-container-content__details">
<div className="confirm-page-container-content__gas-fee">
<ConfirmDetailRow
label="Gas Fee"
value={hexTransactionFee}
headerText="Edit"
headerTextClassName="confirm-detail-row__header-text--edit"
onHeaderClick={() => this.handleEditGas()}
secondaryText={hideFiatConversion ? this.context.t('noConversionRateAvailable') : ''}
/>
{advancedInlineGasShown
? (
<AdvancedGasInputs
updateCustomGasPrice={(newGasPrice) => updateGasAndCalculate({ ...customGas, gasPrice: newGasPrice })}
updateCustomGasLimit={(newGasLimit) => updateGasAndCalculate({ ...customGas, gasLimit: newGasLimit })}
customGasPrice={customGas.gasPrice}
customGasLimit={customGas.gasLimit}
insufficientBalance={insufficientBalance}
customPriceIsSafe
isSpeedUp={false}
/>
)
: null
}
</div>
<div className={useNonceField ? 'confirm-page-container-content__gas-fee' : null}>
<ConfirmDetailRow
label="Total"
value={hexTransactionTotal}
primaryText={primaryTotalTextOverride}
secondaryText={hideFiatConversion ? this.context.t('noConversionRateAvailable') : secondaryTotalTextOverride}
headerText="Amount + Gas Fee"
headerTextClassName="confirm-detail-row__header-text--total"
primaryValueTextColor="#2f9ae0"
/>
</div>
{useNonceField ? (
<div>
<div className="confirm-detail-row">
<div className="confirm-detail-row__label">
{ this.context.t('nonceFieldHeading') }
</div>
<div className="custom-nonce-input">
<TextField
type="number"
min="0"
placeholder={ typeof nextNonce === 'number' ? nextNonce.toString() : null }
onChange={({ target: { value } }) => {
if (!value.length || Number(value) < 0) {
updateCustomNonce('')
} else {
updateCustomNonce(String(Math.floor(value)))
}
getNextNonce()
}}
fullWidth
margin="dense"
value={ customNonceValue || '' }
/>
</div>
</div>
</div>
) : null}
</div>
)
)
}
renderData (functionType) {
const { t } = this.context
const {
txData: {
txParams: {
data,
} = {},
} = {},
methodData: {
params,
} = {},
hideData,
dataComponent,
} = this.props
if (hideData) {
return null
}
return dataComponent || (
<div className="confirm-page-container-content__data">
<div className="confirm-page-container-content__data-box-label">
{`${t('functionType')}:`}
<span className="confirm-page-container-content__function-type">
{ functionType }
</span>
</div>
{
params && (
<div className="confirm-page-container-content__data-box">
<div className="confirm-page-container-content__data-field-label">
{ `${t('parameters')}:` }
</div>
<div>
<pre>{ JSON.stringify(params, null, 2) }</pre>
</div>
</div>
)
}
<div className="confirm-page-container-content__data-box-label">
{`${t('hexData')}: ${ethUtil.toBuffer(data).length} bytes`}
</div>
<div className="confirm-page-container-content__data-box">
{ data }
</div>
</div>
)
}
handleEdit () {
const { txData, tokenData, tokenProps, onEdit, actionKey, txData: { origin }, methodData = {} } = this.props
this.context.metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'Edit Transaction',
},
customVariables: {
recipientKnown: null,
functionType: actionKey || getMethodName(methodData.name) || 'contractInteraction',
origin,
},
})
onEdit({ txData, tokenData, tokenProps })
}
handleCancelAll () {
const {
cancelAllTransactions,
clearConfirmTransaction,
history,
showRejectTransactionsConfirmationModal,
unapprovedTxCount,
} = this.props
showRejectTransactionsConfirmationModal({
unapprovedTxCount,
onSubmit: async () => {
this._removeBeforeUnload()
await cancelAllTransactions()
clearConfirmTransaction()
history.push(DEFAULT_ROUTE)
},
})
}
handleCancel () {
const { metricsEvent } = this.context
const {
onCancel,
txData,
cancelTransaction,
history,
clearConfirmTransaction,
actionKey,
txData: { origin },
methodData = {},
updateCustomNonce,
} = this.props
this._removeBeforeUnload()
metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'Cancel',
},
customVariables: {
recipientKnown: null,
functionType: actionKey || getMethodName(methodData.name) || 'contractInteraction',
origin,
},
})
updateCustomNonce('')
if (onCancel) {
onCancel(txData)
} else {
cancelTransaction(txData)
.then(() => {
clearConfirmTransaction()
history.push(DEFAULT_ROUTE)
})
}
}
handleSubmit () {
const { metricsEvent } = this.context
const {
txData: { origin },
sendTransaction,
clearConfirmTransaction,
txData,
history,
onSubmit,
actionKey,
metaMetricsSendCount = 0,
setMetaMetricsSendCount,
methodData = {},
updateCustomNonce,
} = this.props
const { submitting } = this.state
if (submitting) {
return
}
this.setState({
submitting: true,
submitError: null,
}, () => {
this._removeBeforeUnload()
metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'Transaction Completed',
},
customVariables: {
recipientKnown: null,
functionType: actionKey || getMethodName(methodData.name) || 'contractInteraction',
origin,
},
})
setMetaMetricsSendCount(metaMetricsSendCount + 1)
.then(() => {
if (onSubmit) {
Promise.resolve(onSubmit(txData))
.then(() => {
this.setState({
submitting: false,
})
updateCustomNonce('')
})
} else {
sendTransaction(txData)
.then(() => {
clearConfirmTransaction()
this.setState({
submitting: false,
}, () => {
history.push(DEFAULT_ROUTE)
updateCustomNonce('')
})
})
.catch((error) => {
this.setState({
submitting: false,
submitError: error.message,
})
updateCustomNonce('')
})
}
})
})
}
renderTitleComponent () {
const { title, titleComponent, hexTransactionAmount } = this.props
// Title string passed in by props takes priority
if (title) {
return null
}
return titleComponent || (
<UserPreferencedCurrencyDisplay
value={hexTransactionAmount}
type={PRIMARY}
showEthLogo
ethLogoHeight="26"
hideLabel
/>
)
}
renderSubtitleComponent () {
const { subtitle, subtitleComponent, hexTransactionAmount } = this.props
// Subtitle string passed in by props takes priority
if (subtitle) {
return null
}
return subtitleComponent || (
<UserPreferencedCurrencyDisplay
value={hexTransactionAmount}
type={SECONDARY}
showEthLogo
hideLabel
/>
)
}
handleNextTx (txId) {
const { history, clearConfirmTransaction } = this.props
if (txId) {
clearConfirmTransaction()
history.push(`${CONFIRM_TRANSACTION_ROUTE}/${txId}`)
}
}
getNavigateTxData () {
const { currentNetworkUnapprovedTxs, txData: { id } = {} } = this.props
const enumUnapprovedTxs = Object.keys(currentNetworkUnapprovedTxs)
const currentPosition = enumUnapprovedTxs.indexOf(id ? id.toString() : '')
return {
totalTx: enumUnapprovedTxs.length,
positionOfCurrentTx: currentPosition + 1,
nextTxId: enumUnapprovedTxs[currentPosition + 1],
prevTxId: enumUnapprovedTxs[currentPosition - 1],
showNavigation: enumUnapprovedTxs.length > 1,
firstTx: enumUnapprovedTxs[0],
lastTx: enumUnapprovedTxs[enumUnapprovedTxs.length - 1],
ofText: this.context.t('ofTextNofM'),
requestsWaitingText: this.context.t('requestsAwaitingAcknowledgement'),
}
}
_beforeUnload = () => {
const { txData: { origin, id } = {}, cancelTransaction } = this.props
const { metricsEvent } = this.context
metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'Cancel Tx Via Notification Close',
},
customVariables: {
origin,
},
})
cancelTransaction({ id })
}
_removeBeforeUnload = () => {
if (getEnvironmentType() === ENVIRONMENT_TYPE_NOTIFICATION) {
window.removeEventListener('beforeunload', this._beforeUnload)
}
}
componentDidMount () {
const { toAddress, txData: { origin } = {}, getNextNonce, tryReverseResolveAddress } = this.props
const { metricsEvent } = this.context
metricsEvent({
eventOpts: {
category: 'Transactions',
action: 'Confirm Screen',
name: 'Confirm: Started',
},
customVariables: {
origin,
},
})
if (getEnvironmentType() === ENVIRONMENT_TYPE_NOTIFICATION) {
window.addEventListener('beforeunload', this._beforeUnload)
}
getNextNonce()
if (toAddress) {
tryReverseResolveAddress(toAddress)
}
}
componentWillUnmount () {
this._removeBeforeUnload()
}
render () {
const { t } = this.context
const {
isTxReprice,
fromName,
fromAddress,
toName,
toAddress,
toEns,
toNickname,
methodData,
valid: propsValid = true,
errorMessage,
errorKey: propsErrorKey,
title,
subtitle,
hideSubtitle,
identiconAddress,
summaryComponent,
contentComponent,
onEdit,
nonce,
customNonceValue,
assetImage,
warning,
unapprovedTxCount,
transactionCategory,
hideSenderToRecipient,
showAccountInHeader,
} = this.props
const { submitting, submitError, submitWarning } = this.state
const { name } = methodData
const { valid, errorKey } = this.getErrorKey()
const { totalTx, positionOfCurrentTx, nextTxId, prevTxId, showNavigation, firstTx, lastTx, ofText, requestsWaitingText } = this.getNavigateTxData()
let functionType = getMethodName(name)
if (!functionType) {
if (transactionCategory) {
functionType = t(transactionCategory) || transactionCategory
} else {
functionType = t('contractInteraction')
}
}
return (
<ConfirmPageContainer
fromName={fromName}
fromAddress={fromAddress}
showAccountInHeader={showAccountInHeader}
toName={toName}
toAddress={toAddress}
toEns={toEns}
toNickname={toNickname}
showEdit={onEdit && !isTxReprice}
action={functionType}
title={title}
titleComponent={this.renderTitleComponent()}
subtitle={subtitle}
subtitleComponent={this.renderSubtitleComponent()}
hideSubtitle={hideSubtitle}
summaryComponent={summaryComponent}
detailsComponent={this.renderDetails()}
dataComponent={this.renderData(functionType)}
contentComponent={contentComponent}
nonce={customNonceValue || nonce}
unapprovedTxCount={unapprovedTxCount}
assetImage={assetImage}
identiconAddress={identiconAddress}
errorMessage={errorMessage || submitError}
errorKey={propsErrorKey || errorKey}
warning={warning || submitWarning}
totalTx={totalTx}
positionOfCurrentTx={positionOfCurrentTx}
nextTxId={nextTxId}
prevTxId={prevTxId}
showNavigation={showNavigation}
onNextTx={(txId) => this.handleNextTx(txId)}
firstTx={firstTx}
lastTx={lastTx}
ofText={ofText}
requestsWaitingText={requestsWaitingText}
disabled={!propsValid || !valid || submitting}
onEdit={() => this.handleEdit()}
onCancelAll={() => this.handleCancelAll()}
onCancel={() => this.handleCancel()}
onSubmit={() => this.handleSubmit()}
hideSenderToRecipient={hideSenderToRecipient}
/>
)
}
}
export function getMethodName (camelCase) {
if (!camelCase || typeof camelCase !== 'string') {
return ''
}
return camelCase
.replace(/([a-z])([A-Z])/g, '$1 $2')
.replace(/([A-Z])([a-z])/g, ' $1$2')
.replace(/ +/g, ' ')
}
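// Illustrative only (not part of the component): getMethodName splits a
// camelCase method name into space-separated words, e.g.
//   getMethodName('transferFrom')   // -> 'transfer From'
//   getMethodName('approveAndCall') // -> 'approve And Call'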
|
import './index.css';
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
ReactDOM.render(
<App />,
document.getElementById('root')
); |
deepmacDetailCallback("988bad000000/24",[{"d":"2011-08-27","t":"add","a":"Ashford Mill\nStation Road\nFordingbridge Hampshire SP6 1DZ\n","c":"UNITED KINGDOM","o":"Corintech Ltd."},{"d":"2015-08-27","t":"change","a":"Ashford Mill Fordingbridge Hampshire GB SP6 1DZ","c":"GB","o":"Corintech Ltd."}]);
|
jest.mock('./../../../src/infrastructure/config', () => require('./../../utils').configMockFactory());
jest.mock('./../../../src/infrastructure/logger', () => require('./../../utils').loggerMockFactory());
jest.mock('login.dfe.policy-engine');
jest.mock('./../../../src/infrastructure/organisations');
jest.mock('./../../../src/infrastructure/applications', () => {
return {
getServiceById: jest.fn(),
};
});
jest.mock('./../../../src/infrastructure/access', () => {
return {
getSingleUserService: jest.fn(),
getSingleInvitationService: jest.fn(),
};
});
const { getRequestMock, getResponseMock } = require('./../../utils');
const { getServiceById } = require('./../../../src/infrastructure/applications');
const { getSingleUserService, getSingleInvitationService } = require('./../../../src/infrastructure/access');
const { getUserOrganisations, getInvitationOrganisations } = require('./../../../src/infrastructure/organisations');
const PolicyEngine = require('login.dfe.policy-engine');
const policyEngine = {
getPolicyApplicationResultsForUser: jest.fn(),
};
const res = getResponseMock();
describe('when displaying the associate roles view', () => {
let req;
let getAssociateRoles;
beforeEach(() => {
req = getRequestMock({
params: {
uid: 'user1',
orgId: '88a1ed39-5a98-43da-b66e-78e564ea72b0',
sid: 'service1',
},
session: {
user: {
email: '[email protected]',
firstName: 'test',
lastName: 'name',
services: [
{
serviceId: 'service1',
roles: [],
}
],
isAddService: true
},
},
});
res.mockResetAll();
getUserOrganisations.mockReset();
getUserOrganisations.mockReturnValue([
{
organisation: {
id: '88a1ed39-5a98-43da-b66e-78e564ea72b0',
name: 'Great Big School'
},
},
{
organisation: {
id: 'fe68a9f4-a995-4d74-aa4b-e39e0e88c15d',
name: 'Little Tiny School'
},
},
]);
getInvitationOrganisations.mockReset();
getInvitationOrganisations.mockReturnValue([
{
organisation: {
id: '88a1ed39-5a98-43da-b66e-78e564ea72b0',
name: 'Great Big School'
},
},
{
organisation: {
id: 'fe68a9f4-a995-4d74-aa4b-e39e0e88c15d',
name: 'Little Tiny School'
},
},
]);
getSingleUserService.mockReset();
getSingleUserService.mockReturnValue({
id: 'service1',
name: 'service name',
roles: [],
});
getSingleInvitationService.mockReset();
getSingleInvitationService.mockReturnValue({
id: 'service1',
name: 'service name',
roles: [],
});
getServiceById.mockReset();
getServiceById.mockReturnValue({
id: 'service1',
name: 'service name'
});
policyEngine.getPolicyApplicationResultsForUser.mockReset().mockReturnValue({
rolesAvailableToUser: [],
});
PolicyEngine.mockReset().mockImplementation(() => policyEngine);
getAssociateRoles = require('./../../../src/app/users/associateRoles').get;
});
it('then it should return the associate roles view', async () => {
await getAssociateRoles(req, res);
expect(res.render.mock.calls.length).toBe(1);
expect(res.render.mock.calls[0][0]).toBe('users/views/associateRoles');
});
it('then it should include csrf token', async () => {
await getAssociateRoles(req, res);
expect(res.render.mock.calls[0][1]).toMatchObject({
csrfToken: 'token',
});
});
  it('then it should include the organisation details for a user if the request is for a user', async () => {
await getAssociateRoles(req, res);
expect(getUserOrganisations.mock.calls).toHaveLength(1);
expect(getUserOrganisations.mock.calls[0][0]).toBe('user1');
expect(getUserOrganisations.mock.calls[0][1]).toBe('correlationId');
expect(res.render.mock.calls[0][1]).toMatchObject({
organisationDetails: {
organisation: {
id: '88a1ed39-5a98-43da-b66e-78e564ea72b0',
name: 'Great Big School'
},
},
});
});
  it('then it should include the organisation details for an invitation if the request is for an invitation', async () => {
req.params.uid = 'inv-invitation1';
await getAssociateRoles(req, res);
expect(getInvitationOrganisations.mock.calls).toHaveLength(1);
expect(getInvitationOrganisations.mock.calls[0][0]).toBe('invitation1');
expect(getInvitationOrganisations.mock.calls[0][1]).toBe('correlationId');
expect(res.render.mock.calls[0][1]).toMatchObject({
organisationDetails: {
organisation: {
id: '88a1ed39-5a98-43da-b66e-78e564ea72b0',
name: 'Great Big School'
},
},
});
});
it('then it should include the number of selected services', async () => {
await getAssociateRoles(req, res);
expect(res.render.mock.calls[0][1]).toMatchObject({
totalNumberOfServices: req.session.user.services.length,
});
});
it('then it should include the current service', async () => {
await getAssociateRoles(req, res);
expect(res.render.mock.calls[0][1]).toMatchObject({
currentService: 1,
});
});
it('then it should get the service details', async () => {
await getAssociateRoles(req, res);
expect(getServiceById.mock.calls).toHaveLength(1);
expect(getServiceById.mock.calls[0][0]).toBe('service1');
});
it('then it should get current users roles if editing service', async () => {
req.session.user.isAddService = false;
await getAssociateRoles(req, res);
expect(getSingleUserService.mock.calls).toHaveLength(1);
expect(getSingleUserService.mock.calls[0][0]).toBe('user1');
expect(getSingleUserService.mock.calls[0][1]).toBe('service1');
expect(getSingleUserService.mock.calls[0][2]).toBe('88a1ed39-5a98-43da-b66e-78e564ea72b0');
expect(getSingleUserService.mock.calls[0][3]).toBe('correlationId');
});
it('then it should get current invitations roles if editing service', async () => {
req.session.user.isAddService = false;
req.params.uid = 'inv-invitation1';
await getAssociateRoles(req, res);
expect(getSingleInvitationService.mock.calls).toHaveLength(1);
expect(getSingleInvitationService.mock.calls[0][0]).toBe('invitation1');
expect(getSingleInvitationService.mock.calls[0][1]).toBe('service1');
expect(getSingleInvitationService.mock.calls[0][2]).toBe('88a1ed39-5a98-43da-b66e-78e564ea72b0');
expect(getSingleInvitationService.mock.calls[0][3]).toBe('correlationId');
});
});
|
# https://leetcode.com/problems/cousins-in-binary-tree/
from __future__ import annotations
from collections import deque
from typing import Optional
class Solution:
def isCousins(self, root: Optional[TreeNode], x: int, y: int) -> bool:
queue = deque()
queue.append(root)
parent = {}
found = 0
while queue:
levelSize = len(queue)
for _ in range(levelSize):
cur = queue.popleft()
if cur.val == x or cur.val == y:
found += 1
if cur.left:
parent[cur.left.val] = cur
queue.append(cur.left)
if cur.right:
parent[cur.right.val] = cur
queue.append(cur.right)
if found == 2:
return parent[x] != parent[y]
elif found > 0:
return False
return False
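# Minimal usage sketch (illustrative, not part of the original submission).
# LeetCode normally supplies TreeNode; a bare-bones stand-in is defined here
# only so the example is self-contained.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
if __name__ == "__main__":
    # Tree:    1
    #        /   \
    #       2     3
    #        \     \
    #         4     5
    root = TreeNode(1, TreeNode(2, None, TreeNode(4)), TreeNode(3, None, TreeNode(5)))
    assert Solution().isCousins(root, 4, 5) is True   # same depth, different parents
    assert Solution().isCousins(root, 2, 3) is False  # siblings share the root as parent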
|
def isAbundant(input_no):
    # Sum the proper divisors of input_no; an abundant number's proper
    # divisors add up to more than the number itself.
    total = 0
    for i in range(1, input_no):
        if input_no % i == 0:
            total = total + i
            if total > input_no:
                # Early exit: the divisor sum already exceeds the number.
                return True
    return total > input_no
# Search upward from 1 for the first odd abundant number.
no = 1
while True:
    if isAbundant(no) and no % 2 != 0:
        print("Odd abundant no : ", no)
        break
    no += 1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
__author__ = 'Guy Kisel'
__email__ = '[email protected]'
__version__ = '4.2.0'
|
import React, { useEffect, useContext, useState } from "react";
import { Context } from "../../Context";
import { likedByAuthor } from "../../ApiUtils";
import LikeComponent from "../Likes/LikeComponent";
const LikedByAuthor = (props) => {
const context = useContext(Context);
const [likes, updateLikes] = useState([]);
useEffect(() => {
if (context.user) {
getLikedByAuthor();
}
}, []);
const getLikedByAuthor = async () => {
try {
const response = await likedByAuthor(context.cookie, context.user);
updateLikes(response.data.items);
} catch (err) {
props.updateError(err);
}
};
return (
<div>
{likes.map((like, index) => {
return (
<div key={index}>
<LikeComponent contents={like} />
</div>
);
})}
</div>
);
};
export default LikedByAuthor;
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
"""Lightweight user-defined software stacks for HPC."""
homepage = "https://hpc.github.io/charliecloud"
url = "https://github.com/hpc/charliecloud/releases/download/v0.9.10/charliecloud-0.9.10.tar.gz"
git = "https://github.com/hpc/charliecloud.git"
version('master', branch='master')
version('0.12', sha256='8a90f33406905cee935b5673a1159232b0b71845f4b6a26d28ca88f5d3f55891')
version('0.11', sha256='942d3c7a74c978fd7420cb2b255e618f4f0acaafb6025160bc3a4deeb687ef3c')
version('0.10', sha256='5cf00b170e7568750ca0b828c43c0857c39674860b480d757057450d69f1a21e')
version('0.9.10', sha256='44e821b62f9c447749d3ed0d2b2e44d374153058814704a5543e83f42db2a45a')
version('0.9.9', sha256='2624c5a0b19a01c9bca0acf873ceeaec401b9185a23e9108fadbcee0b9d74736')
version('0.9.8', sha256='903bcce05b19501b5524ef57a929d2f4c6ddeacb0e8443fcb2fe6963e2f29229')
version('0.9.7', sha256='ec80a4b9bef3a2161a783e11d99cc58e09a32dfbc8a6234c8f7ce7fa76e2f62d')
version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
version('0.2.4', sha256='b9a8ff54b9d296e30b2cf5d64a7e732ad09e14b989645aaa5eee8a1dc7ee34e5')
depends_on('[email protected]:', type=('build', 'run'))
# experimental builder (ch-grow)
variant('builder', default=False, description='Bundle dependencies for unprivileged builder (ch-grow)')
depends_on('py-lark-parser', type='run', when='+builder')
depends_on('skopeo', type='run', when='+builder')
depends_on('umoci', type='run', when='+builder')
# man pages and html docs
variant('docs', default=False, description='Build man pages and html docs')
depends_on('rsync', type='build', when='+docs')
depends_on('py-sphinx', type='build', when='+docs')
depends_on('py-sphinx-rtd-theme', type='build', when='+docs')
# bash automated testing harness (bats)
depends_on('[email protected]', type='test')
def url_for_version(self, version):
if version >= Version('0.9.8'):
url = "https://github.com/hpc/charliecloud/releases/download/v{0}/charliecloud-{0}.tar.gz"
else:
url = "https://github.com/hpc/charliecloud/archive/v{0}.tar.gz"
return url.format(version)
@property
def install_targets(self):
return ['install', 'PREFIX=%s' % self.prefix]
|
/**
* Caches the passed value in the passed class instance, so that it doesn't have
* to be recomputed. Note that the `propertyName` must be the same as the getter,
* so that the getter is overwritten.
* @example
* ```js
* class Example {
* get myProperty() {
* const result = someExpensiveComputation();
* return cacheResult(this, `myProperty`, result);
* }
* }
* ```
* @see https://humanwhocodes.com/blog/2021/04/lazy-loading-property-pattern-javascript/
* @param {Object} classInstance The instance where the value should be cached.
* @param {String} propertyName The name of the property to find this value.
* @param {*} value The value to be cached.
* @returns {*} The cached value, for easier chaining.
*/
export default function cacheResult(classInstance, propertyName, value) {
Object.defineProperty(classInstance, propertyName, {
value,
writable: false,
configurable: false,
enumerable: false,
});
return value;
}
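// Illustrative usage (assumes the hypothetical `Example` class from the JSDoc
// above): the own data property defined by cacheResult shadows the prototype
// getter, so the expensive computation runs only on the first access.
//   const example = new Example();
//   example.myProperty; // runs someExpensiveComputation() and caches the result
//   example.myProperty; // returns the cached value without recomputing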
|
import { getApp, _getProvider, _registerComponent, registerVersion } from '@firebase/app';
import { Component } from '@firebase/component';
import { __awaiter, __generator, __spreadArray, __read, __values, __assign } from 'tslib';
import { ErrorFactory, FirebaseError } from '@firebase/util';
import { openDb } from 'idb';
var name = "@firebase/installations-exp";
var version = "0.0.900-exp.894b5da5a";
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var PENDING_TIMEOUT_MS = 10000;
var PACKAGE_VERSION = "w:" + version;
var INTERNAL_AUTH_VERSION = 'FIS_v2';
var INSTALLATIONS_API_URL = 'https://firebaseinstallations.googleapis.com/v1';
var TOKEN_EXPIRATION_BUFFER = 60 * 60 * 1000; // One hour
var SERVICE = 'installations';
var SERVICE_NAME = 'Installations';
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _a;
var ERROR_DESCRIPTION_MAP = (_a = {},
_a["missing-app-config-values" /* MISSING_APP_CONFIG_VALUES */] = 'Missing App configuration value: "{$valueName}"',
_a["not-registered" /* NOT_REGISTERED */] = 'Firebase Installation is not registered.',
_a["installation-not-found" /* INSTALLATION_NOT_FOUND */] = 'Firebase Installation not found.',
_a["request-failed" /* REQUEST_FAILED */] = '{$requestName} request failed with error "{$serverCode} {$serverStatus}: {$serverMessage}"',
_a["app-offline" /* APP_OFFLINE */] = 'Could not process request. Application offline.',
_a["delete-pending-registration" /* DELETE_PENDING_REGISTRATION */] = "Can't delete installation while there is a pending registration request.",
_a);
var ERROR_FACTORY = new ErrorFactory(SERVICE, SERVICE_NAME, ERROR_DESCRIPTION_MAP);
/** Returns true if error is a FirebaseError that is based on an error from the server. */
function isServerError(error) {
return (error instanceof FirebaseError &&
error.code.includes("request-failed" /* REQUEST_FAILED */));
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function getInstallationsEndpoint(_a) {
var projectId = _a.projectId;
return INSTALLATIONS_API_URL + "/projects/" + projectId + "/installations";
}
function extractAuthTokenInfoFromResponse(response) {
return {
token: response.token,
requestStatus: 2 /* COMPLETED */,
expiresIn: getExpiresInFromResponseExpiresIn(response.expiresIn),
creationTime: Date.now()
};
}
function getErrorFromResponse(requestName, response) {
return __awaiter(this, void 0, void 0, function () {
var responseJson, errorData;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, response.json()];
case 1:
responseJson = _a.sent();
errorData = responseJson.error;
return [2 /*return*/, ERROR_FACTORY.create("request-failed" /* REQUEST_FAILED */, {
requestName: requestName,
serverCode: errorData.code,
serverMessage: errorData.message,
serverStatus: errorData.status
})];
}
});
});
}
function getHeaders(_a) {
var apiKey = _a.apiKey;
return new Headers({
'Content-Type': 'application/json',
Accept: 'application/json',
'x-goog-api-key': apiKey
});
}
function getHeadersWithAuth(appConfig, _a) {
var refreshToken = _a.refreshToken;
var headers = getHeaders(appConfig);
headers.append('Authorization', getAuthorizationHeader(refreshToken));
return headers;
}
/**
* Calls the passed in fetch wrapper and returns the response.
* If the returned response has a status of 5xx, re-runs the function once and
* returns the response.
*/
function retryIfServerError(fn) {
return __awaiter(this, void 0, void 0, function () {
var result;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, fn()];
case 1:
result = _a.sent();
if (result.status >= 500 && result.status < 600) {
// Internal Server Error. Retry request.
return [2 /*return*/, fn()];
}
return [2 /*return*/, result];
}
});
});
}
function getExpiresInFromResponseExpiresIn(responseExpiresIn) {
// This works because the server will never respond with fractions of a second.
return Number(responseExpiresIn.replace('s', '000'));
}
function getAuthorizationHeader(refreshToken) {
return INTERNAL_AUTH_VERSION + " " + refreshToken;
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function createInstallationRequest(appConfig, _a) {
var fid = _a.fid;
return __awaiter(this, void 0, void 0, function () {
var endpoint, headers, body, request, response, responseValue, registeredInstallationEntry;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
endpoint = getInstallationsEndpoint(appConfig);
headers = getHeaders(appConfig);
body = {
fid: fid,
authVersion: INTERNAL_AUTH_VERSION,
appId: appConfig.appId,
sdkVersion: PACKAGE_VERSION
};
request = {
method: 'POST',
headers: headers,
body: JSON.stringify(body)
};
return [4 /*yield*/, retryIfServerError(function () { return fetch(endpoint, request); })];
case 1:
response = _b.sent();
if (!response.ok) return [3 /*break*/, 3];
return [4 /*yield*/, response.json()];
case 2:
responseValue = _b.sent();
registeredInstallationEntry = {
fid: responseValue.fid || fid,
registrationStatus: 2 /* COMPLETED */,
refreshToken: responseValue.refreshToken,
authToken: extractAuthTokenInfoFromResponse(responseValue.authToken)
};
return [2 /*return*/, registeredInstallationEntry];
case 3: return [4 /*yield*/, getErrorFromResponse('Create Installation', response)];
case 4: throw _b.sent();
}
});
});
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Returns a promise that resolves after given time passes. */
function sleep(ms) {
return new Promise(function (resolve) {
setTimeout(resolve, ms);
});
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function bufferToBase64UrlSafe(array) {
var b64 = btoa(String.fromCharCode.apply(String, __spreadArray([], __read(array))));
return b64.replace(/\+/g, '-').replace(/\//g, '_');
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var VALID_FID_PATTERN = /^[cdef][\w-]{21}$/;
var INVALID_FID = '';
/**
* Generates a new FID using random values from Web Crypto API.
* Returns an empty string if FID generation fails for any reason.
*/
function generateFid() {
try {
// A valid FID has exactly 22 base64 characters, which is 132 bits, or 16.5
        // bytes. Our implementation generates a 17 byte array instead.
var fidByteArray = new Uint8Array(17);
var crypto_1 = self.crypto || self.msCrypto;
crypto_1.getRandomValues(fidByteArray);
// Replace the first 4 random bits with the constant FID header of 0b0111.
fidByteArray[0] = 112 + (fidByteArray[0] % 16);
var fid = encode(fidByteArray);
return VALID_FID_PATTERN.test(fid) ? fid : INVALID_FID;
}
catch (_a) {
// FID generation errored
return INVALID_FID;
}
}
/** Converts a FID Uint8Array to a base64 string representation. */
function encode(fidByteArray) {
var b64String = bufferToBase64UrlSafe(fidByteArray);
// Remove the 23rd character that was added because of the extra 4 bits at the
// end of our 17 byte array, and the '=' padding.
return b64String.substr(0, 22);
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Returns a string key that can be used to identify the app. */
function getKey(appConfig) {
return appConfig.appName + "!" + appConfig.appId;
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var fidChangeCallbacks = new Map();
/**
* Calls the onIdChange callbacks with the new FID value, and broadcasts the
* change to other tabs.
*/
function fidChanged(appConfig, fid) {
var key = getKey(appConfig);
callFidChangeCallbacks(key, fid);
broadcastFidChange(key, fid);
}
function addCallback(appConfig, callback) {
// Open the broadcast channel if it's not already open,
// to be able to listen to change events from other tabs.
getBroadcastChannel();
var key = getKey(appConfig);
var callbackSet = fidChangeCallbacks.get(key);
if (!callbackSet) {
callbackSet = new Set();
fidChangeCallbacks.set(key, callbackSet);
}
callbackSet.add(callback);
}
function removeCallback(appConfig, callback) {
var key = getKey(appConfig);
var callbackSet = fidChangeCallbacks.get(key);
if (!callbackSet) {
return;
}
callbackSet.delete(callback);
if (callbackSet.size === 0) {
fidChangeCallbacks.delete(key);
}
// Close broadcast channel if there are no more callbacks.
closeBroadcastChannel();
}
function callFidChangeCallbacks(key, fid) {
var e_1, _a;
var callbacks = fidChangeCallbacks.get(key);
if (!callbacks) {
return;
}
try {
for (var callbacks_1 = __values(callbacks), callbacks_1_1 = callbacks_1.next(); !callbacks_1_1.done; callbacks_1_1 = callbacks_1.next()) {
var callback = callbacks_1_1.value;
callback(fid);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (callbacks_1_1 && !callbacks_1_1.done && (_a = callbacks_1.return)) _a.call(callbacks_1);
}
finally { if (e_1) throw e_1.error; }
}
}
function broadcastFidChange(key, fid) {
var channel = getBroadcastChannel();
if (channel) {
channel.postMessage({ key: key, fid: fid });
}
closeBroadcastChannel();
}
var broadcastChannel = null;
/** Opens and returns a BroadcastChannel if it is supported by the browser. */
function getBroadcastChannel() {
if (!broadcastChannel && 'BroadcastChannel' in self) {
broadcastChannel = new BroadcastChannel('[Firebase] FID Change');
broadcastChannel.onmessage = function (e) {
callFidChangeCallbacks(e.data.key, e.data.fid);
};
}
return broadcastChannel;
}
function closeBroadcastChannel() {
if (fidChangeCallbacks.size === 0 && broadcastChannel) {
broadcastChannel.close();
broadcastChannel = null;
}
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var DATABASE_NAME = 'firebase-installations-database';
var DATABASE_VERSION = 1;
var OBJECT_STORE_NAME = 'firebase-installations-store';
var dbPromise = null;
function getDbPromise() {
if (!dbPromise) {
dbPromise = openDb(DATABASE_NAME, DATABASE_VERSION, function (upgradeDB) {
// We don't use 'break' in this switch statement, the fall-through
// behavior is what we want, because if there are multiple versions between
// the old version and the current version, we want ALL the migrations
// that correspond to those versions to run, not only the last one.
// eslint-disable-next-line default-case
switch (upgradeDB.oldVersion) {
case 0:
upgradeDB.createObjectStore(OBJECT_STORE_NAME);
}
});
}
return dbPromise;
}
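/*
 * Editor's sketch (not part of the original bundle): the switch above relies on
 * fall-through, so a hypothetical future DATABASE_VERSION = 2 migration would add
 * its case after the existing one without a `break`, letting a client still on
 * version 0 run both migrations in order. The store name in case 1 is illustrative
 * only, and this helper is never called by the SDK.
 */
function exampleUpgradeCallbackForVersion2(upgradeDB) {
    // eslint-disable-next-line default-case
    switch (upgradeDB.oldVersion) {
        case 0:
            upgradeDB.createObjectStore(OBJECT_STORE_NAME);
        // falls through to apply the (hypothetical) version 2 migration as well
        case 1:
            upgradeDB.createObjectStore('hypothetical-new-store');
    }
}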
/** Assigns or overwrites the record for the given key with the given value. */
function set(appConfig, value) {
return __awaiter(this, void 0, void 0, function () {
var key, db, tx, objectStore, oldValue;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
key = getKey(appConfig);
return [4 /*yield*/, getDbPromise()];
case 1:
db = _a.sent();
tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
objectStore = tx.objectStore(OBJECT_STORE_NAME);
return [4 /*yield*/, objectStore.get(key)];
case 2:
oldValue = _a.sent();
return [4 /*yield*/, objectStore.put(value, key)];
case 3:
_a.sent();
return [4 /*yield*/, tx.complete];
case 4:
_a.sent();
if (!oldValue || oldValue.fid !== value.fid) {
fidChanged(appConfig, value.fid);
}
return [2 /*return*/, value];
}
});
});
}
/** Removes record(s) from the objectStore that match the given key. */
function remove(appConfig) {
return __awaiter(this, void 0, void 0, function () {
var key, db, tx;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
key = getKey(appConfig);
return [4 /*yield*/, getDbPromise()];
case 1:
db = _a.sent();
tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
return [4 /*yield*/, tx.objectStore(OBJECT_STORE_NAME).delete(key)];
case 2:
_a.sent();
return [4 /*yield*/, tx.complete];
case 3:
_a.sent();
return [2 /*return*/];
}
});
});
}
/**
* Atomically updates a record with the result of updateFn, which gets
* called with the current value. If newValue is undefined, the record is
* deleted instead.
* @return Updated value
*/
function update(appConfig, updateFn) {
return __awaiter(this, void 0, void 0, function () {
var key, db, tx, store, oldValue, newValue;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
key = getKey(appConfig);
return [4 /*yield*/, getDbPromise()];
case 1:
db = _a.sent();
tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
store = tx.objectStore(OBJECT_STORE_NAME);
return [4 /*yield*/, store.get(key)];
case 2:
oldValue = _a.sent();
newValue = updateFn(oldValue);
if (!(newValue === undefined)) return [3 /*break*/, 4];
return [4 /*yield*/, store.delete(key)];
case 3:
_a.sent();
return [3 /*break*/, 6];
case 4: return [4 /*yield*/, store.put(newValue, key)];
case 5:
_a.sent();
_a.label = 6;
case 6: return [4 /*yield*/, tx.complete];
case 7:
_a.sent();
if (newValue && (!oldValue || oldValue.fid !== newValue.fid)) {
fidChanged(appConfig, newValue.fid);
}
return [2 /*return*/, newValue];
}
});
});
}
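/*
 * Editor's sketch (not part of the original bundle): how `update` is meant to be
 * called. The updateFn receives the stored record (or undefined) and returns the
 * value to persist; returning undefined deletes the record instead. The appConfig
 * literal below is illustrative only, and this helper is never called by the SDK.
 */
function exampleResetRegistrationStatus() {
    var exampleAppConfig = {
        appName: 'example-app',
        appId: '1:1234567890:web:abc123',
        projectId: 'example-project',
        apiKey: 'example-api-key'
    };
    return update(exampleAppConfig, function (oldEntry) {
        if (!oldEntry) {
            // Nothing is stored yet; returning undefined keeps it that way.
            return undefined;
        }
        // Keep the FID but mark the registration as not started.
        return { fid: oldEntry.fid, registrationStatus: 0 /* NOT_STARTED */ };
    });
}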
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Updates and returns the InstallationEntry from the database.
* Also triggers a registration request if it is necessary and possible.
*/
function getInstallationEntry(appConfig) {
return __awaiter(this, void 0, void 0, function () {
var registrationPromise, installationEntry;
var _a;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, update(appConfig, function (oldEntry) {
var installationEntry = updateOrCreateInstallationEntry(oldEntry);
var entryWithPromise = triggerRegistrationIfNecessary(appConfig, installationEntry);
registrationPromise = entryWithPromise.registrationPromise;
return entryWithPromise.installationEntry;
})];
case 1:
installationEntry = _b.sent();
if (!(installationEntry.fid === INVALID_FID)) return [3 /*break*/, 3];
_a = {};
return [4 /*yield*/, registrationPromise];
case 2:
// FID generation failed. Waiting for the FID from the server.
return [2 /*return*/, (_a.installationEntry = _b.sent(), _a)];
case 3: return [2 /*return*/, {
installationEntry: installationEntry,
registrationPromise: registrationPromise
}];
}
});
});
}
/**
* Creates a new Installation Entry if one does not exist.
* Also clears timed out pending requests.
*/
function updateOrCreateInstallationEntry(oldEntry) {
var entry = oldEntry || {
fid: generateFid(),
registrationStatus: 0 /* NOT_STARTED */
};
return clearTimedOutRequest(entry);
}
/**
* If the Firebase Installation is not registered yet, this will trigger the
* registration and return an InProgressInstallationEntry.
*
* If registrationPromise does not exist, the installationEntry is guaranteed
* to be registered.
*/
function triggerRegistrationIfNecessary(appConfig, installationEntry) {
if (installationEntry.registrationStatus === 0 /* NOT_STARTED */) {
if (!navigator.onLine) {
// Registration required but app is offline.
var registrationPromiseWithError = Promise.reject(ERROR_FACTORY.create("app-offline" /* APP_OFFLINE */));
return {
installationEntry: installationEntry,
registrationPromise: registrationPromiseWithError
};
}
// Try registering. Change status to IN_PROGRESS.
var inProgressEntry = {
fid: installationEntry.fid,
registrationStatus: 1 /* IN_PROGRESS */,
registrationTime: Date.now()
};
var registrationPromise = registerInstallation(appConfig, inProgressEntry);
return { installationEntry: inProgressEntry, registrationPromise: registrationPromise };
}
else if (installationEntry.registrationStatus === 1 /* IN_PROGRESS */) {
return {
installationEntry: installationEntry,
registrationPromise: waitUntilFidRegistration(appConfig)
};
}
else {
return { installationEntry: installationEntry };
}
}
/** This will be executed only once for each new Firebase Installation. */
function registerInstallation(appConfig, installationEntry) {
return __awaiter(this, void 0, void 0, function () {
var registeredInstallationEntry, e_1;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
_a.trys.push([0, 2, , 7]);
return [4 /*yield*/, createInstallationRequest(appConfig, installationEntry)];
case 1:
registeredInstallationEntry = _a.sent();
return [2 /*return*/, set(appConfig, registeredInstallationEntry)];
case 2:
e_1 = _a.sent();
if (!(isServerError(e_1) && e_1.customData.serverCode === 409)) return [3 /*break*/, 4];
// Server returned a "FID can not be used" error.
// Generate a new ID next time.
return [4 /*yield*/, remove(appConfig)];
case 3:
// Server returned a "FID can not be used" error.
// Generate a new ID next time.
_a.sent();
return [3 /*break*/, 6];
case 4:
// Registration failed. Set FID as not registered.
return [4 /*yield*/, set(appConfig, {
fid: installationEntry.fid,
registrationStatus: 0 /* NOT_STARTED */
})];
case 5:
// Registration failed. Set FID as not registered.
_a.sent();
_a.label = 6;
case 6: throw e_1;
case 7: return [2 /*return*/];
}
});
});
}
/** Call if FID registration is pending in another request. */
function waitUntilFidRegistration(appConfig) {
return __awaiter(this, void 0, void 0, function () {
var entry, _a, installationEntry, registrationPromise;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, updateInstallationRequest(appConfig)];
case 1:
entry = _b.sent();
_b.label = 2;
case 2:
if (!(entry.registrationStatus === 1 /* IN_PROGRESS */)) return [3 /*break*/, 5];
// createInstallation request still in progress.
return [4 /*yield*/, sleep(100)];
case 3:
// createInstallation request still in progress.
_b.sent();
return [4 /*yield*/, updateInstallationRequest(appConfig)];
case 4:
entry = _b.sent();
return [3 /*break*/, 2];
case 5:
if (!(entry.registrationStatus === 0 /* NOT_STARTED */)) return [3 /*break*/, 7];
return [4 /*yield*/, getInstallationEntry(appConfig)];
case 6:
_a = _b.sent(), installationEntry = _a.installationEntry, registrationPromise = _a.registrationPromise;
if (registrationPromise) {
return [2 /*return*/, registrationPromise];
}
else {
// if there is no registrationPromise, entry is registered.
return [2 /*return*/, installationEntry];
}
case 7: return [2 /*return*/, entry];
}
});
});
}
/**
* Called only if there is a CreateInstallation request in progress.
*
* Updates the InstallationEntry in the DB based on the status of the
* CreateInstallation request.
*
* Returns the updated InstallationEntry.
*/
function updateInstallationRequest(appConfig) {
return update(appConfig, function (oldEntry) {
if (!oldEntry) {
throw ERROR_FACTORY.create("installation-not-found" /* INSTALLATION_NOT_FOUND */);
}
return clearTimedOutRequest(oldEntry);
});
}
function clearTimedOutRequest(entry) {
if (hasInstallationRequestTimedOut(entry)) {
return {
fid: entry.fid,
registrationStatus: 0 /* NOT_STARTED */
};
}
return entry;
}
function hasInstallationRequestTimedOut(installationEntry) {
return (installationEntry.registrationStatus === 1 /* IN_PROGRESS */ &&
installationEntry.registrationTime + PENDING_TIMEOUT_MS < Date.now());
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function generateAuthTokenRequest(_a, installationEntry) {
var appConfig = _a.appConfig, platformLoggerProvider = _a.platformLoggerProvider;
return __awaiter(this, void 0, void 0, function () {
var endpoint, headers, platformLogger, body, request, response, responseValue, completedAuthToken;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
endpoint = getGenerateAuthTokenEndpoint(appConfig, installationEntry);
headers = getHeadersWithAuth(appConfig, installationEntry);
platformLogger = platformLoggerProvider.getImmediate({
optional: true
});
if (platformLogger) {
headers.append('x-firebase-client', platformLogger.getPlatformInfoString());
}
body = {
installation: {
sdkVersion: PACKAGE_VERSION
}
};
request = {
method: 'POST',
headers: headers,
body: JSON.stringify(body)
};
return [4 /*yield*/, retryIfServerError(function () { return fetch(endpoint, request); })];
case 1:
response = _b.sent();
if (!response.ok) return [3 /*break*/, 3];
return [4 /*yield*/, response.json()];
case 2:
responseValue = _b.sent();
completedAuthToken = extractAuthTokenInfoFromResponse(responseValue);
return [2 /*return*/, completedAuthToken];
case 3: return [4 /*yield*/, getErrorFromResponse('Generate Auth Token', response)];
case 4: throw _b.sent();
}
});
});
}
function getGenerateAuthTokenEndpoint(appConfig, _a) {
var fid = _a.fid;
return getInstallationsEndpoint(appConfig) + "/" + fid + "/authTokens:generate";
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Returns a valid authentication token for the installation. Generates a new
 * token if one doesn't exist, is expired, or is about to expire.
*
* Should only be called if the Firebase Installation is registered.
*/
function refreshAuthToken(installations, forceRefresh) {
if (forceRefresh === void 0) { forceRefresh = false; }
return __awaiter(this, void 0, void 0, function () {
var tokenPromise, entry, authToken, _a;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, update(installations.appConfig, function (oldEntry) {
if (!isEntryRegistered(oldEntry)) {
throw ERROR_FACTORY.create("not-registered" /* NOT_REGISTERED */);
}
var oldAuthToken = oldEntry.authToken;
if (!forceRefresh && isAuthTokenValid(oldAuthToken)) {
// There is a valid token in the DB.
return oldEntry;
}
else if (oldAuthToken.requestStatus === 1 /* IN_PROGRESS */) {
// There already is a token request in progress.
tokenPromise = waitUntilAuthTokenRequest(installations, forceRefresh);
return oldEntry;
}
else {
// No token or token expired.
if (!navigator.onLine) {
throw ERROR_FACTORY.create("app-offline" /* APP_OFFLINE */);
}
var inProgressEntry = makeAuthTokenRequestInProgressEntry(oldEntry);
tokenPromise = fetchAuthTokenFromServer(installations, inProgressEntry);
return inProgressEntry;
}
})];
case 1:
entry = _b.sent();
if (!tokenPromise) return [3 /*break*/, 3];
return [4 /*yield*/, tokenPromise];
case 2:
_a = _b.sent();
return [3 /*break*/, 4];
case 3:
_a = entry.authToken;
_b.label = 4;
case 4:
authToken = _a;
return [2 /*return*/, authToken];
}
});
});
}
/**
* Call only if FID is registered and Auth Token request is in progress.
*
* Waits until the current pending request finishes. If the request times out,
* tries once in this thread as well.
*/
function waitUntilAuthTokenRequest(installations, forceRefresh) {
return __awaiter(this, void 0, void 0, function () {
var entry, authToken;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, updateAuthTokenRequest(installations.appConfig)];
case 1:
entry = _a.sent();
_a.label = 2;
case 2:
if (!(entry.authToken.requestStatus === 1 /* IN_PROGRESS */)) return [3 /*break*/, 5];
// generateAuthToken still in progress.
return [4 /*yield*/, sleep(100)];
case 3:
// generateAuthToken still in progress.
_a.sent();
return [4 /*yield*/, updateAuthTokenRequest(installations.appConfig)];
case 4:
entry = _a.sent();
return [3 /*break*/, 2];
case 5:
authToken = entry.authToken;
if (authToken.requestStatus === 0 /* NOT_STARTED */) {
// The request timed out or failed in a different call. Try again.
return [2 /*return*/, refreshAuthToken(installations, forceRefresh)];
}
else {
return [2 /*return*/, authToken];
}
}
});
});
}
/**
* Called only if there is a GenerateAuthToken request in progress.
*
* Updates the InstallationEntry in the DB based on the status of the
* GenerateAuthToken request.
*
* Returns the updated InstallationEntry.
*/
function updateAuthTokenRequest(appConfig) {
return update(appConfig, function (oldEntry) {
if (!isEntryRegistered(oldEntry)) {
throw ERROR_FACTORY.create("not-registered" /* NOT_REGISTERED */);
}
var oldAuthToken = oldEntry.authToken;
if (hasAuthTokenRequestTimedOut(oldAuthToken)) {
return __assign(__assign({}, oldEntry), { authToken: { requestStatus: 0 /* NOT_STARTED */ } });
}
return oldEntry;
});
}
function fetchAuthTokenFromServer(installations, installationEntry) {
return __awaiter(this, void 0, void 0, function () {
var authToken, updatedInstallationEntry, e_1, updatedInstallationEntry;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
_a.trys.push([0, 3, , 8]);
return [4 /*yield*/, generateAuthTokenRequest(installations, installationEntry)];
case 1:
authToken = _a.sent();
updatedInstallationEntry = __assign(__assign({}, installationEntry), { authToken: authToken });
return [4 /*yield*/, set(installations.appConfig, updatedInstallationEntry)];
case 2:
_a.sent();
return [2 /*return*/, authToken];
case 3:
e_1 = _a.sent();
if (!(isServerError(e_1) &&
(e_1.customData.serverCode === 401 || e_1.customData.serverCode === 404))) return [3 /*break*/, 5];
                    // Server returned a "FID not found" or an "Invalid authentication" error.
// Generate a new ID next time.
return [4 /*yield*/, remove(installations.appConfig)];
case 4:
                    // Server returned a "FID not found" or an "Invalid authentication" error.
// Generate a new ID next time.
_a.sent();
return [3 /*break*/, 7];
case 5:
updatedInstallationEntry = __assign(__assign({}, installationEntry), { authToken: { requestStatus: 0 /* NOT_STARTED */ } });
return [4 /*yield*/, set(installations.appConfig, updatedInstallationEntry)];
case 6:
_a.sent();
_a.label = 7;
case 7: throw e_1;
case 8: return [2 /*return*/];
}
});
});
}
function isEntryRegistered(installationEntry) {
return (installationEntry !== undefined &&
installationEntry.registrationStatus === 2 /* COMPLETED */);
}
function isAuthTokenValid(authToken) {
return (authToken.requestStatus === 2 /* COMPLETED */ &&
!isAuthTokenExpired(authToken));
}
function isAuthTokenExpired(authToken) {
var now = Date.now();
return (now < authToken.creationTime ||
authToken.creationTime + authToken.expiresIn < now + TOKEN_EXPIRATION_BUFFER);
}
/** Returns an updated InstallationEntry with an InProgressAuthToken. */
function makeAuthTokenRequestInProgressEntry(oldEntry) {
var inProgressAuthToken = {
requestStatus: 1 /* IN_PROGRESS */,
requestTime: Date.now()
};
return __assign(__assign({}, oldEntry), { authToken: inProgressAuthToken });
}
function hasAuthTokenRequestTimedOut(authToken) {
return (authToken.requestStatus === 1 /* IN_PROGRESS */ &&
authToken.requestTime + PENDING_TIMEOUT_MS < Date.now());
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Creates a Firebase Installation if there isn't one for the app and
* returns the Installation ID.
* @param installations - The `Installations` instance.
*
* @public
*/
function getId(installations) {
return __awaiter(this, void 0, void 0, function () {
var installationsImpl, _a, installationEntry, registrationPromise;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
installationsImpl = installations;
return [4 /*yield*/, getInstallationEntry(installationsImpl.appConfig)];
case 1:
_a = _b.sent(), installationEntry = _a.installationEntry, registrationPromise = _a.registrationPromise;
if (registrationPromise) {
registrationPromise.catch(console.error);
}
else {
// If the installation is already registered, update the authentication
// token if needed.
refreshAuthToken(installationsImpl).catch(console.error);
}
return [2 /*return*/, installationEntry.fid];
}
});
});
}
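/*
 * Editor's sketch (not part of the original bundle): typical getId usage from
 * application code. `app` is assumed to be a FirebaseApp created elsewhere; this
 * helper is never called by the SDK itself.
 */
function exampleLogInstallationId(app) {
    var installations = getInstallations(app);
    return getId(installations)
        .then(function (fid) {
        console.log('Firebase Installation ID:', fid);
        return fid;
    })
        .catch(function (e) {
        console.error('Failed to read the installation ID:', e);
        return undefined;
    });
}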
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Returns an Installation auth token, identifying the current Firebase Installation.
* @param installations - The `Installations` instance.
* @param forceRefresh - Force refresh regardless of token expiration.
*
* @public
*/
function getToken(installations, forceRefresh) {
if (forceRefresh === void 0) { forceRefresh = false; }
return __awaiter(this, void 0, void 0, function () {
var installationsImpl, authToken;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
installationsImpl = installations;
return [4 /*yield*/, completeInstallationRegistration(installationsImpl.appConfig)];
case 1:
_a.sent();
return [4 /*yield*/, refreshAuthToken(installationsImpl, forceRefresh)];
case 2:
authToken = _a.sent();
return [2 /*return*/, authToken.token];
}
});
});
}
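/*
 * Editor's sketch (not part of the original bundle): fetching an installation auth
 * token, optionally forcing a refresh even if the cached token has not expired.
 * `installations` is assumed to come from getInstallations(app); this helper is
 * never called by the SDK itself.
 */
function exampleFetchAuthToken(installations, forceRefresh) {
    if (forceRefresh === void 0) { forceRefresh = false; }
    // getToken waits for any in-flight registration, then refreshes the token if needed.
    return getToken(installations, forceRefresh).then(function (token) {
        console.log('Installation auth token:', token);
        return token;
    });
}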
function completeInstallationRegistration(appConfig) {
return __awaiter(this, void 0, void 0, function () {
var registrationPromise;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, getInstallationEntry(appConfig)];
case 1:
registrationPromise = (_a.sent()).registrationPromise;
if (!registrationPromise) return [3 /*break*/, 3];
// A createInstallation request is in progress. Wait until it finishes.
return [4 /*yield*/, registrationPromise];
case 2:
// A createInstallation request is in progress. Wait until it finishes.
_a.sent();
_a.label = 3;
case 3: return [2 /*return*/];
}
});
});
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function deleteInstallationRequest(appConfig, installationEntry) {
return __awaiter(this, void 0, void 0, function () {
var endpoint, headers, request, response;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
endpoint = getDeleteEndpoint(appConfig, installationEntry);
headers = getHeadersWithAuth(appConfig, installationEntry);
request = {
method: 'DELETE',
headers: headers
};
return [4 /*yield*/, retryIfServerError(function () { return fetch(endpoint, request); })];
case 1:
response = _a.sent();
if (!!response.ok) return [3 /*break*/, 3];
return [4 /*yield*/, getErrorFromResponse('Delete Installation', response)];
case 2: throw _a.sent();
case 3: return [2 /*return*/];
}
});
});
}
function getDeleteEndpoint(appConfig, _a) {
var fid = _a.fid;
return getInstallationsEndpoint(appConfig) + "/" + fid;
}
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Deletes the Firebase Installation and all associated data.
* @param installations - The `Installations` instance.
*
* @public
*/
function deleteInstallations(installations) {
return __awaiter(this, void 0, void 0, function () {
var appConfig, entry;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
appConfig = installations.appConfig;
return [4 /*yield*/, update(appConfig, function (oldEntry) {
if (oldEntry && oldEntry.registrationStatus === 0 /* NOT_STARTED */) {
// Delete the unregistered entry without sending a deleteInstallation request.
return undefined;
}
return oldEntry;
})];
case 1:
entry = _a.sent();
if (!entry) return [3 /*break*/, 6];
if (!(entry.registrationStatus === 1 /* IN_PROGRESS */)) return [3 /*break*/, 2];
// Can't delete while trying to register.
throw ERROR_FACTORY.create("delete-pending-registration" /* DELETE_PENDING_REGISTRATION */);
case 2:
if (!(entry.registrationStatus === 2 /* COMPLETED */)) return [3 /*break*/, 6];
if (!!navigator.onLine) return [3 /*break*/, 3];
throw ERROR_FACTORY.create("app-offline" /* APP_OFFLINE */);
case 3: return [4 /*yield*/, deleteInstallationRequest(appConfig, entry)];
case 4:
_a.sent();
return [4 /*yield*/, remove(appConfig)];
case 5:
_a.sent();
_a.label = 6;
case 6: return [2 /*return*/];
}
});
});
}
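/*
 * Editor's sketch (not part of the original bundle): deleting the installation,
 * for example when a user opts out of data collection (the trigger is an
 * assumption). A new FID is minted the next time getId is called. This helper is
 * never called by the SDK itself.
 */
function exampleDeleteInstallation(installations) {
    return deleteInstallations(installations)
        .then(function () {
        console.log('Installation deleted; a new FID will be created on next use.');
    })
        .catch(function (e) {
        // e.g. the "app-offline" or "delete-pending-registration" errors created above.
        console.error('Could not delete the installation:', e);
    });
}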
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Sets a new callback that will get called when Installation ID changes.
* Returns an unsubscribe function that will remove the callback when called.
* @param installations - The `Installations` instance.
* @param callback - The callback function that is invoked when FID changes.
* @returns A function that can be called to unsubscribe.
*
* @public
*/
function onIdChange(installations, callback) {
var appConfig = installations.appConfig;
addCallback(appConfig, callback);
return function () {
removeCallback(appConfig, callback);
};
}
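/*
 * Editor's sketch (not part of the original bundle): subscribing to FID changes,
 * which are also broadcast to other tabs via the BroadcastChannel logic above,
 * and unsubscribing with the returned function. Never called by the SDK itself.
 */
function exampleWatchIdChanges(installations) {
    var unsubscribe = onIdChange(installations, function (fid) {
        console.log('Installation ID changed to', fid);
    });
    // Call the returned function to stop listening, e.g. on page teardown.
    return unsubscribe;
}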
/**
* @license
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Returns an instance of FirebaseInstallations associated with the given FirebaseApp instance.
* @param app - The `FirebaseApp` instance.
*
* @public
*/
function getInstallations(app) {
if (app === void 0) { app = getApp(); }
var installationsImpl = _getProvider(app, 'installations-exp').getImmediate();
return installationsImpl;
}
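/*
 * Editor's sketch (not part of the original bundle): obtaining the Installations
 * instance either for the default app (getApp() is used when no app is passed) or
 * for an explicitly provided FirebaseApp. Never called by the SDK itself.
 */
function exampleGetInstances(someOtherApp) {
    var defaultInstallations = getInstallations();
    var scopedInstallations = getInstallations(someOtherApp);
    return { defaultInstallations: defaultInstallations, scopedInstallations: scopedInstallations };
}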
/**
* @license
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function extractAppConfig(app) {
var e_1, _a;
if (!app || !app.options) {
throw getMissingValueError('App Configuration');
}
if (!app.name) {
throw getMissingValueError('App Name');
}
// Required app config keys
var configKeys = [
'projectId',
'apiKey',
'appId'
];
try {
for (var configKeys_1 = __values(configKeys), configKeys_1_1 = configKeys_1.next(); !configKeys_1_1.done; configKeys_1_1 = configKeys_1.next()) {
var keyName = configKeys_1_1.value;
if (!app.options[keyName]) {
throw getMissingValueError(keyName);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (configKeys_1_1 && !configKeys_1_1.done && (_a = configKeys_1.return)) _a.call(configKeys_1);
}
finally { if (e_1) throw e_1.error; }
}
return {
appName: app.name,
projectId: app.options.projectId,
apiKey: app.options.apiKey,
appId: app.options.appId
};
}
function getMissingValueError(valueName) {
return ERROR_FACTORY.create("missing-app-config-values" /* MISSING_APP_CONFIG_VALUES */, {
valueName: valueName
});
}
/**
* @license
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var INSTALLATIONS_NAME = 'installations-exp';
var INSTALLATIONS_NAME_INTERNAL = 'installations-exp-internal';
var publicFactory = function (container) {
var app = container.getProvider('app-exp').getImmediate();
// Throws if app isn't configured properly.
var appConfig = extractAppConfig(app);
var platformLoggerProvider = _getProvider(app, 'platform-logger');
var installationsImpl = {
app: app,
appConfig: appConfig,
platformLoggerProvider: platformLoggerProvider,
_delete: function () { return Promise.resolve(); }
};
return installationsImpl;
};
var internalFactory = function (container) {
var app = container.getProvider('app-exp').getImmediate();
// Internal FIS instance relies on public FIS instance.
var installations = _getProvider(app, INSTALLATIONS_NAME).getImmediate();
var installationsInternal = {
getId: function () { return getId(installations); },
getToken: function (forceRefresh) { return getToken(installations, forceRefresh); }
};
return installationsInternal;
};
function registerInstallations() {
_registerComponent(new Component(INSTALLATIONS_NAME, publicFactory, "PUBLIC" /* PUBLIC */));
_registerComponent(new Component(INSTALLATIONS_NAME_INTERNAL, internalFactory, "PRIVATE" /* PRIVATE */));
}
/**
* Firebase Installations
*
* @packageDocumentation
*/
registerInstallations();
registerVersion(name, version);
export { deleteInstallations, getId, getInstallations, getToken, onIdChange };
//# sourceMappingURL=index.esm.js.map
|
import stat from '../stat'
describe('stat', () => {
it('should be function', () => {
expect(typeof stat).toBe('function')
})
it('should have sync function', () => {
expect(typeof stat.sync).toBe('function')
})
})
|
import config from 'config';
import moment from 'moment';
import winston from 'winston';
// Log levels:
// { error: 0, warn: 1, info: 2, verbose: 3, debug: 4, silly: 5 }
function timestamp() {
return moment().format('YYYY-MM-DD HH:mm:ss.SSS Z');
}
function loggerFactory(label) {
return new winston.Logger({
transports: [
new winston.transports.Console({
colorize: true,
json: false,
stringify: true,
label,
level: config.get('log.level'),
timestamp,
}),
],
});
}
const log = {};
config.get('log.labels').forEach((label) => {
log[label] = loggerFactory(label);
});
Object.keys(log.default.levels).forEach((level) => {
log[level] = log.default[level];
});
export default log;
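// Editor's sketch (not part of the original module): typical consumption of the
// exported object. The './log' import path and the 'db' label are assumptions;
// only a 'default' label is actually required by the code above.
//
//   import log from './log';
//   log.info('server started');          // level shorthand copied from the default logger
//   log.db.warn('slow query detected');  // logger for an (assumed) 'db' label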
|
'use strict';
/**
* Module dependencies.
*/
var should = require('should'),
mongoose = require('mongoose'),
User = mongoose.model('User'),
Location = mongoose.model('Location');
/**
* Globals
*/
var user, lction;
/**
* Unit tests
*/
describe('Location Model Unit Tests:', function() {
beforeEach(function(done) {
user = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: '[email protected]',
username: 'username',
password: 'password'
});
user.save(function() {
lction = new Location({
name: 'Location Name',
latitude: 0,
longitude: 0,
user: user
});
done();
});
});
describe('Method Save', function() {
it('should be able to save without problems', function(done) {
return lction.save(function(err) {
should.not.exist(err);
done();
});
});
    it('should show an error when trying to save without a name', function(done) {
lction.name = '';
return lction.save(function(err) {
should.exist(err);
done();
});
});
    it('should show an error when trying to save without a latitude', function(done) {
lction.latitude = null;
return lction.save(function(err) {
should.exist(err);
done();
});
});
    it('should show an error when trying to save without a longitude', function(done) {
lction.longitude = null;
return lction.save(function(err) {
should.exist(err);
done();
});
});
    it('should show an error when trying to save with icon_number as a String', function(done) {
lction.icon_number = 'I am a String';
return lction.save(function(err) {
should.exist(err);
done();
});
});
});
  afterEach(function(done) {
    // Make sure the cleanup queries have finished before the next test starts.
    Location.remove().exec(function() {
      User.remove().exec(function() {
        done();
      });
    });
  });
}); |
import "core-js/modules/es.array.map.js";
import "core-js/modules/es.array.filter.js";
/**
* @license
* Copyright (c) 2021 Handsoncode. All rights reserved.
*/
import { invalidSimpleCellAddress } from './Cell';
import { NamedExpressionDependency } from './parser';
import { AbsoluteCellRange } from './AbsoluteCellRange';
/**
 * Converts dependencies from possibly relative addressing to absolute addressing.
 *
 * @param deps - list of addresses in R0C0 format
 * @param baseAddress - base address with regard to which the conversion is made
*/
export var absolutizeDependencies = function absolutizeDependencies(deps, baseAddress) {
return deps.map(function (dep) {
return dep.absolutize(baseAddress);
});
};
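/*
 * Editor's sketch (not part of the original module): `absolutizeDependencies`
 * only requires each dependency to expose an `absolutize(baseAddress)` method,
 * as the minimal hand-rolled stub below illustrates. Real dependencies are parser
 * address objects (e.g. cell or range addresses), not this plain object.
 */
var exampleAbsolutize = function exampleAbsolutize() {
  var baseAddress = { sheet: 0, col: 2, row: 5 };
  var relativeDep = {
    colOffset: -1,
    rowOffset: -2,
    absolutize: function absolutize(base) {
      return { sheet: base.sheet, col: base.col + this.colOffset, row: base.row + this.rowOffset };
    }
  };
  // Yields [{ sheet: 0, col: 1, row: 3 }] for this stub.
  return absolutizeDependencies([relativeDep], baseAddress);
};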
export var filterDependenciesOutOfScope = function filterDependenciesOutOfScope(deps) {
return deps.filter(function (dep) {
if (dep instanceof NamedExpressionDependency) {
return true;
}
if (dep instanceof AbsoluteCellRange) {
return !(invalidSimpleCellAddress(dep.start) || invalidSimpleCellAddress(dep.end));
} else {
return !invalidSimpleCellAddress(dep);
}
});
}; |
import { mapActions } from 'vuex';
const install = function(Vue, options = {}) {
Vue.mixin({
methods: {
...mapActions({
$delSession: 'delSession'
}),
$closeView() {
return this.$delSession(this.$route);
}
}
});
};
export default {
name: 'close-view',
install
};
|
import * as React from 'react';
export function VideoChatImage() {
return (
<svg viewBox="0 0 807.0639 517.11635">
<rect
x="0.34446"
y="0.45733"
width="806.71944"
height="516.65902"
fill="#e6e6e6"
/>
<rect x="15.46842" y={48} width="776.47152" height={456} fill="#fff" />
<rect width="806.71944" height="34.27239" fill="var(--bs-primary)" />
<circle cx="25.46849" cy="17.52186" r="6.35212" fill="#fff" />
<circle cx="49.57957" cy="17.52186" r="6.35212" fill="#fff" />
<circle cx="73.69064" cy="17.52186" r="6.35212" fill="#fff" />
<path
d="M454.49026,367.59359s2.91628,40.23814,1.5869,42.604,23.3486,44.87049,23.3486,44.87049l39.4535-36.976s-2.38919-51.14828.15247-53.45562S454.49026,367.59359,454.49026,367.59359Z"
transform="translate(-196.46805 -191.44183)"
fill="#a0616a"
/>
<circle cx="291.57256" cy="160.76167" r="46.11879" fill="#a0616a" />
<path
d="M484.46807,426.15l-4.73184-2.65872-23.34812-21.02912s-2.32553-.8674-2.67692,6.406l-.3514,7.27343L438.28584,426.349l-.69161,165.21582,104.25255,5.0367,4.38688-166.25236-23.542-15.71818-4.314-9.19615-26.51912,18.64212Z"
transform="translate(-196.46805 -191.44183)"
fill="#fff"
/>
<path
d="M588.19835,442.096l-11.5297,168.17575H402.5702C380.3846,584.0084,362.6653,448.86816,381.574,444.86312c6.17745-2.136,48.0606-15.292,48.0606-15.292s12.51276-7.90089,16.50572-14.98865a54.95391,54.95391,0,0,1,8.93246-11.51755l.0243-.0243.01207-.02422c.09712-.09711.18208-.19423.2792-.27919a.16306.16306,0,0,1,.085-.07282c.12134-.13348.24267-.25482.37623-.37623v.18208l-.886,30.754,33.35122,114.61733s9.39366-68.802,18.933-90.2108c5.37647-12.06371,8.19215-26.95525,9.62425-37.65967,1.10447-8.2892,1.38359-14.054,1.38359-14.054s.04852.06066.13348.16985v.01215a.0119.0119,0,0,0,.01215.01215c.14563.18208.37623.48549.71609.89809l.01207.01215c2.79146,3.49532,11.74815,14.07834,17.81641,14.36967,7.26984.35194,27.23436,14.68517,27.23436,14.68517C573.93793,435.32383,588.19835,442.096,588.19835,442.096Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M532.16649,570.61746s13.13566-45.538,32.29726-39.75193-1.87416,38.79163-1.87416,38.79163Z"
transform="translate(-196.46805 -191.44183)"
fill="#a0616a"
/>
<path
d="M455.84442,607.60742s-42.82947-20.295-34.03431-38.27525,37.984,8.09434,37.984,8.09434Z"
transform="translate(-196.46805 -191.44183)"
fill="#a0616a"
/>
<path
d="M472.99848,610.27178H462.71881a104.29179,104.29179,0,0,0-12.5492-2.30589c-.97091-.04852-1.28647-1.56559-1.16506-3.96867a75.4561,75.4561,0,0,1,1.954-11.78452c1.46855-6.55375,3.43466-13.64151,4.69683-18.15626v-.01215c.74031-2.63361,1.2258-4.38127,1.23795-4.63616.03637-.801,6.76006.30341,11.39622,1.153,2.43938.44905,4.2963.82528,4.2963.82528l.2306,21.88218Z"
transform="translate(-196.46805 -191.44183)"
fill="#fff"
/>
<path
d="M610.20188,590.18585c-2.96132,8.38638-5.09736,15.025-6.845,20.08593H468.7264c-.449-4.63608-1.0195-10.98354-1.38358-17.27025-.59468-10.06115-.69179-19.9646.94669-22.44042a1.15787,1.15787,0,0,1,.72816-.57038c3.14339-.619,38.6548-1.17728,63.80171-1.505,3.49533-.03637,6.78428-.07282,9.76989-.10919,9.066-.10926,15.2071-.16993,15.2071-.16993s.48542-16.60275,1.96611-37.76886c2.60931-37.21055,8.31349-88.536,19.9403-88.7544l8.49557.41267s4.84246.2306,6.91783,7.6217S618.64885,566.289,610.20188,590.18585Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M450.27,308.99647l-8.58692-3.60066s19.15842-17.3925,44.50852-14.57481l-6.80594-7.49684s17.788-5.51206,32.87364,11.942c7.93015,9.1752,17.09765,19.9527,22.5621,31.8585l9.36041.45224-4.28416,7.62463,13.29593,8.474L539.22694,341.594a39.47325,39.47325,0,0,1-2.3051,20.1717l-3.60049,8.5868s-11.485-26.0409-11.33147-29.21927l-.3839,7.94584s-8.395-7.57352-8.16466-12.341l-5.03622,5.33172-1.96144-8.85563-29.822,7.32008,5.113-6.92092-18.39063,1.50081,7.57371-8.395s-21.15827,9.33151-22.37528,18.0336c-1.21675,8.702-12.08372,19.32712-12.08372,19.32712l-4.38368-8.17616S426.65126,319.80192,450.27,308.99647Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M894.13062,611.2719H286.20289V268.44183H894.13062Zm-605.92773-2H892.13062V270.44183H288.20289Z"
transform="translate(-196.46805 -191.44183)"
fill="#e6e6e6"
/>
<path
d="M776.27484,310.95036c-5.74833-9.7129-17.12094-10.16556-17.12094-10.16556s-11.082-1.41714-18.191,13.37572c-6.62615,13.78816-15.7711,27.1009-1.47226,30.32862l2.5828-8.03878,1.5995,8.63726a55.94805,55.94805,0,0,0,6.11795.10455c15.31291-.49439,29.89616.14466,29.42659-5.35027C778.59331,332.53719,781.80588,320.29611,776.27484,310.95036Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M755.06876,344.04322a6.06476,6.06476,0,0,1-.52378,3.19543c-1.077,1.88157-3.70613,2.16324-5.28757,3.64624-1.66312,1.55958-1.85137,4.09068-1.93063,6.36925a17.60551,17.60551,0,0,0,.71623,7.06206,17.16845,17.16845,0,0,0,2.00576,3.4459q1.4483,2.08772,2.92339,4.15666a17.94591,17.94591,0,0,0,1.75046,2.19672,14.90829,14.90829,0,0,0,6.589,3.4955,8.23568,8.23568,0,0,0,4.78736.317,9.16513,9.16513,0,0,0,2.784-1.71184,14.91792,14.91792,0,0,0,3.15123-3.0543,14.2207,14.2207,0,0,0,1.92695-6.51309,140.54574,140.54574,0,0,0,.92641-20.24664,4.96216,4.96216,0,0,0-.38654-2.15121c-.80079-1.56365-2.9456-1.89749-4.07573-3.24253-.96624-1.15-1.00564-2.79116-.99739-4.29315l.021-3.82372a1.6018,1.6018,0,0,0-.18584-.93157,1.57828,1.57828,0,0,0-1.09684-.51173,36.9757,36.9757,0,0,0-7.19373-.48128c-1.95217.05966-5.69782-.10375-7.38581.98355-1.53218.98693.38621,3.71273.74111,5.24386A34.16351,34.16351,0,0,1,755.06876,344.04322Z"
transform="translate(-196.46805 -191.44183)"
fill="#ffb8b8"
/>
<circle cx="563.08149" cy="130.57993" r="15.45521" fill="#ffb8b8" />
<path
d="M750.7332,350.45222c-1.67008-.9691-3.93761-.18432-5.15385,1.31538a9.339,9.339,0,0,0-1.69568,5.43786,35.49883,35.49883,0,0,1-.41311,5.76892c-.92823,4.16986-4.37265,7.598-4.62881,11.86225-.252,4.19513,2.62474,7.84969,4.04,11.80691a19.68856,19.68856,0,0,1,1.05477,7.68523,3.08256,3.08256,0,0,0,.495,2.39828,2.73507,2.73507,0,0,0,1.06973.57634c6.82906,2.40261,14.20546,2.61047,21.44253,2.79378l11.95323.30276a3.80991,3.80991,0,0,0,2.1108-.35347c1.07239-.65753,1.21592-2.1314,1.23415-3.3892.157-10.83295-.30376-21.66431-.76443-32.4886a68.97338,68.97338,0,0,0-1.16874-11.70718,3.3587,3.3587,0,0,0-1.46138-2.49528,3.22459,3.22459,0,0,0-2.46354.16452c-3.7049,1.37785-15.4804,6.65868-16.641,4.779C758.396,352.72843,750.07748,352.08486,750.7332,350.45222Z"
transform="translate(-196.46805 -191.44183)"
fill="#fff"
/>
<path
d="M751.33531,350.71936c-.304.21534-.60379.44334-.89093.684a17.24794,17.24794,0,0,0-2.87117,3.02317,23.95023,23.95023,0,0,0-3.2512,6.135,59.39812,59.39812,0,0,0-2.59248,11.96607,28.87958,28.87958,0,0,0-.40957,4.50944c.08866,5.55657,3.37363,10.70358,3.37786,16.2644,0,.21112-.00423.418-.01689.62912a18.26465,18.26465,0,0,1-.32091,2.71073c-.24067,1.30471-.5869,2.59251-.92891,3.87609a37.017,37.017,0,0,1-3.47075,9.37779c-.16889.28711-.35045.57-.53623.84446H727.60162c.09289-.28289.19-.56157.2829-.84446q3.02107-8.94923,6.038-17.89421c.61645-1.824,1.23292-3.65652,1.66782-5.53124a52.38865,52.38865,0,0,0,1.01758-7.02172q.69669-7.34053.91625-14.71059a17.88675,17.88675,0,0,1,.66291-5.23989,17.14222,17.14222,0,0,1,2.10272-3.83388q1.83037-2.75509,3.66077-5.51013a8.46485,8.46485,0,0,1,2.44051-2.71072c.874-.53624,1.91271-.73045,2.83317-1.18225a14.48356,14.48356,0,0,0,1.6636-1.102c1.37648-.97535,2.99362-2.00137,4.1801-1.00491a2.32824,2.32824,0,0,1,.48979.56157,2.82638,2.82638,0,0,1,.18155.32513C756.87922,347.50618,752.868,349.63422,751.33531,350.71936Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M790.282,410.7397H735.52694c.29557-.27023.58691-.55312.8698-.84446.04223-.04222.08023-.08445.12245-.12667,2.09427-2.1745,2.5714-4.84722,3.62275-7.6762q1.18439-3.19841,2.36872-6.39261a3.30653,3.30653,0,0,1,.60379-1.1358,1.87323,1.87323,0,0,1,.8318-.49824,3.22746,3.22746,0,0,1,.73468-.13511,8.18891,8.18891,0,0,1,1.06824,0q14.738.76,29.45494,1.8536,1.9.1457,3.80009.29134c1.03447.76847,2.05627,1.58338,3.03162,2.44472a27.13555,27.13555,0,0,1,7.49463,9.94357c.19.46867.36311.94579.50668,1.43137C790.13,410.1739,790.21026,410.45679,790.282,410.7397Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<polygon
points="577.265 118.572 566.03 112.687 550.514 115.094 547.304 129.273 555.295 128.966 557.527 123.757 557.527 128.879 561.215 128.738 563.355 120.445 564.692 129.273 577.8 129.005 577.265 118.572"
fill="#2f2e41"
/>
<path
d="M790.282,410.7397h-12.363c-.08866-.27866-.17312-.56157-.25334-.84446a66.40159,66.40159,0,0,1-1.91694-9.57623c-.13089-.92469-.24489-1.85361-.35045-2.7783-.06756-.5869-.13089-1.16959-.19422-1.75647-.70935-6.51927-1.254-13.10188-3.247-19.34246-.24067-.75579-.50246-1.50738-.7558-2.25894a26.64073,26.64073,0,0,1-1.0978-3.97319c-.54046-3.09075-.13512-6.29128-.69247-9.37779-.31245-1.72272-.93313-3.23852-1.30892-4.81766a9.75721,9.75721,0,0,1-.27868-2.91762,6.28357,6.28357,0,0,1,3.28919-1.62981,1.73336,1.73336,0,0,0,1.16537-.46867,1.46263,1.46263,0,0,0,.19422-.86134,41.362,41.362,0,0,0-.342-5.9577c-.08446-.69246-.19-1.38493-.31668-2.07315-.00422-.03378-.01266-.06334-.01689-.0971a8.17879,8.17879,0,0,1,3.3103-.44757c1.22026.00423,2.43628.00423,3.65231.00846q3.37573.01267,6.75149.0211a14.86487,14.86487,0,0,1,6.16458.89091,9.81161,9.81161,0,0,1,4.45032,4.5601,26.24726,26.24726,0,0,1,2.00983,6.15193,8.24107,8.24107,0,0,1,.21112,1.77761c.09288,4.75434-3.45386,9.54244-3.80855,14.40234-.114,1.57492.01688,3.16251-.12666,4.73743-.14357,1.57915-.5658,3.12453-.6418,4.70368-.00423.08022-.00846.16467-.00846.2449a16.92965,16.92965,0,0,0,1.24559,6.00836c1.07246,3.01052,2.37717,5.92813,3.7241,8.82886,1.30048,2.80786,2.63472,5.603,3.83387,8.44888a14.46647,14.46647,0,0,1,1.24559,4.17588,5.79468,5.79468,0,0,1-.63757,3.37363,3.95888,3.95888,0,0,1-.50246.71358l-.00845.00845c-.038.04221-.076.08445-.114.12244H790.282Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M802.0581,406.40759a4.67929,4.67929,0,0,1-.45179,1.61716c-.02111.05067-.04221.09711-.06335.14777-.2449.57846-.494,1.14846-.75158,1.72272-.12667.27866-.25333.56157-.38422.84446H793.415a7.82664,7.82664,0,0,0,.7009-.84446c1.07246-1.4905,1.69315-3.30186,2.78672-4.91057a3.78094,3.78094,0,0,1,.51091-.608C799.16579,402.67927,802.16786,403.02551,802.0581,406.40759Z"
transform="translate(-196.46805 -191.44183)"
fill="#ffb8b8"
/>
<path
d="M806.60975,410.7397a8.28236,8.28236,0,0,0,.2238-.84446c.05066-.2238.09288-.44334.12666-.66714.2238-1.47358.21957-2.981.57-4.42921.494-2.07315,1.976-4.52632.75578-6.27014a6.625,6.625,0,0,0,2.62208-1.17382,2.25459,2.25459,0,0,0,.70934-2.59251c-.43491-.84869-1.61293-1.41027-1.45248-2.35184a2.24362,2.24362,0,0,1,.41378-.81913,6.135,6.135,0,0,0-.10556-6.73039c-.38422-.54889-.85714-1.03024-1.20757-1.60447a9.56867,9.56867,0,0,1-.874-2.22518,41.44894,41.44894,0,0,0-5.3328-11.054,7.33492,7.33492,0,0,1-1.178-2.1787,8.99458,8.99458,0,0,1-.09288-2.58829,17.35118,17.35118,0,0,0-3.0274-9.61424c-.13512-.21112-.27446-.418-.41378-.6249a55.6904,55.6904,0,0,0-6.16458-7.18217,3.95307,3.95307,0,0,0-2.17028,1.57492,4.68049,4.68049,0,0,0-.418,1.57069c-.59956,4.18011-1.11893,8.38553-1.52425,12.58674a30.76846,30.76846,0,0,0-.16891,6.38415,10.30563,10.30563,0,0,0,2.39828,5.81838c.89936.988,2.03939,1.72271,3.01475,2.63473q.23433.22165.456.456a14.95045,14.95045,0,0,1,1.862,2.44472,21.07823,21.07823,0,0,1,3.09919,12.63741,20.53235,20.53235,0,0,1-.31667,2.3814,9.84905,9.84905,0,0,1-1.12313,3.209c-.44334.73046-1.026,1.368-1.42714,2.12384a2.68572,2.68572,0,0,0-.19424,2.45317,4.99563,4.99563,0,0,1,1.74382.31245,7.40159,7.40159,0,0,1,3.29341,2.51229,10.952,10.952,0,0,1,.836,1.28358,15.80993,15.80993,0,0,1,.836,1.72271c.10133.23222.19844.4729.29134.71358a.69962.69962,0,0,1,.05067.13089Z"
transform="translate(-196.46805 -191.44183)"
fill="#2f2e41"
/>
<path
d="M873.99683,412.31536H655.33277V288.44183H873.99683Zm-216.66406-2H871.99683V290.44183H657.33277Z"
transform="translate(-196.46805 -191.44183)"
fill="#e6e6e6"
/>
<circle cx="403.37333" cy="460.33374" r="22.49773" fill="#e6e6e6" />
<circle cx="403.37333" cy="460.33374" r="17.24826" fill="#fff" />
<path
d="M608.4822,661.196a.78172.78172,0,0,1-.55534-.23029L590.64308,643.6819a.78563.78563,0,0,1,1.11068-1.11068L609.03754,659.855a.78563.78563,0,0,1-.55534,1.341Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M599.4475,654.70719l-2.543-2.543a.0982.0982,0,0,0-.16645.083,3.14791,3.14791,0,0,0,2.62645,2.62645.0982.0982,0,0,0,.083-.16645Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M600.23313,648.82972l2.5469,2.54592a.0982.0982,0,0,0,.16694-.083,3.1489,3.1489,0,0,0-2.62939-2.6294.0982.0982,0,0,0-.08445.16646Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M611.3792,652.62086a1.58109,1.58109,0,0,0-.00491-1.70678A16.64213,16.64213,0,0,0,606.5,645.97789a12.35131,12.35131,0,0,0-6.67537-2.0657,11.12358,11.12358,0,0,0-3.52648.57891.19641.19641,0,0,0-.0766.32554l2.31956,2.31956a.1964.1964,0,0,0,.18757.05156,4.71376,4.71376,0,0,1,5.69579,5.69579.19642.19642,0,0,0,.05156.18708l3.33646,3.33891a.19639.19639,0,0,0,.26514.01178A16.88137,16.88137,0,0,0,611.3792,652.62086Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M599.84031,656.48221a4.71376,4.71376,0,0,1-4.58118-5.82493.19643.19643,0,0,0-.05156-.18708l-3.282-3.28342a.1964.1964,0,0,0-.26564-.0113,19.63328,19.63328,0,0,0-3.32271,3.718,1.56734,1.56734,0,0,0-.03143,1.74507,16.40009,16.40009,0,0,0,4.82571,4.94208,12.16778,12.16778,0,0,0,6.69305,2.04411,11.69672,11.69672,0,0,0,3.56674-.56713.1964.1964,0,0,0,.07906-.326l-2.33086-2.33037a.19641.19641,0,0,0-.18708-.05155A4.71364,4.71364,0,0,1,599.84031,656.48221Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<circle cx="318.01431" cy="460.33374" r="22.49773" fill="#e6e6e6" />
<circle cx="318.01431" cy="460.33374" r="17.24826" fill="#fff" />
<path
d="M521.10851,662.774a9.252,9.252,0,0,1-4.321-1.473,28.09351,28.09351,0,0,1-6.78634-5.07613,25.753,25.753,0,0,1-5.09625-6.78437c-1.80891-3.28981-1.50055-5.01426-1.15586-5.75128a4.59091,4.59091,0,0,1,1.79958-1.93067,8.65648,8.65648,0,0,1,1.40627-.74635c.0491-.02111.09477-.04124.13552-.05941a1.31415,1.31415,0,0,1,1.07778-.0982,3.15681,3.15681,0,0,1,1.02426.78562,17.02329,17.02329,0,0,1,2.56114,3.80194,3.78743,3.78743,0,0,1,.50231,1.557,2.4606,2.4606,0,0,1-.626,1.46372c-.06433.08789-.12816.17185-.19.25336-.37366.491-.45566.63292-.40165.88629a9.31455,9.31455,0,0,0,2.268,3.364,8.92829,8.92829,0,0,0,3.32516,2.213c.26417.05647.409-.029.91575-.41589.07267-.05548.1473-.11293.22537-.17038a2.47386,2.47386,0,0,1,1.48582-.66484h.00295a3.558,3.558,0,0,1,1.56437.549,18.75716,18.75716,0,0,1,3.78771,2.54248,3.156,3.156,0,0,1,.78808,1.02181,1.32451,1.32451,0,0,1-.09821,1.08023c-.01816.04076-.0383.08544-.05941.135a8.66631,8.66631,0,0,1-.75076,1.40333,4.59174,4.59174,0,0,1-1.93363,1.79613A3.31021,3.31021,0,0,1,521.10851,662.774Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<circle cx="489.39405" cy="460.33374" r="22.49773" fill="#e6e6e6" />
<circle cx="489.39405" cy="460.33374" r="17.24826" fill="#fff" />
<path
d="M689.0046,661.98872h-6.285a.78563.78563,0,1,1,0-1.57125h6.285a.78563.78563,0,1,1,0,1.57125Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M685.8621,658.06059a7.07866,7.07866,0,0,1-7.07064-7.07064V649.4187a.78563.78563,0,0,1,1.57125,0V650.99a5.49939,5.49939,0,1,0,10.99877,0V649.4187a.78563.78563,0,0,1,1.57125,0V650.99A7.07868,7.07868,0,0,1,685.8621,658.06059Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M685.8621,661.98872a.78562.78562,0,0,1-.78563-.78562V657.275a.78563.78563,0,1,1,1.57125,0v3.92813A.7856.7856,0,0,1,685.8621,661.98872Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
<path
d="M685.8621,654.91809a3.87071,3.87071,0,0,1-2.7767-1.18335,3.97186,3.97186,0,0,1-1.15144-2.79389v-5.45028a3.91291,3.91291,0,0,1,3.89763-3.92813q.01526-.00006.03051,0a3.88633,3.88633,0,0,1,3.92813,3.92813v5.45028A3.95757,3.95757,0,0,1,685.8621,654.91809Z"
transform="translate(-196.46805 -191.44183)"
fill="var(--bs-primary)"
/>
</svg>
);
}
|
var __extends=this&&this.__extends||function(){var e=function(t,i){e=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var i in t)if(t.hasOwnProperty(i))e[i]=t[i]};return e(t,i)};return function(t,i){e(t,i);function n(){this.constructor=t}t.prototype=i===null?Object.create(i):(n.prototype=i.prototype,new n)}}();System.register(["./p-226ad382.system.js","./p-57436eba.system.js","./p-5ca8f7bd.system.js","./p-7db317d0.system.js","./p-421d54d3.system.js"],function(e){"use strict";var t,i,n,o,r,s,a,p;return{setters:[function(e){t=e.r;i=e.c;n=e.h;o=e.g},function(e){r=e.E},function(e){s=e.T},function(e){a=e.C},function(e){p=e.T}],execute:function(){var c=r.PSK_WIZARD_EVT;var h=function(e){__extends(t,e);function t(t,i,n){var o=e.call(this,t,n)||this;o.getEventType=function(){return c};o.data=i;return o}return t}(CustomEvent);var d=undefined&&undefined.__decorate||function(e,t,i,n){var o=arguments.length,r=o<3?t:n===null?n=Object.getOwnPropertyDescriptor(t,i):n,s;if(typeof Reflect==="object"&&typeof Reflect.decorate==="function")r=Reflect.decorate(e,t,i,n);else for(var a=e.length-1;a>=0;a--)if(s=e[a])r=(o<3?s(r):o>3?s(t,i,r):s(t,i))||r;return o>3&&r&&Object.defineProperty(t,i,r),r};var l=e("psk_wizard",function(){function e(e){t(this,e);this.needWizardConfiguration=i(this,"needWizardConfiguration",7);this.changeStep=i(this,"changeStep",7);this.finishWizard=i(this,"finishWizard",7)}e.prototype.componentWillLoad=function(){var e=this;this.needWizardConfiguration.emit(function(t){e.wizardSteps=t;e.activeStep=e.wizardSteps.length>0?e.wizardSteps[0]:null})};e.prototype.handleStepChange=function(e){var t=this;var i=new h("changeStep",{stepIndexToDisplay:e,wizardSteps:this.wizardSteps,activeStep:this.activeStep,callback:function(e,i){if(e){console.error(e);return}t.activeStep=i.activeStep;t.wizardSteps=i.wizardSteps}},{bubbles:true,composed:true,cancelable:true});this.host.dispatchEvent(i)};e.prototype.handleFinish=function(){this.finishWizard.emit({wizardSteps:this.wizardSteps,callback:function(e,t){if(e){console.error(e);return}console.log(t)}});return};e.prototype.handleStepPropertiesChange=function(e){this.activeStep["stepProperties"]=e};e.prototype.computeStepDesign=function(e,t,i){var n="";if(e===0){n+="first "}else if(e===i-1){n+="last "}if(e<t){n+="done"}else if(e===t){n+="current"}return n};e.prototype.render=function(){var e=this.activeStep.stepComponent;return[n("div",{class:"page-content"},n("div",{class:"wizard-content"},n("div",{class:"wizard-form"},n("form",{class:"form-register",action:"#",method:"post",onSubmit:function(e){e.preventDefault();e.stopImmediatePropagation()}},n("div",{id:"form-total",class:"wizard clearfix"},n("psk-stepper",{componentRender:this.componentRender,wizardSteps:this.wizardSteps,activeStep:this.activeStep,handleStepChange:this.handleStepChange.bind(this)}),n(e,Object.assign({},this.activeStep.stepProperties,{onPropertiesChange:this.handleStepPropertiesChange.bind(this),stepProperties:this.activeStep.stepProperties})),n("div",{class:"actions 
clearfix"},n("ul",{role:"menu","aria-label":"Pagination"},this.activeStep.stepIndex>0?n("li",null,n("button",{role:"menuitem",onClick:this.handleStepChange.bind(this,this.activeStep.stepIndex-1)},"Previous")):null,this.activeStep.stepIndex<this.wizardSteps.length-1?n("li",null,n("button",{role:"menuitem",onClick:this.handleStepChange.bind(this,this.activeStep.stepIndex+1)},"Next")):null,this.activeStep.stepIndex===this.wizardSteps.length-1?n("li",null,n("button",{role:"menuitem",onClick:this.handleFinish.bind(this)},"Finish")):null)))))))]};Object.defineProperty(e.prototype,"host",{get:function(){return o(this)},enumerable:true,configurable:true});return e}());d([a()],l.prototype,"host",void 0);d([s({description:"This property is the string that defines the psk-stepper render",isMandatory:false,propertyType:"string"})],l.prototype,"componentRender",void 0);d([s({description:"This parameter holds the wizard configuration, the names of the steps, the components that will be displayed and if there is the case, other properties, like information for the steps.",isMandatory:false,propertyType:"array of WizardStep types (WizardStep[])",specialNote:"These information are filled in and handled by the controller of the component, not by the component itself."})],l.prototype,"wizardSteps",void 0);d([p({eventName:"needWizardConfiguration",controllerInteraction:{required:true},description:"This event is triggered when the component is loaded and if no configuration is given for the wizard.\n In this case, the controller is responsible to send the configuration to the wizard.\n This event comes with a single parameter, a callback function that sends the configuration to the component."})],l.prototype,"needWizardConfiguration",void 0);d([p({eventName:"changeStep",controllerInteraction:{required:true},description:"This event is triggered when the buttons Next, Previous and the step names from the left side of the component are clicked.\n This event comes with the following parameters:\n stepIndexToDisplay - the number of the step to be displayed,\n wizardSteps - the list of the steps from the wizard,\n activeStep - the step that will be displayed,\n callback - a callback function that is called from the controller when the validation is done."})],l.prototype,"changeStep",void 0);d([p({eventName:"finishWizard",description:"This event is triggered when the buttons Finish is clicked.\n This event comes with the following parameters:\n wizardSteps - the list of the steps from the wizard. Maybe the information inside the wizard will be stored somewhere,\n callback - a callback function that is called from the controller when the validation is done."})],l.prototype,"finishWizard",void 0)}}}); |
from flask import Flask, render_template
app = Flask(__name__)
import RPi.GPIO as GPIO
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
# LED pins (BCM numbering): "amarillo" = yellow, "verde" = green
amarillo = 17
verde = 18
GPIO.setup(amarillo, GPIO.OUT)
GPIO.output(amarillo, GPIO.LOW)
GPIO.setup(verde, GPIO.OUT)
GPIO.output(verde, GPIO.LOW)
@app.route('/')
def home():
templateData = {
'amarillo' : GPIO.input(amarillo),
'verde' : GPIO.input(verde),
}
return render_template('home.html', **templateData)
@app.route('/<led>/<action>')
def led(led, action):
    # <led> is the BCM pin number, <action> is 1 (on) or 0 (off).
    GPIO.output(int(led), int(action))
templateData = {
'amarillo' : GPIO.input(amarillo),
'verde' : GPIO.input(verde),
}
return render_template('home.html', **templateData)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080, debug=True)
|
import unittest
from pathlib import Path
from cw.flex_file import flex_load, flex_dump, determine_serialization
from cw.rm import rm, rrmdir
from cw.object_hierarchies import object_hierarchy_equals
from cw.serializers import yaml, msgpack
from cw.test import test_path
from itertools import product
import pickle
import json
class TestFile(unittest.TestCase):
def setUp(self):
self.test_data = {
"id": 1,
"first_name": "Zebadiah",
"last_name": "Anderton",
"email": "[email protected]",
"gender": "Male",
"ip_address": "160.42.195.192"
}
self.serializers = {
"msgpack": msgpack,
"pickle": pickle,
"yaml": yaml,
"json": json
}
self.file_extensions = {
".pickle": pickle,
".yaml": yaml,
".msgp": msgpack,
".json": json,
".unknown": None
}
# Clear the temporary directory to store the test files.
self.flex_file_temp: Path = (test_path / "flex_file_temp")
if self.flex_file_temp.exists():
rrmdir(self.flex_file_temp)
self.flex_file_temp.mkdir()
def tearDown(self):
pass
# if self.flex_file_temp.exists():
# rrmdir(self.flex_file_temp)
def test_dump_load(self):
for (serializer_name, serializer), \
(extension, extension_serializer), \
is_gzipped,\
is_gzipped_default in product(self.serializers.items(),
self.file_extensions.items(),
[True, False],
[True, False]):
with self.subTest(serializer=serializer_name,
extension=extension,
is_gzipped=is_gzipped,
is_gzipped_default=is_gzipped_default):
file_path = self.flex_file_temp / f"data.i{extension}{'.gz' if is_gzipped else ''}"
if extension == ".unknown":
is_actually_gzipped = is_gzipped or is_gzipped_default
else:
is_actually_gzipped = is_gzipped
flex_dump(self.test_data, file_path, default_serializer=serializer, default_is_gzipped=is_gzipped_default)
loaded_in_data = flex_load(file_path, default_serializer=serializer, default_is_gzipped=is_gzipped_default)
det_serializer, det_is_gzipped, det_is_binary = \
determine_serialization(file_path, default_serializer=serializer, default_is_gzipped=is_gzipped_default)
self.assertEqual(det_is_gzipped, is_actually_gzipped)
errors = object_hierarchy_equals(loaded_in_data, self.test_data)
print(yaml.dump(errors, default_flow_style = False))
self.assertEqual(len(errors), 0)
rm(file_path)
# print("ERROR")
# print("path", file_path)
# print("seri", file.serializer)
# print("bin ", file.is_binary)
# print("gzip", file.is_gzipped)
# print("ig ", is_gzipped)
# print("igd ", is_gzipped_default)
# a = sys.exc_info()
# print("".join(format_exception(*a)))
# print("")
# print("")
|
var appInstance = getApp();
function request(params) {
if (!_validateAttr(params.url)) {
params.fail();
return;
}
var requestUrl = params.url;
if (requestUrl.substr(0, 4) != 'http') {
requestUrl = appInstance.globalData.baseUrl + params.url;
}
if (!_validateAttr(params.method)) {
params.method = 'GET';
}
if (!_validateAttr(params.dataType)) {
params.dataType = 'json';
}
console.log('requestUrl--->' + requestUrl);
wx.request({
url: requestUrl,
data: params.data,
header: params.header,
method: params.method,
dataType: params.dataType,
success: function (res) {
if (_validateAttr(params.success)) {
params.success(res);
}
},
fail: function () {
if (_validateAttr(params.fail)) {
params.fail('很抱歉,网络开小差了');
}
},
complete: function () {
if (_validateAttr(params.complete)) {
params.complete();
}
}
});
}
function _validateAttr(attr) {
return attr != null && typeof (attr) != "undefined";
}
module.exports = {
request: request
}
|
const Product = require("./Product");
const UpdateProduct = require("./UpdateProduct");
class CarInsurance {
constructor(products) {
this.products = products;
}
updatePrice() {
const products = []
for (const product of this.products) {
products.push(new UpdateProduct(product).execute());
}
return products;
}
}
module.exports = {
Product,
CarInsurance
}
|
import React, { useEffect, useState, useRef } from 'react'
import Form from '../../components/Forms/form'
import { useHistory, useParams } from 'react-router-dom'
import { CKEditor } from '@ckeditor/ckeditor5-react';
// import ClassicEditor from '@ckeditor/ckeditor5-build-classic';
import { Editor as TinyMCE } from '@tinymce/tinymce-react';
import axios from 'axios'; // used below but was never imported; assuming the standard axios package
const Modify = () => {
let { id, method } = useParams()
const key = 'koderahasia'
const basePath = '/api/admin/product'
const [data, setData] = useState(null)
const [path, setPath] = useState(basePath)
    useEffect(() => {
        // Keep the effect callback synchronous; run the async fetch in a helper.
        const fetchData = async () => {
const dataFetch = await axios
.get(`/api/admin/product/${id}?key=${key}`)
.then(function (response) {
// console.log(response);
// return response.data;
return response;
})
            .catch(function (error) {
                console.log(error);
                return 404;
            });
// console.log(dataFetch.cartSession[5]);
// setRawData(dataFetch);
if (dataFetch.status == 200) {
setData(dataFetch.data);
setPath(basePath + '/' + dataFetch.data.id)
// console.log(dataFetch.data);
} else {
setData(dataFetch);
// console.log(dataFetch);
}
        };
        if (method === 'edit') {
            fetchData();
        }
    }, []);
const editorRef = useRef(null);
const log = () => {
if (editorRef.current) {
console.log(editorRef.current.getContent());
}
};
const dataForm = [
{
name: 'name',
label: 'Name',
type: 'text',
value: data?.name,
},
{
name: 'price',
label: 'Price',
type: 'number',
value: data?.price,
},
{
name: 'weight',
label: 'Weight (gr)',
type: 'number',
value: data?.weight,
},
{
name: 'short_description',
label: 'Short Description',
type: 'tinyMCE',
value: data?.short_description,
},
{
name: 'description',
label: 'Long Description',
type: 'tinyMCE',
value: data?.description,
},
{
name: 'image',
label: 'Image',
type: 'file',
filetype: 'image',
value: data?.thumbnail_img,
},
]
return (
<div className="w-full p-10 bg-white shadow">
<form action={path}
// "/api/admin/product"
method="POST" enctype="multipart/form-data">
{method === 'edit' ?
<input type="hidden" name="_method" value="put" /> : null
}
<div>
{dataForm && dataForm.map((item, i) => (
                        <Form key={i} list={item} />
))
}
{/* <button type="button" id="lfm" onClick={(e) => {
lfm(e, 'lfm', 'file');
}} className="btn btn-primary">
Browser File
</button> */}
{/* {lfm('lfm','file')} */}
<button type="submit" className="btn btn-primary">Submit</button>
</div>
</form>
</div>
)
}
export default Modify
|
import React from 'react';
import Slideshow from '../components/Slideshow';
import Tweet1 from '../static/images/tweet1.png';
import Tweet2 from '../static/images/tweet2.png';
import Tweet3 from '../static/images/tweet3.png';
import Tweet4 from '../static/images/tweet4.png';
import Tweet5 from '../static/images/tweet5.png';
import Tweet6 from '../static/images/tweet6.png';
const SLIDES = 6;
class Slide18 extends React.PureComponent {
constructor(props) {
super(props);
this.state = { active: 0 };
}
  componentDidMount() {
    // Keep the timer id so it can be cleared when the slide unmounts.
    this.interval = setInterval(() => {
      this.setState(({ active }) => {
        if (active >= SLIDES - 1) {
          return { active: 0 };
        }
        return { active: active + 1 };
      });
    }, 4000);
  }
  componentWillUnmount() {
    clearInterval(this.interval);
  }
render() {
return (
<section data-bespoke-backdrop="middle-earth-muted">
<Slideshow
width={760}
height={380}
active={this.state.active}
className="align-center"
>
<img className="fit" src={Tweet1} />
<img className="fit" src={Tweet2} />
<img className="fit" src={Tweet3} />
<img className="fit" src={Tweet4} />
<img className="fit" src={Tweet5} />
<img className="fit" src={Tweet6} />
</Slideshow>
</section>
);
}
}
export default Slide18;
|
'use strict';
const TerserPlugin = require('terser-webpack-plugin');
const {CleanWebpackPlugin} = require('clean-webpack-plugin');
module.exports = {
mode: 'production',
context: `${__dirname}/src/`,
entry: {
'autocomplete-cpnt': './autocomplete-cpnt.js',
'autocomplete-cpnt.min': './autocomplete-cpnt.js'
},
output: {
path: `${__dirname}/dist/`,
filename: '[name].js',
library: 'autocompleteComponent',
libraryTarget: 'var'
},
performance: {hints: false},
optimization: {
minimizer: [
new TerserPlugin({
include: /\.min\.js$/,
parallel: true,
terserOptions: {
compress: true,
ie8: false,
ecma: 5,
output: {comments: false},
warnings: false
}
})
]
},
plugins: [
new CleanWebpackPlugin()
]
}; |
async function editFormHandler(event) {
event.preventDefault();
const title = document.querySelector('input[name="post-title"]').value.trim();
const category_name = document.querySelector('select[name="post-category"]').value;
const post_url = document.querySelector('input[name="post-url"]').value;
const post_text = document.querySelector('textarea[name="post-text"]').value;
const post_image = document.querySelector('input[name="post-image"]').value;
const id = window.location.toString().split('/')[
window.location.toString().split('/').length - 1
];
const response = await fetch(`/api/posts/${id}`, {
method: 'PUT',
body: JSON.stringify({
title,
category_name,
post_text,
post_image,
post_url
}),
headers: {
'Content-Type': 'application/json'
}
});
if (response.ok) {
document.location.replace('/dashboard/admin')
} else {
alert(response.statusText);
}
}
document.querySelector('.edit-post-form').addEventListener('submit', editFormHandler);
|
/* istanbul ignore else */
if (process.env.NODE_ENV === 'test')
var configPath = './testing_config.js';
else
var configPath = './config.js';
var config = require(configPath);
var strings = require('./strings.js');
var restify = require('restify');
var fs = require('fs');
var path = require('path');
var FORBIDDEN_CODE = 403;
var FORBIDDEN_ERROR = {error: {code: 'FORBIDDEN',
message: strings.FORBIDDEN}};
function SEND_FORBIDDEN(req, res, next) {
res.send(FORBIDDEN_CODE, FORBIDDEN_ERROR);
}
var server = restify.createServer();
server.use(restify.queryParser()); // Allows us to access req.query
server.name = 'Hook to Deploy';
server.get('/hook/:hookName', function(req, res, next) {
if (!config.hooks.hasOwnProperty(req.params.hookName) || // hook does not exist or
!req.query.hasOwnProperty('key')) { // no key provided
SEND_FORBIDDEN(req, res, next);
return;
}
var hook = config.hooks[req.params.hookName];
if (req.query.key !== hook.key) { // hook key is incorrect
SEND_FORBIDDEN(req, res, next);
return;
}
hook.action(req, res); // Hook must handle res.send()
});
server.get('/results/:filename', function(req, res, next) {
var resultsPath = path.join(config.resultsFolder, req.params.filename);
if (fs.existsSync(resultsPath) && fs.statSync(resultsPath).isFile()) {
fs.readFile(resultsPath, {encoding: 'utf8'}, function(err, jsonFile) {
if (err) {
res.send(500, {error: err});
} else {
var data = JSON.parse(jsonFile);
res.send(data);
}
});
} else {
SEND_FORBIDDEN(req, res, next);
}
});
server.on('NotFound', SEND_FORBIDDEN);
server.on('MethodNotAllowed', SEND_FORBIDDEN);
server.listen(config.port, function() {
console.log('%s listening on %s', server.name, server.url);
});
|
import { useState, useEffect, useCallback } from 'react'
function useLocalStorage(key, initialValue) {
const [sortLocalStorageValue, setSortLocalStorageValue] = useState(() => {
try {
let value = window.localStorage.getItem(key)
return value ? JSON.parse(value) : initialValue
} catch (error) {
console.log(error)
return initialValue
}
})
// Function to set the localStorage Value
const setValue = useCallback(
(value) => {
try {
const valueToStore =
value instanceof Function ? value(sortLocalStorageValue) : value
//Save value to state
setSortLocalStorageValue(valueToStore)
//Save value to localStorage
window.localStorage.setItem(key, JSON.stringify(valueToStore))
} catch (error) {
throw new Error(`Error when attempting to useLocalState Hook: ${error}`)
}
},
[key, sortLocalStorageValue]
)
useEffect(() => {
let data = window.localStorage.getItem(key)
if (!data) {
setValue(initialValue)
}
}, [setValue, key, initialValue])
return [sortLocalStorageValue, setValue]
}
export default useLocalStorage
|
from functools import update_wrapper
from django import forms
from django.contrib import admin, messages
from django.template import loader
from django.utils.decorators import method_decorator
from django.views.decorators.debug import sensitive_post_parameters
from django.core.exceptions import PermissionDenied
from django.contrib.admin.utils import unquote
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.utils.html import escape
from django.urls import reverse
from django.utils.translation import gettext, gettext_lazy as _
from django.contrib.admin.options import IS_POPUP_VAR
from django.template.response import TemplateResponse
from django.views.generic import RedirectView
from django.conf import settings
from simple_mail.forms import AdminSendTestMailForm
from simple_mail.models import SimpleMail, SimpleMailConfig
sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())
class ColorInput(forms.widgets.Input):
input_type = 'color'
def get_widgets():
widgets = {}
for field in SimpleMailConfig.COLOR_FIELDS:
widgets[field] = ColorInput
return widgets
if getattr(settings, 'SIMPLE_MAIL_USE_MODELTRANSALTION', False):
from modeltranslation.admin import TabbedTranslationAdmin
modelAdminClass = TabbedTranslationAdmin
else:
modelAdminClass = admin.ModelAdmin
class SimpleMailConfigAdminForm(forms.ModelForm):
class Meta:
model = SimpleMailConfig
widgets = get_widgets()
exclude = []
class SimpleMailConfigAdmin(modelAdminClass):
fieldsets = (
('Header', {
'fields': ('logo',)
}),
('Footer', {
'fields': ('footer_content', 'facebook_url', 'twitter_url', 'instagram_url', 'website_url',)
}),
('Colors', {
'fields': SimpleMailConfig.COLOR_FIELDS,
'classes': ('wide',)
}),
('Sizings', {
'fields': SimpleMailConfig.SIZING_FIELDS,
'classes': ('wide',)
}),
)
form = SimpleMailConfigAdminForm
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def get_urls(self):
try:
from django.urls import re_path
except ImportError:
from django.conf.urls import url as re_path
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
wrapper.model_admin = self
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = [
re_path(r'^$', wrap(RedirectView.as_view(
pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)
)), name='%s_%s_changelist' % info),
re_path(r'^history/$', wrap(self.history_view),
{'object_id': str(self.singleton_instance_id)}, name='%s_%s_history' % info),
re_path(r'^change/$', wrap(self.change_view),
{'object_id': str(self.singleton_instance_id)}, name='%s_%s_change' % info),
]
parent_urlpatterns = super().get_urls()
return urlpatterns + parent_urlpatterns
def change_view(self, request, object_id, form_url='', extra_context=None):
if object_id == str(self.singleton_instance_id):
self.model.objects.get_or_create(pk=self.singleton_instance_id)
return super(SimpleMailConfigAdmin, self).change_view(
request,
object_id,
form_url=form_url,
extra_context=extra_context,
)
def response_post_save_change(self, request, obj):
post_url = reverse('%s:app_list' % self.admin_site.name, args=(self.model._meta.app_label,))
return HttpResponseRedirect(post_url)
@property
def singleton_instance_id(self):
return getattr(self.model, 'singleton_instance_id')
admin.site.register(SimpleMailConfig, SimpleMailConfigAdmin)
class SimpleMailAdmin(modelAdminClass):
'''
Admin View for Mail
'''
list_display = ('key', 'subject',)
readonly_fields = ('key', 'created_at', 'updated_at', 'available_context',)
actions = None
send_test_mail_form = AdminSendTestMailForm
simplemail_send_test_mail_template = None
simplemail_preview_mail_template = None
def available_context(self, obj):
from simple_mail.mailer import simple_mailer
test_mail = simple_mailer._registry.get(obj.key)()
test_mail.set_test_context()
res = []
for k, v in test_mail.context.items():
res.append({
'key': k,
'value': v,
'type': type(v).__name__
})
return loader.render_to_string('admin/simple_mail/simplemail/context.html', {'context': res})
available_context.short_description = 'Available Context'
fieldsets = (
('Content', {
'fields': ('subject', 'title', 'body', 'banner', 'button_label', 'button_link'),
}),
('Context', {
'fields': ('available_context',),
'classes': ('collapse',),
}),
('Metadata', {
'fields': ('key', 'created_at', 'updated_at',),
'classes': ('collapse',),
}),
)
def has_add_permission(self, *args, **kwargs):
return False
def has_delete_permission(self, *args, **kwargs):
return False
def preview_mail(self, request, id):
if not self.has_change_permission(request):
raise PermissionDenied
mail = self.get_object(request, unquote(id))
if mail is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': self.model._meta.verbose_name,
'key': escape(id),
})
from simple_mail.mailer import simple_mailer
test_mail = simple_mailer._registry.get(mail.key)()
test_mail.set_test_context()
html = test_mail.render().get('html_message')
return HttpResponse(html)
@sensitive_post_parameters_m
def send_test_mail(self, request, id, form_url=''):
if not self.has_change_permission(request):
raise PermissionDenied
mail = self.get_object(request, unquote(id))
if mail is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': self.model._meta.verbose_name,
'key': escape(id),
})
if request.method == 'POST':
form = self.send_test_mail_form(request.POST)
if form.is_valid():
from simple_mail.mailer import simple_mailer
email = form.cleaned_data.get('email')
test_mail = simple_mailer._registry.get(mail.key)()
test_mail.set_test_context()
test_mail.send_test_mail([email])
msg = gettext('Test mail successfully sent.')
messages.success(request, msg)
return HttpResponseRedirect(
reverse(
'%s:%s_%s_change' % (
self.admin_site.name,
mail._meta.app_label,
mail._meta.model_name,
),
args=(mail.pk,),
)
)
else:
form = self.send_test_mail_form()
fieldsets = [(None, {'fields': list(form.base_fields)})]
adminForm = admin.helpers.AdminForm(form, fieldsets, {})
context = dict(
self.admin_site.each_context(request),
            title=_('Send test mail: %s') % escape(mail.key),
            adminForm=adminForm,
            form_url=form_url,
            form=form,
            is_popup=(IS_POPUP_VAR in request.POST or
                      IS_POPUP_VAR in request.GET),
            add=True,
            change=False,
            has_delete_permission=False,
            has_change_permission=True,
            has_absolute_url=False,
            opts=self.model._meta,
            original=mail,
            save_as=False,
            show_save=True,
)
request.current_app = self.admin_site.name
return TemplateResponse(
request,
self.simplemail_send_test_mail_template or
'admin/simple_mail/simplemail/send_test_mail.html',
context,
)
def get_urls(self):
try:
from django.urls import re_path
except ImportError:
from django.conf.urls import url as re_path
parent_urlpatterns = super().get_urls()
return [
re_path(
r'^(?P<id>\d+)/send-test-mail/$',
self.admin_site.admin_view(self.send_test_mail),
name='send_test_mail',
),
re_path(
r'^(?P<id>\d+)/preview-mail/$',
self.admin_site.admin_view(self.preview_mail),
name='preview_mail',
),
] + parent_urlpatterns
admin.site.register(SimpleMail, SimpleMailAdmin)
|
const IF = async (d) => {
const interpreter = require("../../interpreter.js");
let code = d.code;
let message = d.message;
let client = d.client;
let args = d.args;
if (code.toLowerCase().includes("$if[")) {
for (let statement of code
.split(/\$if\[/gi)
.slice(1)
.reverse()) {
const r = code.toLowerCase().split("$if[").length - 1;
if (!code.toLowerCase().includes("$endif"))
return message.channel.send(`\`$if: Invalid Usage: missing $endif\``);
const everything = code.split(/\$if\[/gi)[r].split(/\$endif/gi)[0];
statement = code.split(/\$if\[/gi)[r].split(/\$endif/gi)[0];
let condition = statement.split("\n")[0].trim();
condition = condition.slice(0, condition.length - 1);
const pass =
(
await interpreter(
client,
message,
args,
{
code: `$checkCondition[${condition}]`,
name: "check",
},
undefined,
true,
)
)?.code === "true";
const elseIfAction = statement.toLowerCase().includes("$elseif");
const elseIfs = {};
if (elseIfAction) {
for (const data of statement.split(/\$elseif\[/gi).slice(1)) {
if (!data.toLowerCase().includes("$endelseif"))
return message.channel.send(
`\`$elseIf: Invalid Usage: missing $endelseIf!\``,
);
const inside = data.split(/\$endelseIf/gi)[0];
let CONDITION = inside.split("\n")[0].trim();
CONDITION = CONDITION.slice(0, CONDITION.length - 1);
const CODE = inside.split("\n").slice(1).join("\n");
elseIfs[CONDITION] = CODE;
function escapeRegExp(string) {
return string.replace(/[.*+?^${}()|[\]\\\n]/g, "\\$&");
}
statement = statement.replace(
new RegExp(`\\$elseif\\[${escapeRegExp(inside)}\\$endelseif`, "mi"),
"",
);
}
}
const elseAction = statement.toLowerCase().includes("$else");
const ifCode = elseAction
? statement
.split("\n")
.slice(1)
.join("\n")
.split(/\$else/gi)[0]
: statement
.split("\n")
.slice(1)
.join("\n")
.split(/\$endif/gi)[0];
const elseCode = elseAction
? statement.split(/\$else/gi)[1].split(/\$endif/gi)[0]
: "";
let passes = false;
let lastCode;
if (elseIfAction) {
for (const data of Object.entries(elseIfs)) {
if (!passes) {
const response =
(
await interpreter(
client,
message,
args,
{
code: `$checkCondition[${data[0]}]`,
name: "check",
},
undefined,
true,
)
).code === "true";
if (response) {
passes = true;
lastCode = data[1];
}
}
}
}
code = code.replace(/\$if\[/gi, "$if[").replace(/\$endif/gi, "$endif");
code = code.replaceLast(
`$if[${everything}$endif`,
pass ? ifCode : passes ? lastCode : elseCode,
);
}
}
return {code};
};
module.exports = IF;
|
import React from 'react';
import {Animated, View, FlatList, Text, TouchableOpacity, I18nManager} from "react-native";
const KeyboardView = ({keyboardOnPress, pinLength, onComplete, bgColor, returnType, textColor, animatedDeleteButton, deleteText, animatedDeleteButtonOnPress, styles}) => {
let data;
if(I18nManager.isRTL) {
data = ["1", "2", "3", "4", "5", "6", "7", "8", "9", deleteText, "0", null].reverse();
} else {
data = ["1", "2", "3", "4", "5", "6", "7", "8", "9", deleteText, "0"];
}
const renderItem = ({item, index}) => {
let style;
let onPressInactive;
if(item === deleteText) {
onPressInactive = animatedDeleteButtonOnPress;
style = [styles[0], {
opacity: animatedDeleteButton
}]
} else {
onPressInactive = false;
style = [styles[0]]
}
return (
<TouchableOpacity
key={"key-item-" + index}
activeOpacity={0.9}
onPress={() => keyboardOnPress(item, returnType, pinLength, onComplete)}
disabled={onPressInactive}>
<Animated.View style={[style, {
backgroundColor: bgColor,
}]}>
<Text style={[styles[1], {
color : textColor,
opacity: 1,
}]}>{item}</Text>
</Animated.View>
</TouchableOpacity>
)
};
return (
<FlatList
contentContainerStyle={{
flexDirection: I18nManager.isRTL ? 'column-reverse' : 'column',
alignItems : I18nManager.isRTL ? 'flex-end' : 'flex-start',
}}
scrollEnabled={false}
horizontal={false}
vertical={true}
numColumns={3}
renderItem={renderItem}
data={data}
keyExtractor={(val, index) => "pinViewItem-" + index}
/>
)
};
export default KeyboardView |
/**
 * Provides drop-down menus for lists of action links.
*
* @module moodle-course-management
*/
/**
* Management JS console.
*
* Provides the organisation for course and category management JS.
*
* @namespace M.course.management
* @class Console
* @constructor
* @extends Base
*/
Console = function() {
Console.superclass.constructor.apply(this, arguments);
};
Console.NAME = 'moodle-course-management';
Console.CSS_PREFIX = 'management';
Console.ATTRS = {
/**
* The HTML element containing the management interface.
* @attribute element
* @type Node
*/
element: {
setter: function(node) {
if (typeof node === 'string') {
node = Y.one('#' + node);
}
return node;
}
},
/**
* The category listing container node.
* @attribute categorylisting
* @type Node
* @default null
*/
categorylisting: {
value: null
},
/**
* The course listing container node.
* @attribute courselisting
* @type Node
* @default null
*/
courselisting: {
value: null
},
/**
* The course details container node.
* @attribute coursedetails
* @type Node|null
* @default null
*/
coursedetails: {
value: null
},
/**
* The id of the currently active category.
* @attribute activecategoryid
* @type Number
* @default null
*/
activecategoryid: {
value: null
},
/**
* The id of the currently active course.
* @attribute activecourseid
* @type Number
 * @default null
*/
activecourseid: {
value: null
},
/**
* The categories that are currently available through the management interface.
* @attribute categories
* @type Array
* @default []
*/
categories: {
setter: function(item, name) {
if (Y.Lang.isArray(item)) {
return item;
}
var items = this.get(name);
items.push(item);
return items;
},
value: []
},
/**
* The courses that are currently available through the management interface.
* @attribute courses
* @type Course[]
* @default Array
*/
courses: {
validator: function(val) {
return Y.Lang.isArray(val);
},
value: []
},
/**
* The currently displayed page of courses.
* @attribute page
* @type Number
* @default null
*/
page: {
getter: function(value, name) {
if (value === null) {
value = this.get('element').getData(name);
this.set(name, value);
}
return value;
},
value: null
},
/**
* The total pages of courses that can be shown for this category.
* @attribute totalpages
* @type Number
* @default null
*/
totalpages: {
getter: function(value, name) {
if (value === null) {
value = this.get('element').getData(name);
this.set(name, value);
}
return value;
},
value: null
},
/**
* The total number of courses belonging to this category.
* @attribute totalcourses
* @type Number
* @default null
*/
totalcourses: {
getter: function(value, name) {
if (value === null) {
value = this.get('element').getData(name);
this.set(name, value);
}
return value;
},
value: null
},
/**
* The URL to use for AJAX actions/requests.
* @attribute ajaxurl
* @type String
* @default /course/ajax/management.php
*/
ajaxurl: {
getter: function(value) {
if (value === null) {
value = M.cfg.wwwroot + '/course/ajax/management.php';
}
return value;
},
value: null
},
/**
* The drag drop handler
* @attribute dragdrop
* @type DragDrop
* @default null
*/
dragdrop: {
value: null
}
};
Console.prototype = {
/**
* Gets set to true once the first categories have been initialised.
* @property categoriesinit
* @private
* @type {boolean}
*/
categoriesinit: false,
/**
* Initialises a new instance of the Console.
* @method initializer
*/
initializer: function() {
Y.log('Initialising course category management console', 'info', 'moodle-course-management');
this.set('element', 'coursecat-management');
var element = this.get('element'),
categorylisting = element.one('#category-listing'),
courselisting = element.one('#course-listing'),
selectedcategory = null,
selectedcourse = null;
if (categorylisting) {
selectedcategory = categorylisting.one('.listitem[data-selected="1"]');
}
if (courselisting) {
selectedcourse = courselisting.one('.listitem[data-selected="1"]');
}
this.set('categorylisting', categorylisting);
this.set('courselisting', courselisting);
this.set('coursedetails', element.one('#course-detail'));
if (selectedcategory) {
this.set('activecategoryid', selectedcategory.getData('id'));
}
if (selectedcourse) {
this.set('activecourseid', selectedcourse.getData('id'));
}
this.initialiseCategories(categorylisting);
this.initialiseCourses();
if (courselisting) {
// No need for dragdrop if we don't have a course listing.
this.set('dragdrop', new DragDrop({console: this}));
}
},
/**
* Initialises all the categories being shown.
* @method initialiseCategories
* @private
* @return {boolean}
*/
initialiseCategories: function(listing) {
var count = 0;
if (!listing) {
return false;
}
// Disable category bulk actions as nothing will be selected on initialise.
var menumovecatto = listing.one('#menumovecategoriesto');
if (menumovecatto) {
menumovecatto.setAttribute('disabled', true);
}
var menuresortcategoriesby = listing.one('#menuresortcategoriesby');
if (menuresortcategoriesby) {
menuresortcategoriesby.setAttribute('disabled', true);
}
var menuresortcoursesby = listing.one('#menuresortcoursesby');
if (menuresortcoursesby) {
menuresortcoursesby.setAttribute('disabled', true);
}
listing.all('.listitem[data-id]').each(function(node) {
this.set('categories', new Category({
node: node,
console: this
}));
count++;
}, this);
if (!this.categoriesinit) {
this.get('categorylisting').delegate('click', this.handleCategoryDelegation, 'a[data-action]', this);
this.get('categorylisting').delegate('click', this.handleCategoryDelegation, 'input[name="bcat[]"]', this);
this.get('categorylisting').delegate('change', this.handleBulkSortByaction, '#menuselectsortby', this);
this.categoriesinit = true;
Y.log(count + ' categories being managed', 'info', 'moodle-course-management');
} else {
Y.log(count + ' new categories being managed', 'info', 'moodle-course-management');
}
},
/**
 * Initialises all the courses being shown.
* @method initialiseCourses
* @private
* @return {boolean}
*/
initialiseCourses: function() {
var category = this.getCategoryById(this.get('activecategoryid')),
listing = this.get('courselisting'),
count = 0;
if (!listing) {
return false;
}
// Disable course move to bulk action as nothing will be selected on initialise.
var menumovecoursesto = listing.one('#menumovecoursesto');
if (menumovecoursesto) {
menumovecoursesto.setAttribute('disabled', true);
}
listing.all('.listitem[data-id]').each(function(node) {
this.registerCourse(new Course({
node: node,
console: this,
category: category
}));
count++;
}, this);
listing.delegate('click', this.handleCourseDelegation, 'a[data-action]', this);
listing.delegate('click', this.handleCourseDelegation, 'input[name="bc[]"]', this);
Y.log(count + ' courses being managed', 'info', 'moodle-course-management');
},
/**
* Registers a course within the management display.
* @method registerCourse
* @param {Course} course
*/
registerCourse: function(course) {
var courses = this.get('courses');
courses.push(course);
this.set('courses', courses);
},
/**
* Handles the event fired by a delegated course listener.
*
* @method handleCourseDelegation
* @protected
* @param {EventFacade} e
*/
handleCourseDelegation: function(e) {
var target = e.currentTarget,
action = target.getData('action'),
courseid = target.ancestor('.listitem').getData('id'),
course = this.getCourseById(courseid);
if (course) {
course.handle(action, e);
} else {
Y.log('Course with ID ' + courseid + ' could not be found for delegation', 'error', 'moodle-course-management');
}
},
/**
 * Handles the event fired by a delegated category listener.
*
* @method handleCategoryDelegation
* @protected
* @param {EventFacade} e
*/
handleCategoryDelegation: function(e) {
var target = e.currentTarget,
action = target.getData('action'),
categoryid = target.ancestor('.listitem').getData('id'),
category = this.getCategoryById(categoryid);
if (category) {
category.handle(action, e);
} else {
Y.log('Could not find category to delegate to.', 'error', 'moodle-course-management');
}
},
/**
* Check if any course is selected.
*
* @method isCourseSelected
* @param {Node} checkboxnode Checkbox node on which action happened.
* @return bool
*/
isCourseSelected: function(checkboxnode) {
var selected = false;
// If any course selected then show move to category select box.
if (checkboxnode && checkboxnode.get('checked')) {
selected = true;
} else {
var i,
course,
courses = this.get('courses'),
length = courses.length;
for (i = 0; i < length; i++) {
if (courses.hasOwnProperty(i)) {
course = courses[i];
if (course.get('node').one('input[name="bc[]"]').get('checked')) {
selected = true;
break;
}
}
}
}
return selected;
},
/**
* Check if any category is selected.
*
* @method isCategorySelected
* @param {Node} checkboxnode Checkbox node on which action happened.
* @return bool
*/
isCategorySelected: function(checkboxnode) {
var selected = false;
// If any category selected then show move to category select box.
if (checkboxnode && checkboxnode.get('checked')) {
selected = true;
} else {
var i,
category,
categories = this.get('categories'),
length = categories.length;
for (i = 0; i < length; i++) {
if (categories.hasOwnProperty(i)) {
category = categories[i];
if (category.get('node').one('input[name="bcat[]"]').get('checked')) {
selected = true;
break;
}
}
}
}
return selected;
},
/**
* Handle bulk sort action.
*
* @method handleBulkSortByaction
* @protected
* @param {EventFacade} e
*/
handleBulkSortByaction: function(e) {
var sortcategoryby = this.get('categorylisting').one('#menuresortcategoriesby'),
sortcourseby = this.get('categorylisting').one('#menuresortcoursesby'),
sortbybutton = this.get('categorylisting').one('input[name="bulksort"]'),
sortby = e;
if (!sortby) {
sortby = this.get('categorylisting').one('#menuselectsortby');
} else {
if (e && e.currentTarget) {
sortby = e.currentTarget;
}
}
// If no sortby select found then return as we can't do anything.
if (!sortby) {
return;
}
if ((this.get('categories').length <= 1) || (!this.isCategorySelected() &&
(sortby.get("options").item(sortby.get('selectedIndex')).getAttribute('value') === 'selectedcategories'))) {
if (sortcategoryby) {
sortcategoryby.setAttribute('disabled', true);
}
if (sortcourseby) {
sortcourseby.setAttribute('disabled', true);
}
if (sortbybutton) {
sortbybutton.setAttribute('disabled', true);
}
} else {
if (sortcategoryby) {
sortcategoryby.removeAttribute('disabled');
}
if (sortcourseby) {
sortcourseby.removeAttribute('disabled');
}
if (sortbybutton) {
sortbybutton.removeAttribute('disabled');
}
}
},
/**
* Returns the category with the given ID.
* @method getCategoryById
* @param {Number} id
* @return {Category|Boolean} The category or false if it can't be found.
*/
getCategoryById: function(id) {
var i,
category,
categories = this.get('categories'),
length = categories.length;
for (i = 0; i < length; i++) {
if (categories.hasOwnProperty(i)) {
category = categories[i];
if (category.get('categoryid') === id) {
return category;
}
}
}
return false;
},
/**
* Returns the course with the given id.
* @method getCourseById
* @param {Number} id
 * @return {Course|Boolean} The course or false if not found.
*/
getCourseById: function(id) {
var i,
course,
courses = this.get('courses'),
length = courses.length;
for (i = 0; i < length; i++) {
if (courses.hasOwnProperty(i)) {
course = courses[i];
if (course.get('courseid') === id) {
return course;
}
}
}
return false;
},
/**
* Removes the course with the given ID.
* @method removeCourseById
* @param {Number} id
*/
removeCourseById: function(id) {
var courses = this.get('courses'),
length = courses.length,
course,
i;
for (i = 0; i < length; i++) {
course = courses[i];
if (course.get('courseid') === id) {
courses.splice(i, 1);
break;
}
}
},
/**
* Performs an AJAX action.
*
* @method performAjaxAction
* @param {String} action The action to perform.
 * @param {Object} args The arguments to pass through with the request.
* @param {Function} callback The function to call when all is done.
* @param {Object} context The object to use as the context for the callback.
*/
performAjaxAction: function(action, args, callback, context) {
var io = new Y.IO();
args.action = action;
args.ajax = '1';
args.sesskey = M.cfg.sesskey;
if (callback === null) {
callback = function() {
Y.log("'Action '" + action + "' completed", 'debug', 'moodle-course-management');
};
}
io.send(this.get('ajaxurl'), {
method: 'POST',
on: {
complete: callback
},
context: context,
data: args,
'arguments': args
});
}
};
Y.extend(Console, Y.Base, Console.prototype);
M.course = M.course || {};
M.course.management = M.course.management || {};
M.course.management.console = null;
/**
 * Initialises the course management console.
*
* @method M.course.management.init
* @static
* @param {Object} config
*/
M.course.management.init = function(config) {
M.course.management.console = new Console(config);
};
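// Minimal usage sketch (assumption: this module is pulled in through Moodle's YUI loader and the
// config object is produced by the PHP course management renderer; the empty object below is
// illustrative only):
//
//   YUI().use('moodle-course-management', function(Y) {
//       M.course.management.init({});
//   });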
|
def create_table(table, headers=None):
    """Print a simple tab-separated table, optionally preceded by a header row."""
if headers:
headerstring = "\t{}\t" * len(headers)
print(headerstring.format(*headers))
rowstring = "\t{}\t" * len(table[0])
for row in table:
print(rowstring.format(*row))
|
# ---
# jupyter:
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %%
cat = 42
|
/*Author: Xterminator
NPC Name: Mr. Goldstein
Map(s): Victoria Road : Lith Harbour (104000000)
Description: Extends Buddy List
*/
var status = 0;
function start() {
status = -1;
}
function action(mode, type, selection) {
if (mode == -1) {
cm.dispose();
} else {
if (status == 0 && mode == 0) {
cm.sendNext("I see... you don't have as many friends as I thought you would. Hahaha, just kidding! Anyway if you feel like changing your mind, please feel free to come back and we'll talk business. If you make a lot of friends, then you know ... hehe ...");
cm.dispose();
return;
} else if (status >= 1 && mode == 0) {
cm.sendNext("I see... I don't think you don't have as many friends as I thought you would. If not, you just don't have 250,000 mesos with you right this minute? Anyway, if you ever change your mind, come back and we'll talk business. That is, of course, once you have get some financial relief... hehe ...");
cm.dispose();
return;
}
if (mode == 1)
status++;
else
status--;
if (status == 0) {
cm.sendYesNo("I hope I can make as much as yesterday... well, hello! Don't you want to extend your buddy list? You look like someone who'd have a whole lot of friends... well, what do you think? With some money I can make it happen for you. Remember, though, it only applies to one character at a time, so it won't affect any of your other characters on your account. Do you want to extend your buddy list?");
} else if (status == 1) {
cm.sendYesNo("Alright, good call! It's not that expensive actually. #b250,000 mesos and I'll add 5 more slots to your buddy list#k. And no, I won't be selling them individually. Once you buy it, it's going to be permanently on your buddy list. So if you're one of those that needs more space there, then you might as well do it. What do you think? Will you spend 250,000 mesos for it?");
} else if (status == 2) {
var capacity = cm.getPlayer().getBuddylist().getCapacity();
if (capacity >= 50 || cm.getMeso() < 250000) {
cm.sendNext("Hey... are you sure you have #b250,000 mesos#k? If so, then check and see if you have extended your buddy list to the max. Even if you pay up, the most you can have on your buddy list is #b50#k.");
} else {
var newcapacity = capacity + 5;
cm.gainMeso(-250000);
cm.updateBuddyCapacity(newcapacity);
cm.sendOk("Alright! Your buddy list will have 5 extra slots by now. Check and see for it yourself. And if you still need more room on your buddy list, you know who to find. Of course, it isn't going to be for free ... well, so long ...");
}
cm.dispose();
}
}
}
|
import React, { useMemo } from 'react';
import { View, StyleSheet } from 'react-native';
import TransactionShape, { TransactionKind } from '../../data/model-shapes/Transaction';
import { ListItem, Icon } from 'react-native-elements';
import Colors from '../../utils/styling/Colors';
const TransactionListItem = ({ transaction }) => {
const iconName = useMemo(() => {
return `ios-arrow-${transaction.kind === TransactionKind.SEND ? 'down' : 'up'}`;
}, [transaction.kind]);
return (
<View style={styles.rootContainer}>
<ListItem key={transaction.id}>
<ListItem.Content>
<ListItem.Title style={styles.leftTitleText}>{transaction.amount.toString().replace(/(\d)(?=(\d{3})+(?!\d))/g, "$1,")}</ListItem.Title>
</ListItem.Content>
<ListItem.Title style={styles.rightTitleText}>{transaction.kind}</ListItem.Title>
<Icon name={iconName} type="ionicon" color={Colors.purple} />
</ListItem>
</View>
);
};
const styles = StyleSheet.create({
leftTitleText: {
color: Colors.purple,
fontSize: 20,
fontWeight: '300',
},
rightTitleText: {
color: Colors.purple,
fontSize: 20,
fontWeight: '500',
},
});
TransactionListItem.propTypes = {
transaction: TransactionShape.isRequired,
};
TransactionListItem.defaultProps = {};
export default TransactionListItem;
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(React.createElement(React.Fragment, null, React.createElement("path", {
d: "M21.5 4c-.83 0-1.5.67-1.5 1.5v5c0 .28-.22.5-.5.5s-.5-.22-.5-.5v-8c0-.83-.67-1.5-1.5-1.5S16 1.67 16 2.5v8c0 .28-.22.5-.5.5s-.5-.22-.5-.5v-9c0-.83-.67-1.5-1.5-1.5S12 .67 12 1.5v8.99c0 .28-.22.5-.5.5s-.5-.22-.5-.5V4.5c0-.83-.67-1.5-1.5-1.5S8 3.67 8 4.5v11.41l-4.12-2.35c-.58-.33-1.3-.24-1.78.22-.6.58-.62 1.54-.03 2.13l6.78 6.89c.75.77 1.77 1.2 2.85 1.2H19c2.21 0 4-1.79 4-4V5.5c0-.83-.67-1.5-1.5-1.5z"
})), 'PanToolRounded'); |
from cryptowatch.utils import log
from cryptowatch.auth import read_config
from cryptowatch.requestor import Requestor
from cryptowatch import stream
from cryptowatch.resources.assets import Assets
from cryptowatch.resources.instruments import Instruments
from cryptowatch.resources.exchanges import Exchanges
from cryptowatch.resources.markets import Markets
# Package version
__version__ = "0.0.12"
sdk_version = __version__
# Try to read and set API endpoints from credential file
api_key, rest_endpoint, ws_endpoint = read_config()
# API default endpoints
if not rest_endpoint:
rest_endpoint = "https://api.cryptowat.ch"
if not ws_endpoint:
ws_endpoint = "wss://stream.cryptowat.ch/connect"
def is_authenticated():
return api_key is not None
# HTTP client default settings
verify_ssl = True
connect_timeout = 4 # in seconds
read_timeout = 10 # in seconds
max_retries = 10 # number of time we'll retry a failing request
_user_agent = (
"Mozilla/5.0 (compatible; Cryptowatch-Official-Python-SDK"
"/v{} +https://cryptowat.ch/)".format(sdk_version)
)
# Get an instance of the HTTP client
requestor = Requestor(rest_endpoint, _user_agent, locals())
# Serialize resources to namespace
assets = Assets(requestor)
instruments = Instruments(requestor)
exchanges = Exchanges(requestor)
markets = Markets(requestor)
|
"""[Default values]
What happens at run time...
When modules are loaded: All the code is executed immediately.
Module Code
a = 10 the integer object 10 is created and a references it.
def func(a): the function object is created, and func references it.
print(a)
func(a) the function is executed
What about default values?
Module code
def func(a=10): the function object is created, and func references it
print(a) the integer object 10 is evaluated/created and is assigned as the default value for a
func() the function is executed
by the time this happens, the default value for a has already been evaluated and assigned - it is not re-evaluated when the function is called
So what?
Consider this:
We want to create a function that will write a log entry to the console with a user-specified event date/time. If the user does not supply a date/time, we want to set it to the current date/time.
from datetime import datetime
def log(msg, *, dt=datetime.utcnow()):
print('{0}: {1}'.format(dt, msg))
log('message 1') -> 2017-08-21 20:54:37.706994 : message 1
a few minutes later
log('message 2') -> 2017-08-21 20:54:37.706994 : message 2 ## note: the same timestamp as for message 1 is shown.
Solution Pattern: we need the timestamp of the moment the call is executed
We set the default for dt to None
If dt is None inside the function body, set it to the current date/time
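A minimal sketch of this pattern (same log signature as above; utcnow() is now evaluated at call time):
from datetime import datetime
def log(msg, *, dt=None):
    dt = dt or datetime.utcnow()
    print('{0}: {1}'.format(dt, msg))
log('message 1') -> timestamp of the first call
a few minutes later
log('message 2') -> a later, different timestamp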
""" |
from devices.boolean_sensor import BooleanSensor
class ContactSensor(BooleanSensor):
    def __init__(self, alias, value_key, device_name_suffix=''):
super().__init__(alias, value_key, BooleanSensor.SENSOR_TYPE_CONTACT, device_name_suffix)
|
const webpack = require('webpack')
const paths = require('./paths')
module.exports = {
mode: 'development',
output: {
filename: '[name].js',
path: paths.outputPath,
chunkFilename: '[name].js',
},
performance: {
hints: 'warning',
maxAssetSize: 450000,
maxEntrypointSize: 8500000,
assetFilter: assetFilename => (
assetFilename.endsWith('.css') || assetFilename.endsWith('.js')
),
},
optimization: {
splitChunks: {
cacheGroups: {
commons: {
test: /[\\/](node_modules|common|src\/schema)[\\/]/,
name: 'dependencies',
chunks: 'all',
},
},
},
},
devtool: 'cheap-module-eval-source-map',
devServer: {
proxy: {
'/api': {
target: 'http://172.16.0.205:9000',
pathRewrite: { '^/api': '' },
changeOrigin: true,
secure: false,
},
},
contentBase: paths.outputPath,
compress: true,
hot: true,
historyApiFallback: true,
port: 1234,
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
],
}
|
var myImage = document.querySelector('img')
var myRequest = new Request('flowers.jpg')
fetch(myRequest).then(function(response) {
return response.blob()
}).then(function(response) {
var objectURL = URL.createObjectURL(response)
myImage.src = objectURL
}) |
DD_ORIGIN = "_dd_origin"
AUTO_REJECT = 0
AUTO_KEEP = 1
USER_KEEP = 2
SAMPLE_RATE_METRIC_KEY = "_sample_rate"
SAMPLING_PRIORITY_KEY = "_sampling_priority_v1"
|
# -*- coding: utf-8 -*-
"""Converter for WikiPathways."""
import logging
from typing import Iterable
import bioversions
from .gmt_utils import parse_wikipathways_gmt
from ..constants import SPECIES_REMAPPING
from ..struct import Obo, Reference, Term, from_species
from ..struct.typedef import has_part
from ..utils.path import ensure_path
logger = logging.getLogger(__name__)
PREFIX = "wikipathways"
_PATHWAY_INFO = [
("Anopheles_gambiae", "7165"),
("Arabidopsis_thaliana", "3702"),
("Bos_taurus", "9913"),
("Caenorhabditis_elegans", "6239"),
("Canis_familiaris", "9615"),
("Danio_rerio", "7955"),
("Drosophila_melanogaster", "7227"),
("Equus_caballus", "9796"),
("Gallus_gallus", "9031"),
("Homo_sapiens", "9606"),
("Mus_musculus", "10090"),
("Oryza_sativa", "4530"),
("Pan_troglodytes", "9598"),
("Populus_trichocarpa", "3694"),
("Rattus_norvegicus", "10116"),
("Saccharomyces_cerevisiae", "4932"),
("Sus_scrofa", "9823"),
]
def get_obo() -> Obo:
"""Get WikiPathways as OBO."""
version = bioversions.get_version("wikipathways")
return Obo(
ontology=PREFIX,
name="WikiPathways",
data_version=version,
iter_terms=iter_terms,
iter_terms_kwargs=dict(version=version),
typedefs=[has_part, from_species],
auto_generated_by=f"bio2obo:{PREFIX}",
)
def iter_terms(version: str) -> Iterable[Term]:
"""Get WikiPathways terms."""
base_url = f"http://data.wikipathways.org/{version}/gmt/wikipathways-{version}-gmt"
for species_code, taxonomy_id in _PATHWAY_INFO:
url = f"{base_url}-{species_code}.gmt"
path = ensure_path(PREFIX, url=url, version=version)
species_code = species_code.replace("_", " ")
species_reference = Reference(
prefix="ncbitaxon",
identifier=taxonomy_id,
name=SPECIES_REMAPPING.get(species_code, species_code),
)
for identifier, _version, _revision, name, _species, genes in parse_wikipathways_gmt(path):
term = Term(reference=Reference(prefix=PREFIX, identifier=identifier, name=name))
term.append_relationship(from_species, species_reference)
for ncbigene_id in genes:
term.append_relationship(
has_part, Reference(prefix="ncbigene", identifier=ncbigene_id)
)
yield term
if __name__ == "__main__":
get_obo().write_default()
|
var _extend = require("../../core/utils/extend").extend,
inArray = require("../../core/utils/array").inArray,
_each = require("../../core/utils/iterator").each,
rangeCalculator = require("./helpers/range_data_calculator"),
typeUtils = require("../../core/utils/type"),
vizUtils = require("../core/utils"),
_noop = require("../../core/utils/common").noop,
_isDefined = typeUtils.isDefined,
_isString = typeUtils.isString,
_map = vizUtils.map,
_normalizeEnum = vizUtils.normalizeEnum,
math = Math,
_abs = math.abs,
_sqrt = math.sqrt,
_max = math.max,
DEFAULT_TRACKER_WIDTH = 12,
DEFAULT_DURATION = 400,
HIGH_ERROR = "highError",
LOW_ERROR = "lowError",
VARIANCE = "variance",
STANDARD_DEVIATION = "stddeviation",
STANDARD_ERROR = "stderror",
PERCENT = "percent",
FIXED = "fixed",
UNDEFINED = "undefined",
DISCRETE = "discrete",
LOGARITHMIC = "logarithmic",
DATETIME = "datetime";
exports.chart = {};
exports.polar = {};
function sum(array) {
var result = 0;
_each(array, function(_, value) {
result += value;
});
return result;
}
function isErrorBarTypeCorrect(type) {
// TODO why UNDEFINED is here
// return inArray(type, [FIXED, PERCENT, VARIANCE, STANDARD_DEVIATION, STANDARD_ERROR, UNDEFINED]) !== -1;
return inArray(type, [FIXED, PERCENT, VARIANCE, STANDARD_DEVIATION, STANDARD_ERROR]) !== -1;
}
function variance(array, expectedValue) {
return sum(_map(array, function(value) {
return (value - expectedValue) * (value - expectedValue);
})) / array.length;
}
function calculateAvgErrorBars(result, data, series) {
var errorBarsOptions = series.getOptions().valueErrorBar,
valueField = series.getValueFields()[0],
lowValueField = errorBarsOptions.lowValueField || LOW_ERROR,
highValueField = errorBarsOptions.highValueField || HIGH_ERROR;
if(series.areErrorBarsVisible() && errorBarsOptions.type === undefined) {
var fusionData = data.reduce(function(result, item) {
if(_isDefined(item[lowValueField])) {
result[0] += item[valueField] - item[lowValueField];
result[1]++;
}
if(_isDefined(item[highValueField])) {
result[2] += item[highValueField] - item[valueField];
result[3]++;
}
return result;
}, [0, 0, 0, 0]);
if(fusionData[1]) {
result[lowValueField] = result[valueField] - fusionData[0] / fusionData[1];
}
if(fusionData[2]) {
result[highValueField] = result[valueField] + fusionData[2] / fusionData[3];
}
}
return result;
}
function calculateSumErrorBars(result, data, series) {
var errorBarsOptions = series.getOptions().valueErrorBar,
lowValueField = errorBarsOptions.lowValueField || LOW_ERROR,
highValueField = errorBarsOptions.highValueField || HIGH_ERROR;
if(series.areErrorBarsVisible() && errorBarsOptions.type === undefined) {
result[lowValueField] = 0;
result[highValueField] = 0;
result = data.reduce(function(result, item) {
result[lowValueField] += item[lowValueField];
result[highValueField] += item[highValueField];
return result;
}, result);
}
return result;
}
function getMinMaxAggregator(compare) {
return ({ intervalStart, data }, series) => {
const valueField = series.getValueFields()[0];
let targetData = data[0];
targetData = data.reduce((result, item) => {
const value = item[valueField];
if(result[valueField] === null) {
result = item;
}
if(value !== null && compare(value, result[valueField])) {
return item;
}
return result;
}, targetData);
return _extend({}, targetData, {
[series.getArgumentField()]: intervalStart
});
};
}
function checkFields(data, fieldsToCheck, skippedFields) {
let allFieldsIsValid = true;
for(let field in fieldsToCheck) {
const isArgument = field === "argument";
if((isArgument || field === "size") ? !_isDefined(data[field]) : data[field] === undefined) {
const selector = fieldsToCheck[field];
if(!isArgument) {
skippedFields[selector] = (skippedFields[selector] || 0) + 1;
}
allFieldsIsValid = false;
}
}
return allFieldsIsValid;
}
var baseScatterMethods = {
_defaultDuration: DEFAULT_DURATION,
_defaultTrackerWidth: DEFAULT_TRACKER_WIDTH,
_applyStyle: _noop,
_updateOptions: _noop,
_parseStyle: _noop,
_prepareSegment: _noop,
_drawSegment: _noop,
_appendInGroup: function() {
this._group.append(this._extGroups.seriesGroup);
},
_createLegendState: function(styleOptions, defaultColor) {
return {
fill: styleOptions.color || defaultColor,
hatching: styleOptions.hatching ? _extend({}, styleOptions.hatching, { direction: "right" }) : undefined
};
},
_applyElementsClipRect: function(settings) {
settings["clip-path"] = this._paneClipRectID;
},
_applyMarkerClipRect: function(settings) {
settings["clip-path"] = this._forceClipping ? this._paneClipRectID : null;
},
_createGroup: function(groupName, parent, target, settings) {
var group = parent[groupName] = parent[groupName] || this._renderer.g();
target && group.append(target);
settings && group.attr(settings);
},
_applyClearingSettings: function(settings) {
settings.opacity = null;
settings.scale = null;
if(this._options.rotated) {
settings.translateX = null;
} else {
settings.translateY = null;
}
},
_createGroups: function() {
var that = this;
that._createGroup("_markersGroup", that, that._group);
that._createGroup("_labelsGroup", that);
},
_setMarkerGroupSettings: function() {
var that = this,
settings = that._createPointStyles(that._getMarkerGroupOptions()).normal;
settings["class"] = "dxc-markers";
settings.opacity = 1; // T172577
that._applyMarkerClipRect(settings);
that._markersGroup.attr(settings);
},
getVisibleArea: function() {
return this._visibleArea;
},
areErrorBarsVisible: function() {
var errorBarOptions = this._options.valueErrorBar;
return errorBarOptions && this._errorBarsEnabled() && errorBarOptions.displayMode !== "none" && (isErrorBarTypeCorrect(_normalizeEnum(errorBarOptions.type)) || (_isDefined(errorBarOptions.lowValueField) || _isDefined(errorBarOptions.highValueField)));
},
groupPointsByCoords(rotated) {
const cat = [];
_each(this.getVisiblePoints(), function(_, p) {
var pointCoord = parseInt(rotated ? p.vy : p.vx);
if(!cat[pointCoord]) {
cat[pointCoord] = p;
} else {
Array.isArray(cat[pointCoord]) ? cat[pointCoord].push(p) : (cat[pointCoord] = [cat[pointCoord], p]);
}
});
return cat;
},
_createErrorBarGroup: function(animationEnabled) {
var that = this,
errorBarOptions = that._options.valueErrorBar,
settings;
if(that.areErrorBarsVisible()) {
settings = {
"class": "dxc-error-bars",
stroke: errorBarOptions.color,
'stroke-width': errorBarOptions.lineWidth,
opacity: animationEnabled ? 0.001 : errorBarOptions.opacity || 1,
"stroke-linecap": "square",
sharp: true,
"clip-path": that._forceClipping ? that._paneClipRectID : that._widePaneClipRectID
};
that._createGroup("_errorBarGroup", that, that._group, settings);
}
},
_setGroupsSettings: function(animationEnabled) {
var that = this;
that._setMarkerGroupSettings();
that._setLabelGroupSettings(animationEnabled);
that._createErrorBarGroup(animationEnabled);
},
_getCreatingPointOptions: function() {
var that = this,
defaultPointOptions,
creatingPointOptions = that._predefinedPointOptions,
normalStyle;
if(!creatingPointOptions) {
defaultPointOptions = that._getPointOptions();
that._predefinedPointOptions = creatingPointOptions = _extend(true, { styles: {} }, defaultPointOptions);
normalStyle = defaultPointOptions.styles && defaultPointOptions.styles.normal || {};
creatingPointOptions.styles = creatingPointOptions.styles || {};
creatingPointOptions.styles.normal = {
"stroke-width": normalStyle["stroke-width"],
r: normalStyle.r,
opacity: normalStyle.opacity
};
}
return creatingPointOptions;
},
_getPointOptions: function() {
return this._parsePointOptions(this._preparePointOptions(), this._options.label);
},
_getOptionsForPoint: function() {
return this._options.point;
},
_parsePointStyle: function(style, defaultColor, defaultBorderColor, defaultSize) {
var border = style.border || {},
sizeValue = style.size !== undefined ? style.size : defaultSize;
return {
fill: style.color || defaultColor,
stroke: border.color || defaultBorderColor,
"stroke-width": border.visible ? border.width : 0,
r: sizeValue / 2 + (border.visible && sizeValue !== 0 ? ~~(border.width / 2) || 0 : 0)
};
},
_createPointStyles: function(pointOptions) {
var that = this,
mainPointColor = pointOptions.color || that._options.mainSeriesColor,
containerColor = that._options.containerBackgroundColor,
normalStyle = that._parsePointStyle(pointOptions, mainPointColor, mainPointColor);
normalStyle.visibility = pointOptions.visible ? "visible" : "hidden";
return {
normal: normalStyle,
hover: that._parsePointStyle(pointOptions.hoverStyle, containerColor, mainPointColor, pointOptions.size),
selection: that._parsePointStyle(pointOptions.selectionStyle, containerColor, mainPointColor, pointOptions.size)
};
},
_checkData: function(data, skippedFields, fieldsToCheck) {
fieldsToCheck = fieldsToCheck || { value: this.getValueFields()[0] };
fieldsToCheck.argument = this.getArgumentField();
        // The trailing self-comparison rejects NaN values (NaN !== NaN).
        return checkFields(data, fieldsToCheck, skippedFields || {}) && data.value === data.value;
},
getErrorBarRangeCorrector: function() {
var mode,
func;
if(this.areErrorBarsVisible()) {
mode = _normalizeEnum(this._options.valueErrorBar.displayMode);
func = function(point) {
var lowError = point.lowError,
highError = point.highError;
switch(mode) {
case "low": return [lowError];
case "high": return [highError];
case "none": return [];
default: return [lowError, highError];
}
};
}
return func;
},
getValueRangeInitialValue: function() {
return undefined;
},
_getRangeData: function() {
return rangeCalculator.getRangeData(this);
},
_getPointDataSelector: function() {
const valueField = this.getValueFields()[0];
const argumentField = this.getArgumentField();
const tagField = this.getTagField();
const areErrorBarsVisible = this.areErrorBarsVisible();
let lowValueField, highValueField;
if(areErrorBarsVisible) {
const errorBarOptions = this._options.valueErrorBar;
lowValueField = errorBarOptions.lowValueField || LOW_ERROR;
highValueField = errorBarOptions.highValueField || HIGH_ERROR;
}
return (data) => {
const pointData = {
value: data[valueField],
argument: data[argumentField],
tag: data[tagField],
data: data
};
if(areErrorBarsVisible) {
pointData.lowError = data[lowValueField];
pointData.highError = data[highValueField];
}
return pointData;
};
},
_errorBarsEnabled: function() {
return (this.valueAxisType !== DISCRETE && this.valueAxisType !== LOGARITHMIC && this.valueType !== DATETIME);
},
_drawPoint: function(options) {
var point = options.point;
if(point.isInVisibleArea()) {
point.clearVisibility();
point.draw(this._renderer, options.groups, options.hasAnimation, options.firstDrawing);
this._drawnPoints.push(point);
} else {
point.setInvisibility();
}
},
_animateComplete: function() {
var that = this,
animationSettings = { duration: that._defaultDuration };
that._labelsGroup && that._labelsGroup.animate({ opacity: 1 }, animationSettings);
that._errorBarGroup && that._errorBarGroup.animate({ opacity: (that._options.valueErrorBar).opacity || 1 }, animationSettings);
},
_animate: function() {
var that = this,
lastPointIndex = that._drawnPoints.length - 1;
_each(that._drawnPoints || [], function(i, p) {
p.animate(i === lastPointIndex ? function() { that._animateComplete(); } : undefined, { translateX: p.x, translateY: p.y });
});
},
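// Built-in aggregation strategies used when the series groups points into intervals:
// "avg" and "sum" accumulate the value field (recomputing error bars via
// calculateAvgErrorBars / calculateSumErrorBars), "count" counts defined values,
// and "min"/"max" are produced by getMinMaxAggregator. Each receives the interval's
// data items plus the series and returns a single aggregated data object.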
_defaultAggregator: "avg",
_aggregators: {
avg({ data, intervalStart }, series) {
if(!data.length) {
return;
}
const valueField = series.getValueFields()[0];
const aggregationResult = data.reduce((result, item) => {
const value = item[valueField];
if(_isDefined(value)) {
result[0] += value;
result[1]++;
} else if(value === null) {
result[2]++;
}
return result;
}, [0, 0, 0]);
return calculateAvgErrorBars({
[valueField]: aggregationResult[2] === data.length ? null : aggregationResult[0] / aggregationResult[1],
[series.getArgumentField()]: intervalStart
}, data, series);
},
sum({ intervalStart, data }, series) {
if(!data.length) {
return;
}
const valueField = series.getValueFields()[0];
const aggregationResult = data.reduce((result, item) => {
const value = item[valueField];
if(value !== undefined) {
result[0] += value;
}
if(value === null) {
result[1]++;
} else if(value === undefined) {
result[2]++;
}
return result;
}, [0, 0, 0]);
let value = aggregationResult[0];
if(aggregationResult[1] === data.length) {
value = null;
}
if(aggregationResult[2] === data.length) {
return;
}
return calculateSumErrorBars({
[valueField]: value,
[series.getArgumentField()]: intervalStart
}, data, series);
},
count({ data, intervalStart }, series) {
const valueField = series.getValueFields()[0];
return {
[series.getArgumentField()]: intervalStart,
[valueField]: data.filter(i => i[valueField] !== undefined).length
};
},
min: getMinMaxAggregator((a, b) => a < b),
max: getMinMaxAggregator((a, b) => a > b)
},
_endUpdateData: function() {
delete this._predefinedPointOptions;
},
getArgumentField: function() {
return this._options.argumentField || "arg";
},
getValueFields: function() {
var options = this._options,
errorBarsOptions = options.valueErrorBar,
valueFields = [options.valueField || "val"],
lowValueField,
highValueField;
if(errorBarsOptions) {
lowValueField = errorBarsOptions.lowValueField;
highValueField = errorBarsOptions.highValueField;
_isString(lowValueField) && valueFields.push(lowValueField);
_isString(highValueField) && valueFields.push(highValueField);
}
return valueFields;
},
_calculateErrorBars: function(data) {
if(!this.areErrorBarsVisible()) {
return;
}
var that = this,
options = that._options,
errorBarsOptions = options.valueErrorBar,
errorBarType = _normalizeEnum(errorBarsOptions.type),
floatErrorValue = parseFloat(errorBarsOptions.value),
valueField = that.getValueFields()[0],
value,
lowValueField = errorBarsOptions.lowValueField || LOW_ERROR,
highValueField = errorBarsOptions.highValueField || HIGH_ERROR,
valueArray,
valueArrayLength,
meanValue,
processDataItem,
addSubError = function(_i, item) {
value = item.value;
item.lowError = value - floatErrorValue;
item.highError = value + floatErrorValue;
};
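// The error bar type determines how lowError/highError are derived: FIXED adds and
// subtracts a constant, PERCENT a share of each point's value, UNDEFINED copies the
// low/high fields straight from the data item, and the statistical modes (VARIANCE,
// STANDARD_DEVIATION, STANDARD_ERROR) compute the offset from the whole value array.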
switch(errorBarType) {
case FIXED:
processDataItem = addSubError;
break;
case PERCENT:
processDataItem = function(_, item) {
value = item.value;
var error = value * floatErrorValue / 100;
item.lowError = value - error;
item.highError = value + error;
};
break;
case UNDEFINED: // TODO: rework this
processDataItem = function(_, item) {
item.lowError = item.data[lowValueField];
item.highError = item.data[highValueField];
};
break;
default:
valueArray = _map(data, function(item) { return _isDefined(item.data[valueField]) ? item.data[valueField] : null; });
valueArrayLength = valueArray.length;
floatErrorValue = floatErrorValue || 1;
switch(errorBarType) {
case VARIANCE:
floatErrorValue = variance(valueArray, sum(valueArray) / valueArrayLength) * floatErrorValue;
processDataItem = addSubError;
break;
case STANDARD_DEVIATION:
meanValue = sum(valueArray) / valueArrayLength;
floatErrorValue = _sqrt(variance(valueArray, meanValue)) * floatErrorValue;
processDataItem = function(_, item) {
item.lowError = meanValue - floatErrorValue;
item.highError = meanValue + floatErrorValue;
};
break;
case STANDARD_ERROR:
floatErrorValue = _sqrt(variance(valueArray, sum(valueArray) / valueArrayLength) / valueArrayLength) * floatErrorValue;
processDataItem = addSubError;
break;
}
}
processDataItem && _each(data, processDataItem);
},
_patchMarginOptions: function(options) {
var pointOptions = this._getCreatingPointOptions(),
styles = pointOptions.styles,
maxSize = [styles.normal, styles.hover, styles.selection]
.reduce(function(max, style) {
return _max(max, style.r * 2 + style["stroke-width"]);
}, 0);
options.size = pointOptions.visible ? maxSize : 0;
options.sizePointNormalState = pointOptions.visible ? styles.normal.r * 2 + styles.normal["stroke-width"] : 2;
return options;
},
usePointsToDefineAutoHiding() {
return true;
}
};
exports.chart = _extend({}, baseScatterMethods, {
drawTrackers: function() {
var that = this,
trackers,
trackersGroup,
segments = that._segments || [],
rotated = that._options.rotated;
if(!that.isVisible()) {
return;
}
if(segments.length) {
trackers = that._trackers = that._trackers || [];
trackersGroup = that._trackersGroup = (that._trackersGroup || that._renderer.g().attr({
fill: "gray",
opacity: 0.001,
stroke: "gray",
"class": "dxc-trackers"
})).attr({ "clip-path": this._paneClipRectID || null }).append(that._group);
_each(segments, function(i, segment) {
if(!trackers[i]) {
trackers[i] = that._drawTrackerElement(segment).data({ "chart-data-series": that }).append(trackersGroup);
} else {
that._updateTrackerElement(segment, trackers[i]);
}
});
}
that._trackersTranslator = that.groupPointsByCoords(rotated);
},
checkAxisVisibleAreaCoord(isArgument, coord) {
const axis = isArgument ? this.getArgumentAxis() : this.getValueAxis();
const visibleArea = axis.getVisibleArea();
return _isDefined(coord) && visibleArea[0] <= coord && visibleArea[1] >= coord;
},
checkSeriesViewportCoord(axis, coord) {
return true;
},
getShapePairCoord(coord, isArgument, getPointClearance) {
let oppositeCoord = null;
const isOpposite = !isArgument && !this._options.rotated || isArgument && this._options.rotated;
const coordName = !isOpposite ? "vx" : "vy";
const oppositeCoordName = !isOpposite ? "vy" : "vx";
const points = this.getVisiblePoints();
for(let i = 0; i < points.length; i++) {
const p = points[i];
const tmpCoord = Math.abs(p[coordName] - coord) <= getPointClearance(p) ? p[oppositeCoordName] : undefined;
if(this.checkAxisVisibleAreaCoord(!isArgument, tmpCoord)) {
oppositeCoord = tmpCoord;
break;
}
}
return oppositeCoord;
},
getSeriesPairCoord(coord, isArgument) {
return this.getShapePairCoord(coord, isArgument, () => {
return this._options.point.size / 2;
});
},
getNearestPointsByCoord(coord, isArgument) {
const that = this;
const rotated = that.getOptions().rotated;
const isOpposite = !isArgument && !rotated || isArgument && rotated;
const coordName = isOpposite ? "vy" : "vx";
const points = that.getVisiblePoints();
const allPoints = that.getPoints();
const nearestPoints = [];
if(that.isVisible() && allPoints.length > 0) {
if(allPoints.length > 1) {
that.findNeighborPointsByCoord(coord, coordName, points.slice(0), allPoints, (point, nextPoint) => {
nearestPoints.push([point, nextPoint]);
});
} else {
if(allPoints[0][coordName] === coord) {
nearestPoints.push([allPoints[0], allPoints[0]]);
}
}
}
return nearestPoints;
},
findNeighborPointsByCoord(coord, coordName, points, allPoints, pushNeighborPoints) {
let searchPoints = allPoints;
if(points.length > 0) {
points.splice(0, 0, allPoints[allPoints.indexOf(points[0]) - 1]);
points.splice(points.length, 0, allPoints[allPoints.indexOf(points[points.length - 1]) + 1]);
searchPoints = points;
}
searchPoints.forEach((p, i) => {
const np = searchPoints[i + 1];
if(p && np && (p[coordName] <= coord && np[coordName] >= coord || p[coordName] >= coord && np[coordName] <= coord)) {
pushNeighborPoints(p, np);
}
});
},
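// Finds the point nearest to the pointer: the tracker translator is scanned outward
// from the pointer's argument coordinate in both directions, and when several points
// share that coordinate the one closest along the opposite axis is returned.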
getNeighborPoint: function(x, y) {
var pCoord = this._options.rotated ? y : x,
nCoord = pCoord,
cat = this._trackersTranslator,
point = null,
minDistance,
oppositeCoord = this._options.rotated ? x : y,
oppositeCoordName = this._options.rotated ? "vx" : "vy";
if(this.isVisible() && cat) {
point = cat[pCoord];
do {
point = cat[nCoord] || cat[pCoord];
pCoord--;
nCoord++;
} while((pCoord >= 0 || nCoord < cat.length) && !point);
if(Array.isArray(point)) {
minDistance = _abs(point[0][oppositeCoordName] - oppositeCoord);
_each(point, function(i, p) {
var distance = _abs(p[oppositeCoordName] - oppositeCoord);
if(minDistance >= distance) {
minDistance = distance;
point = p;
}
});
}
}
return point;
},
_applyVisibleArea: function() {
var that = this,
rotated = that._options.rotated,
visibleX = (rotated ? that.getValueAxis() : that.getArgumentAxis()).getVisibleArea(),
visibleY = (rotated ? that.getArgumentAxis() : that.getValueAxis()).getVisibleArea();
that._visibleArea = {
minX: visibleX[0],
maxX: visibleX[1],
minY: visibleY[0],
maxY: visibleY[1]
};
}
});
exports.polar = _extend({}, baseScatterMethods, {
drawTrackers: function() {
exports.chart.drawTrackers.call(this);
var cat = this._trackersTranslator,
index;
if(!this.isVisible()) {
return;
}
_each(cat, function(i, category) {
if(category) {
index = i;
return false;
}
});
cat[index + 360] = cat[index];
},
getNeighborPoint: function(x, y) {
var pos = vizUtils.convertXYToPolar(this.getValueAxis().getCenter(), x, y);
return exports.chart.getNeighborPoint.call(this, pos.phi, pos.r);
},
_applyVisibleArea: function() {
var that = this,
canvas = that.getValueAxis().getCanvas();
that._visibleArea = {
minX: canvas.left,
maxX: canvas.width - canvas.right,
minY: canvas.top,
maxY: canvas.height - canvas.bottom
};
}
});
|
var validation__mode_8h =
[
[ "EnableBitfieldOperators< ValidationMode >", "structktt_1_1_enable_bitfield_operators_3_01_validation_mode_01_4.html", "structktt_1_1_enable_bitfield_operators_3_01_validation_mode_01_4" ],
[ "ValidationMode", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04", [
[ "None", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04a6adf97f83acf6453d4a6a4b1070f3754", null ],
[ "Running", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04a5bda814c4aedb126839228f1a3d92f09", null ],
[ "OfflineTuning", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04add15d361ea740ab5e11145f0fa2f9374", null ],
[ "OnlineTuning", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04abc40db0f46cab2811dede4f5341446fb", null ],
[ "All", "validation__mode_8h.html#a3baf318a03750f7418a5faa051967c04ab1c94ca2fbc3e78fc30069c8d0f01680", null ]
] ]
]; |
const express = require('express');
const Notes = require('./trip_notesModel.js');
const router = express.Router();
/******************GETS************* */
router.get('/', (req, res) => {
    console.log(req.body);
    Notes.find()
        .then(notes => {
            res.json(notes);
        })
        .catch(err => {
            res.status(500).json({ message: 'Database failed to return notes' });
            console.log(err);
        });
});
module.exports = router; |
# Generated by Django 3.0.8 on 2020-08-16 18:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('app_jumanji', '0006_auto_20200815_2218'),
]
operations = [
migrations.CreateModel(
name='Resume',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('surname', models.CharField(max_length=100)),
('status', models.CharField(choices=[('not_in_search', 'Не ищу работу'),
('consideration', 'Рассматриваю предложения'), ('in_search', 'Ищу работу')], max_length=100)),
('salary', models.FloatField()),
('specialty', models.CharField(choices=[('frontend', 'Фронтенд'), ('backend', 'Бэкенд'),
('gamedev', 'Геймдев'), ('devops', 'Девопс'), ('design', 'Дизайн'), ('products', 'Продукты'),
('management', 'Менеджмент'), ('testing', 'Тестирование')], max_length=100)),
('grade', models.CharField(choices=[('intern', 'intern'), ('junior', 'junior'), ('middle', 'middle'),
('senior', 'senior'), ('lead', 'lead')], max_length=100)),
('education', models.CharField(choices=[('missing', 'Отсутствует'), ('secondary', 'Среднее'),
('vocational', 'Средне-специальное'), ('incomplete_higher', 'Неполное высшее'),
('higher', 'Высшее')], max_length=100)),
('experience', models.CharField(max_length=500)),
('portfolio', models.CharField(max_length=500)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='resume', to=settings.AUTH_USER_MODEL)),
],
),
]
|
let fs = require('fs');
let Eos = require('eosjs');
let httpEndpoint = 'https://api-kylin.eosasia.one';
let chainId = '5fff1dae8dc8e2fc4d5b23b2c7665c97f9e9d8edf2b6485a86ba311c25639191';
var secret = fs.readFileSync('buy/key.txt', {encoding: 'utf8'});
let keyProvider = [secret];
let eos = Eos({httpEndpoint, chainId, keyProvider});
module.exports = {
eos: eos,
public_key: 'EOS7R7sj7qvGPCT8ZutguKJeW6EsptBuHMTtXrQNEAyyBSzdN18N8'
};
|
"""
WSGI config for household_budget project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "household_budget.settings")
application = get_wsgi_application()
|
import argparse
import pandas as pd
if __name__ == "__main__":
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--tip-attributes", required=True, help="table of tip attributes from one or more timepoints")
parser.add_argument("--output", required=True, help="table of tip attributes annotated with a 'naive' predictor")
args = parser.parse_args()
# Annotate a predictor for a naive model with no growth.
df = pd.read_csv(args.tip_attributes, sep="\t")
df["naive"] = 0.0
df.to_csv(args.output, sep="\t", index=False)
|
const submitForm = () => {
const regex = new RegExp(
/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)/gi
);
const value = document.getElementById("input").value;
if (value.match(regex)) {
document.getElementById("submit").innerHTML = "please wait...";
document.location.href = `/poem?page=${value}`;
} else {
alert("enter a url please");
}
};
|
from django.apps import AppConfig
class HouseplantsApiConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'houseplants_api'
|
/* */
"format cjs";
import { DOM } from 'angular2/src/platform/dom/dom_adapter';
/**
* A service that can be used to get and set the title of a current HTML document.
*
* Since an Angular 2 application can't be bootstrapped on the entire HTML document (`<html>` tag)
* it is not possible to bind to the `text` property of the `HTMLTitleElement` elements
* (representing the `<title>` tag). Instead, this service can be used to set and get the current
* title value.
*/
export class Title {
/**
* Get the title of the current HTML document.
* @returns {string}
*/
getTitle() { return DOM.getTitle(); }
/**
* Set the title of the current HTML document.
* @param newTitle
*/
setTitle(newTitle) { DOM.setTitle(newTitle); }
}
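// Illustrative usage sketch (the title string is an arbitrary example; only the
// Title API defined above is assumed):
//
//   const titleService = new Title();
//   titleService.setTitle('Dashboard');
//   titleService.getTitle(); // returns the current document title, here 'Dashboard'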
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidGl0bGUuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyJhbmd1bGFyMi9zcmMvcGxhdGZvcm0vYnJvd3Nlci90aXRsZS50cyJdLCJuYW1lcyI6WyJUaXRsZSIsIlRpdGxlLmdldFRpdGxlIiwiVGl0bGUuc2V0VGl0bGUiXSwibWFwcGluZ3MiOiJPQUFPLEVBQUMsR0FBRyxFQUFDLE1BQU0sdUNBQXVDO0FBRXpEOzs7Ozs7O0dBT0c7QUFDSDtJQUNFQTs7O09BR0dBO0lBQ0hBLFFBQVFBLEtBQWFDLE1BQU1BLENBQUNBLEdBQUdBLENBQUNBLFFBQVFBLEVBQUVBLENBQUNBLENBQUNBLENBQUNBO0lBRTdDRDs7O09BR0dBO0lBQ0hBLFFBQVFBLENBQUNBLFFBQWdCQSxJQUFJRSxHQUFHQSxDQUFDQSxRQUFRQSxDQUFDQSxRQUFRQSxDQUFDQSxDQUFDQSxDQUFDQSxDQUFDQTtBQUN4REYsQ0FBQ0E7QUFBQSIsInNvdXJjZXNDb250ZW50IjpbImltcG9ydCB7RE9NfSBmcm9tICdhbmd1bGFyMi9zcmMvcGxhdGZvcm0vZG9tL2RvbV9hZGFwdGVyJztcblxuLyoqXG4gKiBBIHNlcnZpY2UgdGhhdCBjYW4gYmUgdXNlZCB0byBnZXQgYW5kIHNldCB0aGUgdGl0bGUgb2YgYSBjdXJyZW50IEhUTUwgZG9jdW1lbnQuXG4gKlxuICogU2luY2UgYW4gQW5ndWxhciAyIGFwcGxpY2F0aW9uIGNhbid0IGJlIGJvb3RzdHJhcHBlZCBvbiB0aGUgZW50aXJlIEhUTUwgZG9jdW1lbnQgKGA8aHRtbD5gIHRhZylcbiAqIGl0IGlzIG5vdCBwb3NzaWJsZSB0byBiaW5kIHRvIHRoZSBgdGV4dGAgcHJvcGVydHkgb2YgdGhlIGBIVE1MVGl0bGVFbGVtZW50YCBlbGVtZW50c1xuICogKHJlcHJlc2VudGluZyB0aGUgYDx0aXRsZT5gIHRhZykuIEluc3RlYWQsIHRoaXMgc2VydmljZSBjYW4gYmUgdXNlZCB0byBzZXQgYW5kIGdldCB0aGUgY3VycmVudFxuICogdGl0bGUgdmFsdWUuXG4gKi9cbmV4cG9ydCBjbGFzcyBUaXRsZSB7XG4gIC8qKlxuICAgKiBHZXQgdGhlIHRpdGxlIG9mIHRoZSBjdXJyZW50IEhUTUwgZG9jdW1lbnQuXG4gICAqIEByZXR1cm5zIHtzdHJpbmd9XG4gICAqL1xuICBnZXRUaXRsZSgpOiBzdHJpbmcgeyByZXR1cm4gRE9NLmdldFRpdGxlKCk7IH1cblxuICAvKipcbiAgICogU2V0IHRoZSB0aXRsZSBvZiB0aGUgY3VycmVudCBIVE1MIGRvY3VtZW50LlxuICAgKiBAcGFyYW0gbmV3VGl0bGVcbiAgICovXG4gIHNldFRpdGxlKG5ld1RpdGxlOiBzdHJpbmcpIHsgRE9NLnNldFRpdGxlKG5ld1RpdGxlKTsgfVxufVxuIl19 |
function ShapePath(){
this.c = false;
this._length = 0;
this._maxLength = 8;
this.v = createSizedArray(this._maxLength);
this.o = createSizedArray(this._maxLength);
this.i = createSizedArray(this._maxLength);
}
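// A path is stored as three parallel arrays: v holds the anchor (vertex) points,
// o the outgoing control points and i the incoming control points of each vertex;
// c marks the path as closed. The backing arrays grow by doubling (doubleArrayLength).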
ShapePath.prototype.setPathData = function(closed, len) {
this.c = closed;
this.setLength(len);
var i = 0;
while(i < len){
this.v[i] = point_pool.newElement();
this.o[i] = point_pool.newElement();
this.i[i] = point_pool.newElement();
i += 1;
}
};
ShapePath.prototype.setLength = function(len) {
while(this._maxLength < len) {
this.doubleArrayLength();
}
this._length = len;
};
ShapePath.prototype.doubleArrayLength = function() {
this.v = this.v.concat(createSizedArray(this._maxLength));
this.i = this.i.concat(createSizedArray(this._maxLength));
this.o = this.o.concat(createSizedArray(this._maxLength));
this._maxLength *= 2;
};
ShapePath.prototype.setXYAt = function(x, y, type, pos, replace) {
var arr;
this._length = Math.max(this._length, pos + 1);
if(this._length >= this._maxLength) {
this.doubleArrayLength();
}
switch(type){
case 'v':
arr = this.v;
break;
case 'i':
arr = this.i;
break;
case 'o':
arr = this.o;
break;
}
if(!arr[pos] || (arr[pos] && !replace)){
arr[pos] = point_pool.newElement();
}
arr[pos][0] = x;
arr[pos][1] = y;
};
ShapePath.prototype.setTripleAt = function(vX,vY,oX,oY,iX,iY,pos, replace) {
this.setXYAt(vX,vY,'v',pos, replace);
this.setXYAt(oX,oY,'o',pos, replace);
this.setXYAt(iX,iY,'i',pos, replace);
};
ShapePath.prototype.reverse = function() {
var newPath = new ShapePath();
newPath.setPathData(this.c, this._length);
var vertices = this.v, outPoints = this.o, inPoints = this.i;
var init = 0;
if (this.c) {
newPath.setTripleAt(vertices[0][0], vertices[0][1], inPoints[0][0], inPoints[0][1], outPoints[0][0], outPoints[0][1], 0, false);
init = 1;
}
var cnt = this._length - 1;
var len = this._length;
    for (var i = init; i < len; i += 1) {
newPath.setTripleAt(vertices[cnt][0], vertices[cnt][1], inPoints[cnt][0], inPoints[cnt][1], outPoints[cnt][0], outPoints[cnt][1], i, false);
cnt -= 1;
}
return newPath;
}; |
import React, { useRef, useState, useEffect} from 'react';
import panzoom from 'panzoom';
import { Paper, Box, Stack, Button, Typography } from '@mui/material';
import PanZoomModal from '../components/PanZoomModal'
import * as utl from './svg_utils'
import FullscreenIcon from '@mui/icons-material/Fullscreen';
import LinkIcon from '@mui/icons-material/Link';
import {useRouter} from 'next/router';
import config from '../next.config'
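// PanZoomSlide renders an SVG <object> inside a pan/zoom-enabled container (via the
// panzoom library), with an optional title/menu bar offering a deep link and a
// fullscreen modal; the modal state is mirrored into the URL hash and query string.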
export default function PanZoomSlide({src,menu=false,width=600}) {
const started = useRef(false)
const [loaded, setLoaded] = useState(false)
const [open, setOpen] = useState(false);
const [height,setHeight] = useState(Math.round(width/2))
const [title,setTitle] = useState(src.replace(/\.[^/.]+$/, ""))
const router = useRouter()
const zoomOptions = {
minZoom: 0.1,
maxZoom:4
}
const boxRef = useRef(null);
const divRef = useRef(null);
const panzoomRef = useRef(null);
const stackRef = useRef(null);
function startPZ(){
//console.log("adding listener")
if(loaded && divRef.current && !started.current){
panzoomRef.current = panzoom(divRef.current, zoomOptions);
started.current = true
if(utl.get_svg_id(src)){//protect against mysterious react reload cases
on_svg_pz_ready()
}
}
}
function stopPZ(){
//console.log(`stopPZ panzoomRef.current=${panzoomRef.current}`)
if((started.current) && (panzoomRef.current)){
panzoomRef.current.dispose();
started.current = false
//console.log(`pan zoom : disposed`)
}
}
function on_svg_pz_ready(){
utl.Fit(src,panzoomRef.current,boxRef.current)
if(utl.has_model(src)){
utl.fetch_json(src.replace(".svg",".json")).then((model)=>{
utl.setup_links(src,model)
})
}
let new_title = utl.get_title(src)
if(new_title){
setTitle(new_title)
}
//console.log(location.search)//empty
//why not useRouter, because it has a bug : https://github.com/vercel/next.js/discussions/13220
const query_list = router.asPath.split('?')
if(query_list.length == 2){
const query = utl.search_to_query(query_list[1])
if(("modal" in query) && query.modal === src){
openModal()
}
}
}
function setLink(){
router.push(`${router.pathname}#pz-${src}`)
}
function openModal(){
if(!open){
//element.scrollTo(0,100)//not effective
router.push(`${router.pathname}#pz-${src}?modal=${src}`)//,{scroll:false} not effective
setOpen(true)
}
}
function closeModal(){
const url = `${router.pathname}#pz-${src}`
router.push(url,url,{scroll:false})
setOpen(false)
}
function onHashChangeStart(url){
//not catching paste of same url with new search query params
//from : http://localhost:3000/#pz-Linux_kernel_map.svg
//to : http://localhost:3000/#pz-Linux_kernel_map.svg?modal=Linux_kernel_map.svg
console.log(url)
}
function onComponentUnmount(){
stopPZ()
//router.events.off("hashChangeStart", onHashChangeStart);
}
useEffect(()=>{
//console.log(`width is now (${width})`)
const target_height = Math.round(width/2)
if(height!=target_height){ //1) height only mismatch if width has changed
setHeight(target_height)
}else{ //2) height match, already applied after render
//console.log(`fitting '${src}' now with new width (${width})`)
utl.Fit(src,panzoomRef.current,boxRef.current)
}
},[height,width])
useEffect(() => {
if((loaded) && (divRef.current) && (!started.current)){
      startPZ()
//router.events.on("hashChangeStart", onHashChangeStart);
}
return onComponentUnmount
}, [loaded]);
  //TODO update basePath from config in this file and in svg_utils line 162
return (
<>
<Box id="mainContent" m={1} sx={{width:width}}>
<Paper elevation={1} sx={{ overflow: 'hidden'}}>
<Stack id={`pz-${src}`} ref={stackRef}>
{menu&&
<Stack
direction="row"
spacing={2}
justifyContent="space-between"
>
<Typography variant="h6" p={1}>{title}</Typography>
<Stack
direction="row"
spacing={2}
justifyContent="flex-end"
>
<Button sx={{zIndex:'modal',backgroundColor:'#ffffffaa'}} onClick={()=>{setLink()}} variant="text"><LinkIcon/></Button>
<Button sx={{zIndex:'modal',backgroundColor:'#ffffffaa'}} onClick={()=>{openModal()}} variant="text"><FullscreenIcon/></Button>
</Stack>
</Stack>
}
<Box ref={boxRef}
sx={{ height:height, position:'relative'}}>
<div ref={divRef} >
<object type="image/svg+xml" data={`${config.basePath}/${src}`} id={src} onLoad={()=>{setLoaded(true)}} />
</div>
</Box>
</Stack>
</Paper>
</Box>
<PanZoomModal src={src} open={open} handleClose={()=>{closeModal()}}/>
</>
)
}
|
import os.path
import sys
import numpy as np
try:
    import matplotlib.pyplot as plt
except ImportError:
    print("Error importing pyplot from matplotlib, please install matplotlib package first...")
    sys.tracebacklimit = 0
    raise Exception("Importing matplotlib failed")
if __name__ == '__main__':
#b = np.loadtxt('/home/liuq0a/tests/marmousi_onshore_SRVM_new10_1/output.stats/misfit')
b = np.loadtxt('Eigen_values.txt')
b0 = b[0]
#b = b / b[0]
    print("b = ", b[:])
nb = np.size(b)
plt.figure(figsize=(10,5))
ax = plt.subplot(111)
plt.plot(range(1,nb+1),b,'o-',color="black",linewidth=2)
#plt.plot(range(na),a,label='$L-BFGS$',color="blue",linewidth=2)
ax.legend()
ax.set_yscale('log')
plt.xlabel("ORDER")
plt.ylabel("EIGENVALUE")
plt.xlim(0,70)
#plt.ylim(5.0e-3,1.1)
#plt.title("MARMOUSI")
plt.savefig("fig3.png")
|
gXMLBuffer="<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?><pk><wd nm=\"rintermark\" rd=\"1223,0:9847,9932:0\"/><wd nm=\"rintername\" rd=\"227,0:860,1027,1074:0|248,0:7934:0|894,0:1663:0|895,0:1460,1667,1833,1987:0|1084,0:6854:0|1130,0:97602,97656,97871:0|1223,8:371,7689,7992,13215,19627,19754,19800,19965,20187,20341:0|1290,0:848,938:0|1297,0:2298:0\"/><wd nm=\"rinternames\" rd=\"248,0:1637:0|894,0:2351:0|1084,8:316,6768,7036,7103:0|1223,0:20089:0|1297,0:411:0\"/><wd nm=\"rinters\" rd=\"594,0:1615:0|601,0:1549:0|737,0:280:0|898,0:1429:0|899,0:1327,1643,1771,2475:0|1084,0:6809,6899,7018:0|1102,0:1170:0|1223,0:20432,24763,25411:0\"/><wd nm=\"rintersmarks\" rd=\"1223,0:9909:0\"/><wd nm=\"rinterval\" rd=\"248,0:1856:0|1056,0:620,878,1491,1882:0|1085,8:268,13553,13891,57493,58021,59468,60132,60206:0|1090,0:1169:0|1129,0:16791:0|1155,0:3272:0|1299,0:459:0|1561,0:3374,3455,27232,27313:0\"/><wd nm=\"rinter—does\" rd=\"1223,0:9522:0\"/><wd nm=\"rinter’s\" rd=\"227,0:1159,1191:0|686,0:103:0|1223,0:16402,16486,18239,24851,24883:0\"/><wd nm=\"rintex\" rd=\"251,0:173:0|1789,0:456,878:0|1815,130:0,12,424,474,887:0|1817,0:640:0|1819,0:820:0\"/><wd nm=\"rintf\" rd=\"248,0:9680:0|309,0:569:0|348,0:4739:0|397,0:2186:0|398,0:2945:0|488,0:1519:0|491,0:1161:0|497,0:3618,4797,7479:0|514,0:900:0|516,0:347:0|535,0:6096:0|553,0:1159:0|732,0:561:0|947,0:1810:0|1130,0:55920:0|1145,0:11511:0|1231,0:3965,4083:0|1271,8:140,5613,6017,7669,7906,7980,8028,8072,8119:0|1561,0:2376,26523:0|1579,0:2780,2893:0\"/><wd nm=\"rintflag\" rd=\"1908,8:266,7480,7525:0\"/><wd nm=\"rinting\" rd=\"14,0:172:0|20,0:675:0|24,0:231:0|47,130:0,17,224,1191:0|48,0:393:0|84,0:140,523:0|90,0:218,378:0|138,0:1242,1267:0|158,0:486,1109,1707:0|199,16:1066:0|203,0:195:0|278,0:160:0|349,0:1193:0|366,130:0,18,282,394,451,540,859,1117:0|367,0:296:0|482,0:1213:0|564,0:674,688,5554:0|570,0:2963:0|592,0:877:0|594,0:1559,1598,1930,2125,2662:0|605,0:7378:0|610,0:261:0|612,0:371:0|615,0:1672:0|617,0:2616,3217:0|668,0:322,350,5372:0|675,0:134,983:0|694,132:0,19,49,80,178,412:0|697,0:233,280:0|737,0:340:0|739,0:642,672:0|767,0:2927:0|774,0:538:0|814,0:2819,3827:0|816,0:6873:0|817,0:173:0|835,0:3497:0|893,0:264,295,353,403,472,669:0|894,0:45,513,708,820,1084,1168,1250,1349,1446,2440:0|895,130:0,21,787,2231:0|896,132:6,21,284,372,598,1378:0|897,132:0,25,623:0|898,132:7,23,282,310,448,798,1194,1560,1775:0|902,0:3006:0|904,0:4641:0|951,0:739,762,782,805,933,1356:0|987,0:309:0|1017,0:241:0|1018,0:3102,3117,3287,3842,3906:0|1076,0:7710,10588:0|1102,0:1225,1297:0|1106,0:2524,2871:0|1130,0:13749,17604,33466,33508,34063,52154,53165,53209,53247,53284,65972,95241,95363,95421,96171,96768,97256:0|1155,0:20170:0|1222,0:56:0|1223,0:748,941,1072,1284,1341,1616,2127,3436,3592,4126,4213,4314,4386,4612,4832,5014,5110,5195,5285,6501,7051,7149,7462,7599,8395,8789,8806,8969,11856,12150,12208,12704,12786,12918,12999,13744,13967,14165,14500,14814,15017,15610,15970,16862,17114,17254,18833,19896,22039,22242,22314,22403,22624:0|1290,0:637:0|1561,0:21319:0|1642,0:882:0|1643,0:969,1095,1213,1422:0|1644,0:803:0|1645,0:891,1017,1135,1344:0|1770,0:99,108,173,238,295:0|1771,0:162,219:0|1772,0:246,303,436:0|1773,0:166,223:0|1774,0:223,280:0|1775,0:119,128,193,258,315:0|1789,0:412:0|1805,0:121:0|1814,0:98:0|1815,0:102,382:0|1816,0:112:0|1817,0:116,388:0|1818,0:150:0|1819,0:154,399:0|1883,0:504,624:0|1908,0:7141,7621:0\"/><wd nm=\"rintingimagestream\" rd=\"814,0:3132,3536,3804:0\"/><wd nm=\"rintingparams\" rd=\"700,0:1305,1572,1619,1679,1742,1799,2013:0\"/><wd nm=\"rintings\" 
rd=\"1223,0:1556:0\"/><wd nm=\"rintlayout\" rd=\"615,0:1351,1539,3010:0\"/><wd nm=\"rintln\" rd=\"248,0:3119,3141:0|851,0:580:0|855,0:1824:0|856,0:286,473,1128,1378:0|864,0:256:0|882,0:1624,1668,2412,2471:0|888,0:3341:0|889,0:1804:0|904,0:13386,13423,13463,13505,13558,13597,13637,13679,19626,19700,19754,22373:0|915,0:1356,2331,2374,2417,2460,2503,2546:0|916,0:852,907,965,1026,1091,1862,1921:0|917,0:1372,4190,4793,5094:0|922,0:412:0|930,0:4122,4211,4245,4281:0|933,0:599,738,782,842,896:0|937,0:436,709:0|947,0:417,501,1592,1688,1798,1886:0|961,0:618,665:0|968,0:566,619:0|970,0:2046,2260,2502:0|972,0:1761,1807:0|975,0:779:0|977,0:2605:0|980,0:3886:0|981,0:1606:0|984,0:2297:0|985,0:1644:0|996,0:2699:0|1002,0:467,1446,1525,1589:0|1009,0:775:0|1010,0:699,747,802:0|1018,0:2580:0|1019,0:2065,3292:0|1022,0:6470,6518:0|1023,0:2102,2153,2234,2292:0|1034,0:2593,2639,3444,3492,3554,3605:0|1035,0:1951,2002,2051,2499,2552:0|1038,0:1083,1437:0|1039,0:3499,4236,5857:0|1040,0:930:0|1042,0:621:0|1047,0:1465,1647,2833,2907:0|1048,0:1253:0|1056,0:428,583,769,821,1299,1454,1773,1825:0|1073,0:1930,1967,2008,2045,2132:0|1076,0:6520,9809:0|1077,0:1101,1907,3747:0|1084,0:3722,5549,6386,6723,7082:0|1085,0:12626,12666,22189,23438,23564,23612,23934,24176,27100,27570,35310,38729,38885,39645,63945,64028,68657:0|1087,0:5297,5343:0|1088,0:11535,11587,15185,15227:0|1090,0:433,592,1491:0|1094,0:718:0|1108,0:265:0|1111,0:2836,2870:0|1113,8:41,154,459,691,736,769,819:0|1115,0:528,628:0|1118,0:763,793:0|1123,0:471:0|1124,0:1513,1810,1866,4134:0|1126,0:2649,2700:0|1127,0:865:0|1129,0:2001,2532,2616,4386,7063,8065,8208,8317,8378,8417,8829,9504,9548,9691,10825,11641,12136,16675,17616,18082,20695,20957,21495,21732,23162,25536,26643,26786,27068,27840:0|1130,0:20412,37410,37864,48552,48713,52471,53723,53754,55798,55908,57890,61858,69490,69540,69602,69655,69843,69926,69964,70532,70603,70926,72656,74654,76286,76703,76781,78992,82577,94874,94947,102709,110326,110461,110581,110651,110699,115827:0|1132,0:1538:0|1133,0:463:0|1137,0:1679:0|1146,0:1444,1533:0|1147,0:4520,4647,4755,4905,10046,12239:0|1148,0:251,549,656:0|1149,0:997,2527:0|1151,0:773,1322:0|1152,0:7849,7898,7954:0|1155,0:8610,8684,8738,13229,18299,18492,19381,19439,19691,22874,28856:0|1156,0:9904,18066,18198,18333,18469,18594,18628,19086,19131,19246,20409,20456,20624,20656,20692,20772,20918,20967,21081,21134:0|1158,0:2770:0|1162,0:419,1022,1323:0|1165,0:2943:0|1170,0:521,562:0|1172,0:596:0|1173,0:1091:0|1178,0:674,1106:0|1181,0:1144:0|1185,0:1124,1617,5442:0|1186,0:5477:0|1188,0:325,435:0|1190,0:1065,1579:0|1192,0:8472:0|1194,0:333,655:0|1195,0:1497:0|1196,0:568,620,5566:0|1206,0:524,564,608,841:0|1211,0:440,823:0|1213,0:1038,1101,1801,1860,1896,1942,1988,2034,3032,3091,3127,3173,3219,3265,4060,4119,4155,4201,4247,4293:0|1215,0:1306,1365,1412,1465,1533,1599,1654,1725,1791,1852:0|1227,0:1565:0|1228,0:2318:0|1233,0:423,1650,2469:0|1236,0:2037:0|1240,0:1232:0|1241,0:2099,3034,3106,3149,3264,3302,4007,5874,7982,8361:0|1243,0:4006,4107:0|1251,0:2597,2782,3045,6801,6939,10101,10177,21490,21939:0|1253,0:180,286:0|1258,0:268,2677,3128:0|1259,0:3125,3168,7055,7179,10833:0|1265,0:443:0|1266,0:631,816,1877:0|1268,0:227:0|1269,0:429:0|1271,0:1685,1723,2347,4543,4643,4780,7968,8016,8060,8107,9013,10733,10855,11287,11779:0|1272,0:620,759,803,863,917:0|1274,0:2690,2933,4161,4368,4559,4594,4802,4939,5197,5861,5961:0|1350,132:0,8,84,380:0|1422,0:330:0|1557,0:1554,1692,1773,1945:0|1558,0:688:0|1561,0:3339,3436,3563,3697,4521,4554,6448,6938,7263,7723,7757,7910,9086,9692,9726,10773,11138,11237,14
454,18416,18479,25436,25688,26037,26646,26836,26883,27197,27294,27471,27535,27631,27801,30923,31002:0|1582,0:918,1521:0\"/><wd nm=\"rintmode\" rd=\"710,0:656:0|1106,0:2559:0\"/><wd nm=\"rintname\" rd=\"1818,0:340,440:0|1819,0:526,626:0\"/><wd nm=\"rintout\" rd=\"1223,0:23796:0\"/><wd nm=\"rintpage\" rd=\"150,130:0,10,336:0\"/><wd nm=\"rintpagenum\" rd=\"1266,0:563,763:0\"/><wd nm=\"rintpages\" rd=\"250,0:666:0|366,0:113,159:0|1627,0:1018:0|1642,132:0,11,83,299,1202:0|1643,0:121,1484:0|1644,0:119,865:0|1645,0:1385:0|1761,0:1269:0|1770,0:401:0|1771,0:325:0|1772,0:655:0|1773,132:0,11,263,734:0|1774,0:693:0|1775,0:444:0|1883,0:864:0\"/><wd nm=\"rintpagesex\" rd=\"1627,0:1085:0|1642,0:944:0|1643,132:0,13,88,131,349,1740:0|1644,0:881:0|1645,0:123,1401:0\"/><wd nm=\"rintpagesfit\" rd=\"250,0:678:0|1761,0:1337:0|1770,0:419:0|1771,0:343:0|1772,0:673:0|1773,0:498:0|1774,132:0,14,320,926:0|1775,0:462:0\"/><wd nm=\"rintpagessilent\" rd=\"1627,0:1155:0|1642,0:962:0|1643,0:1500:0|1644,132:0,17,163,233,1117:0|1645,0:208:0\"/><wd nm=\"rintpagessilentex\" rd=\"1627,0:1232:0|1642,0:984:0|1643,0:1522:0|1644,0:899:0|1645,132:0,19,169,224,296,1419,1637:0\"/><wd nm=\"rintpageswithparams\" rd=\"366,0:130,325,615:0\"/><wd nm=\"rintparams\" rd=\"248,0:4478,7707:0|366,0:819:0|893,0:223:0|894,0:247,322,605,2298:0|895,0:741,757,812,1276,1395,1434,1647,1857,1915:0|896,0:336,400,682:0|897,0:78,288:0|898,0:98,127,252,737,851,1155:0|899,0:83,368,447,558,766,982,1117,1262,1382,1508,2020,2148,2349,2643,2732,2868:0|1084,0:6561,6878:0|1130,8:1024,75678,75710,75796,75837,75869,75916,95129,95533,97174,97210,97530,97823:0|1222,130:0,12,240,283,310,374,486,533,904:0|1223,132:0,22,3836,4440,4648,4884,5586,5770,7197,7822,7944,8290,10155,10373,10606,12348,13143,14220,15666,16002,17142,17309,17573,18048,18923,19519,20167,20293,22505,22729,25861:0|1278,132:9,36,83,328:0|1290,0:741:0|1292,0:2338:0|1294,0:2845:0|1295,0:453:0|1297,0:1301,2071:0\"/><wd nm=\"rintpareams\" rd=\"899,0:856:0\"/><wd nm=\"rintpdf\" rd=\"327,0:3657:0\"/><wd nm=\"rintproduction\" rd=\"383,0:1739:0|1085,0:31408:0\"/><wd nm=\"rintrange\" rd=\"1223,8:427,19060,19090,19586:0\"/><wd nm=\"rints\" rd=\"47,0:125:0|90,0:981:0|150,0:99:0|366,0:180,356:0|559,0:841:0|669,0:1468:0|855,0:1527:0|894,0:1833:0|1106,0:2547:0|1113,0:175,310:0|1130,0:18848,95003:0|1155,0:12956,20120:0|1223,0:3367,3465,3620,6828,6875,6955,14748,16965:0|1266,0:729,846:0|1271,0:11633:0|1349,0:12:0|1350,0:16:0|1422,0:284:0|1627,0:1030,1099,1173,1252:0|1642,0:22:0|1643,0:26:0|1644,0:34:0|1645,0:38:0|1761,0:992,1066,1135,1281,1352,1425:0|1770,0:12:0|1771,0:18:0|1772,0:24:0|1773,0:22:0|1774,0:28:0|1775,0:32:0|1805,0:18:0|1814,0:20:0|1815,0:24:0|1816,0:32:0|1817,0:36:0|1818,0:24:0|1819,0:28:0|1848,0:12:0|1883,0:22:0|1908,0:6970,7069,7513,7582:0\"/><wd nm=\"rintsetting\" rd=\"709,0:1159:0\"/><wd nm=\"rintsettings\" rd=\"668,0:247:0|669,4:868:0|709,0:1131:0\"/><wd nm=\"rintsilent\" rd=\"251,0:186:0|1789,0:469,891:0|1805,0:558:0|1814,0:460:0|1815,0:628:0|1816,130:0,16,248,302,707:0|1817,0:652:0|1818,0:676:0\"/><wd nm=\"rintsilentex\" rd=\"251,0:203:0|1789,0:486,908:0|1815,0:644:0|1817,130:0,18,430,486,893:0|1819,0:832:0\"/></pk>"; |
import Home from './pages/home'
export default {
pages: () => [
{
label: 'Home',
path: '',
component: Home,
},
{
label: "Hackathon",
children: [
{
label: 'Schedule',
children: ['Event', 'Location', 'Tag']
},
{
label: 'Content',
children: ['FAQ', 'Block', 'BrandAsset', 'SocialAccount']
},
{
label: 'Sponsors',
children: ['Sponsor', 'Perk', 'Challenge']
}
]
},
{
label: "Admin",
children: [
'User', 'Hackathon', 'Type'
]
}
]
};
|
/**
* cn - 可展开(受控)
* -- 当传入一个expandKeys时,展开会变成受控的,需要自行在column里面的onClick去处理
* en - Expand(controlled)
* -- When an expandKeys is provided, the expansion becomes controlled and needs to be processed by the onClick in the column.
*/
import React from 'react'
import { Table } from 'shineout'
import { fetchSync } from 'doc/data/user'
const data = fetchSync(100)
export default class extends React.Component {
constructor(props) {
super(props)
this.state = {
expandKeys: [1],
}
}
render() {
const { expandKeys } = this.state
const columns = [
{ title: 'id', render: 'id', width: 50 },
{
type: 'expand',
onClick: (d, isExpand) => {
if (isExpand) this.setState({ expandKeys: [...expandKeys, d.id] })
else this.setState({ expandKeys: expandKeys.filter(k => k !== d.id) })
},
render: (d) => {
if (d.id > 5) return undefined
return () => <div style={{ padding: '10px 30px', wordBreak: 'break-all' }}>{JSON.stringify(d)}</div>
},
},
{ title: 'Name', render: d => `${d.firstName} ${d.lastName}` },
{ title: 'Office', render: 'office' },
{ title: 'Start Date', render: 'start' },
{
title: 'Salary',
render: d => `$${d.salary.toString().replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1,')}`,
},
]
return (
<Table
fixed="y"
expandKeys={expandKeys}
data={data}
keygen="id"
style={{ height: 300 }}
columns={columns}
/>
)
}
}
|
"""
Tests for the swflows.py module
"""
from datetime import datetime
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from mfobs.obs import get_spatial_differences, get_temporal_differences
from mfobs.swflows import get_flux_obs
@pytest.fixture
def flux_obs_input(shellmound_data_path):
class FluxObsInput:
perioddata = pd.read_csv(shellmound_data_path / 'tables/stress_period_data.csv')
model_output_file = shellmound_data_path / 'shellmound.sfr.obs.output.csv'
observed_values_file = shellmound_data_path / 'tables/processed_flow_obs.csv'
tflux_diff_obs_outfile = shellmound_data_path / 'tables/processed_flow_obs_tdiffs.csv'
sflux_diff_obs_outfile = shellmound_data_path / 'tables/processed_flow_obs_sdiffs.csv'
return FluxObsInput
@pytest.fixture
def flux_obs(flux_obs_input):
results = get_flux_obs(flux_obs_input.perioddata,
model_output_file=flux_obs_input.model_output_file,
observed_values_file=flux_obs_input.observed_values_file,
observed_values_site_id_col='site_no',
observed_values_obsval_col='obsval',
observed_values_group_column='category',
variable_name='flux',
outfile=None,
write_ins=False)
return results
@pytest.fixture
def flux_obs_per_based_suffixes(flux_obs_input):
results = get_flux_obs(flux_obs_input.perioddata,
model_output_file=flux_obs_input.model_output_file,
observed_values_file=flux_obs_input.observed_values_file,
observed_values_site_id_col='site_no',
observed_values_obsval_col='obsval',
observed_values_group_column='category',
obsnme_date_suffix=False,
obsnme_suffix_format='03d',
variable_name='flux',
outfile=None,
write_ins=False)
return results
def test_get_flux_obs(flux_obs):
results = flux_obs
expected_columns = ['datetime', 'per', 'obsprefix', 'obsnme',
'obs_flux', 'sim_flux', 'obsval', 'obgnme']
assert np.all(results.columns == expected_columns)
assert len(set(results.obsnme)) == len(results)
assert not results.obs_flux.isna().any()
assert not results.obsnme.str.isupper().any()
# check sorting
assert np.all(results.reset_index(drop=True).groupby('obsprefix').per.diff().dropna() > 0)
def test_get_flux_obs_per_based_suffixes(flux_obs_per_based_suffixes):
results = flux_obs_per_based_suffixes
expected_columns = ['datetime', 'per', 'obsprefix', 'obsnme',
'obs_flux', 'sim_flux', 'obsval', 'obgnme']
assert np.all(results.columns == expected_columns)
assert len(set(results.obsnme)) == len(results)
assert not results.obs_flux.isna().any()
assert not results.obsnme.str.isupper().any()
# check sorting
assert np.all(results.reset_index(drop=True).groupby('obsprefix').per.diff().dropna() > 0)
# test observation name suffixes
is_trans = [False if obsnme.split('_')[1] == 'ss' else True
for obsnme in results['obsnme']]
parsed_periods = [int(obsnme.split('_')[1]) for obsnme in results.loc[is_trans, 'obsnme']]
assert np.array_equal(parsed_periods, results.loc[is_trans, 'per'].values)
@pytest.mark.parametrize(('write_ins,obsnme_date_suffix,'
'obsnme_suffix_format'),
((True, False, '03d'),
(False, True, '%Y%m%d'),
))
def test_get_temporal_flux_differences(flux_obs_input, write_ins,
obsnme_date_suffix, obsnme_suffix_format):
flux_obs = get_flux_obs(flux_obs_input.perioddata,
model_output_file=flux_obs_input.model_output_file,
observed_values_file=flux_obs_input.observed_values_file,
observed_values_site_id_col='site_no',
observed_values_obsval_col='obsval',
observed_values_group_column='category',
obsnme_date_suffix=obsnme_date_suffix,
obsnme_suffix_format=obsnme_suffix_format,
variable_name='flux',
outfile=None,
write_ins=False)
results = get_temporal_differences(flux_obs,
flux_obs_input.perioddata,
obs_values_col='obs_flux',
sim_values_col='sim_flux',
obsnme_date_suffix=obsnme_date_suffix,
obsnme_suffix_format=obsnme_suffix_format,
obstype='flux',
write_ins=write_ins,
outfile=flux_obs_input.tflux_diff_obs_outfile)
assert flux_obs_input.tflux_diff_obs_outfile.exists()
insfile = Path(str(flux_obs_input.tflux_diff_obs_outfile) + '.ins')
if not write_ins:
assert not insfile.exists()
else:
assert insfile.exists()
insfile.unlink()
flux_obs_input.tflux_diff_obs_outfile.unlink() # delete it
assert np.all(results.columns ==
['datetime', 'per', 'obsprefix', 'obsnme',
'obs_flux', 'sim_flux',
'obsval', 'sim_obsval', 'obgnme', 'type']
)
assert len(set(results.obsnme)) == len(results)
assert not results.obsval.isna().any()
assert not results.sim_obsval.isna().any()
assert results.obsnme.str.islower().all()
suffixes = np.ravel([obsnme.split('_')[1].split('d') for obsnme in results.obsnme])
assert 'ss' not in suffixes
# check observation names
for i, r in results.iterrows():
prefix, suffix = r.obsnme.split('_')
suffix1, suffix2 = suffix.split('d')
if obsnme_date_suffix:
datetime1 = datetime.strptime(suffix1, obsnme_suffix_format)
datetime2 = datetime.strptime(suffix2, obsnme_suffix_format)
else:
            per1 = int(suffix1)
            per2 = int(suffix2)
@pytest.mark.parametrize('flux_difference_sites, write_ins',
(({'07288280': # sunflower r. at merigold
'07288500' # sunflower r. at sunflower
}, True),
({'07288500': '07288280' }, False)))
def test_get_spatial_flux_difference_obs(flux_obs, flux_obs_input, flux_difference_sites, write_ins):
results = get_spatial_differences(flux_obs, flux_obs_input.perioddata,
flux_difference_sites,
obs_values_col='obs_flux',
sim_values_col='sim_flux',
obstype='flux',
use_gradients=False,
write_ins=write_ins,
outfile=flux_obs_input.sflux_diff_obs_outfile)
assert flux_obs_input.sflux_diff_obs_outfile.exists()
insfile = Path(str(flux_obs_input.sflux_diff_obs_outfile) + '.ins')
if not write_ins:
assert not insfile.exists()
else:
assert insfile.exists()
insfile.unlink()
flux_obs_input.sflux_diff_obs_outfile.unlink() # delete it
assert np.all(results.columns ==
['datetime', 'per', 'obsprefix',
'obsnme1', 'obs_flux1', 'sim_flux1',
'obsnme2', 'obs_flux2', 'sim_flux2',
'obs_diff', 'sim_diff', 'obgnme', 'obsnme',
'obsval', 'sim_obsval', 'type']
)
assert len(set(results.obsnme)) == len(results)
assert not results.obsval.isna().any()
assert not results.sim_obsval.isna().any()
assert results.obsnme.str.islower().all() |
module.exports = {
env: {
es6: true,
node: true,
},
extends: ['eslint:recommended', 'plugin:prettier/recommended'],
parserOptions: {
ecmaVersion: 2018,
sourceType: 'module',
ecmaFeatures: {
jsx: true,
},
},
rules: {
indent: ['error', 2, { SwitchCase: 1 }],
'linebreak-style': ['error', 'unix'],
},
overrides: [
{
files: ['./src/gatsby-browser.js', './src/loader/*'],
env: {
browser: true,
},
},
{
files: ['**/cypress/**/*'],
env: {
'cypress/globals': true,
},
plugins: ['cypress'],
},
{
files: ['e2e-tests/*/src/**/*'],
extends: ['plugin:react/recommended'],
rules: {
'react/prop-types': 0,
},
settings: {
react: {
version: '16.9.0',
},
},
},
],
};
|
/**
* Bootstrap Table Afrikaans translation
* Author: Phillip Kruger <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['af-ZA'] = {
formatLoadingMessage: function () {
return 'Besig om te laai, wag asseblief ...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rekords per bladsy';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Resultate ' + pageFrom + ' tot ' + pageTo + ' van ' + totalRows + ' rye';
},
formatSearch: function () {
return 'Soek';
},
formatNoMatches: function () {
return 'Geen rekords gevind nie';
},
formatPaginationSwitch: function () {
return 'Wys/verberg bladsy nummering';
},
formatRefresh: function () {
return 'Herlaai';
},
formatToggle: function () {
return 'Wissel';
},
formatColumns: function () {
return 'Kolomme';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['af-ZA']);
})(jQuery);
/**
 * Bootstrap Table Arabic translation
* Author: Zhixin Wen<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ar-SA'] = {
formatLoadingMessage: function () {
return 'جاري التحميل, يرجى الإنتظار...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' سجل لكل صفحة';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'الظاهر ' + pageFrom + ' إلى ' + pageTo + ' من ' + totalRows + ' سجل';
},
formatSearch: function () {
return 'بحث';
},
formatNoMatches: function () {
return 'لا توجد نتائج مطابقة للبحث';
},
formatPaginationSwitch: function () {
return 'إخفاء\إظهار ترقيم الصفحات';
},
formatRefresh: function () {
return 'تحديث';
},
formatToggle: function () {
return 'تغيير';
},
formatColumns: function () {
return 'أعمدة';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ar-SA']);
})(jQuery);
/**
* Bootstrap Table Catalan translation
* Authors: Marc Pina<[email protected]>
* Claudi Martinez<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ca-ES'] = {
formatLoadingMessage: function () {
return 'Espereu, si us plau...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' resultats per pàgina';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrant de ' + pageFrom + ' fins ' + pageTo + ' - total ' + totalRows + ' resultats';
},
formatSearch: function () {
return 'Cerca';
},
formatNoMatches: function () {
return 'No s\'han trobat resultats';
},
formatPaginationSwitch: function () {
return 'Amaga/Mostra paginació';
},
formatRefresh: function () {
return 'Refresca';
},
formatToggle: function () {
return 'Alterna formatació';
},
formatColumns: function () {
return 'Columnes';
},
formatAllRows: function () {
return 'Tots';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ca-ES']);
})(jQuery);
/**
* Bootstrap Table Czech translation
* Author: Lukas Kral ([email protected])
* Author: Jakub Svestka <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['cs-CZ'] = {
formatLoadingMessage: function () {
return 'Čekejte, prosím...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' položek na stránku';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Zobrazena ' + pageFrom + '. - ' + pageTo + '. položka z celkových ' + totalRows;
},
formatSearch: function () {
return 'Vyhledávání';
},
formatNoMatches: function () {
return 'Nenalezena žádná vyhovující položka';
},
formatPaginationSwitch: function () {
return 'Skrýt/Zobrazit stránkování';
},
formatRefresh: function () {
return 'Aktualizovat';
},
formatToggle: function () {
return 'Přepni';
},
formatColumns: function () {
return 'Sloupce';
},
formatAllRows: function () {
return 'Vše';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['cs-CZ']);
})(jQuery);
/**
* Bootstrap Table danish translation
 * Author: Jan Borup Coyle, [email protected]
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['da-DK'] = {
formatLoadingMessage: function () {
return 'Indlæser, vent venligst...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' poster pr side';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Viser ' + pageFrom + ' til ' + pageTo + ' af ' + totalRows + ' rækker';
},
formatSearch: function () {
return 'Søg';
},
formatNoMatches: function () {
return 'Ingen poster fundet';
},
formatRefresh: function () {
return 'Opdater';
},
formatToggle: function () {
return 'Skift';
},
formatColumns: function () {
return 'Kolonner';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['da-DK']);
})(jQuery);
/**
* Bootstrap Table German translation
* Author: Paul Mohr - Sopamo<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['de-DE'] = {
formatLoadingMessage: function () {
return 'Lade, bitte warten...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' Einträge pro Seite';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Zeige ' + pageFrom + ' bis ' + pageTo + ' von ' + totalRows + ' Zeile' + ((totalRows > 1) ? "n" : "");
},
formatSearch: function () {
return 'Suchen';
},
formatNoMatches: function () {
return 'Keine passenden Ergebnisse gefunden';
},
formatRefresh: function () {
return 'Neu laden';
},
formatToggle: function () {
return 'Umschalten';
},
formatColumns: function () {
return 'Spalten';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['de-DE']);
})(jQuery);
/**
* Bootstrap Table Greek translation
* Author: giannisdallas
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['el-GR'] = {
formatLoadingMessage: function () {
return 'Φορτώνει, παρακαλώ περιμένετε...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' αποτελέσματα ανά σελίδα';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Εμφανίζονται από την ' + pageFrom + ' ως την ' + pageTo + ' από σύνολο ' + totalRows + ' σειρών';
},
formatSearch: function () {
return 'Αναζητήστε';
},
formatNoMatches: function () {
return 'Δεν βρέθηκαν αποτελέσματα';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['el-GR']);
})(jQuery);
/**
* Bootstrap Table English translation
* Author: Zhixin Wen<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['en-US'] = {
formatLoadingMessage: function () {
return 'Loading, please wait...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rows per page';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Showing ' + pageFrom + ' to ' + pageTo + ' of ' + totalRows + ' rows';
},
formatSearch: function () {
return 'Search';
},
formatNoMatches: function () {
return 'No matching records found';
},
formatPaginationSwitch: function () {
return 'Hide/Show pagination';
},
formatRefresh: function () {
return 'Refresh';
},
formatToggle: function () {
return 'Toggle';
},
formatColumns: function () {
return 'Columns';
},
formatAllRows: function () {
return 'All';
},
formatExport: function () {
return 'Export data';
},
formatClearFilters: function () {
return 'Clear filters';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['en-US']);
})(jQuery);
/**
* Bootstrap Table Spanish (Argentina) translation
* Author: Felix Vera ([email protected])
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-AR'] = {
formatLoadingMessage: function () {
return 'Cargando, espere por favor...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrando ' + pageFrom + ' a ' + pageTo + ' de ' + totalRows + ' filas';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se encontraron registros';
},
formatAllRows: function () {
return 'Todo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-AR']);
})(jQuery);
/**
* Bootstrap Table Spanish (Costa Rica) translation
* Author: Dennis Hernández (http://djhvscf.github.io/Blog/)
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-CR'] = {
formatLoadingMessage: function () {
return 'Cargando, por favor espere...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrando de ' + pageFrom + ' a ' + pageTo + ' registros de ' + totalRows + ' registros en total';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se encontraron registros';
},
formatRefresh: function () {
return 'Refrescar';
},
formatToggle: function () {
return 'Alternar';
},
formatColumns: function () {
return 'Columnas';
},
formatAllRows: function () {
return 'Todo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-CR']);
})(jQuery);
/**
* Bootstrap Table Spanish Spain translation
* Author: Marc Pina<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-ES'] = {
formatLoadingMessage: function () {
return 'Por favor espere...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' resultados por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrando desde ' + pageFrom + ' hasta ' + pageTo + ' - En total ' + totalRows + ' resultados';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se encontraron resultados';
},
formatPaginationSwitch: function () {
return 'Ocultar/Mostrar paginación';
},
formatRefresh: function () {
return 'Refrescar';
},
formatToggle: function () {
return 'Ocultar/Mostrar';
},
formatColumns: function () {
return 'Columnas';
},
formatAllRows: function () {
return 'Todos';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-ES']);
})(jQuery);
/**
 * Bootstrap Table Spanish (México) translation (derived from the Argentina translation)
 * Author: Felix Vera ([email protected])
 * Copied by: Mauricio Vera ([email protected])
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-MX'] = {
formatLoadingMessage: function () {
return 'Cargando, espere por favor...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrando ' + pageFrom + ' a ' + pageTo + ' de ' + totalRows + ' filas';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se encontraron registros';
},
formatAllRows: function () {
return 'Todo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-MX']);
})(jQuery);
/**
* Bootstrap Table Spanish (Nicaragua) translation
* Author: Dennis Hernández (http://djhvscf.github.io/Blog/)
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-NI'] = {
formatLoadingMessage: function () {
return 'Cargando, por favor espere...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Mostrando de ' + pageFrom + ' a ' + pageTo + ' registros de ' + totalRows + ' registros en total';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se encontraron registros';
},
formatRefresh: function () {
return 'Refrescar';
},
formatToggle: function () {
return 'Alternar';
},
formatColumns: function () {
return 'Columnas';
},
formatAllRows: function () {
return 'Todo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-NI']);
})(jQuery);
/**
* Bootstrap Table Spanish (España) translation
* Author: Antonio Pérez <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['es-SP'] = {
formatLoadingMessage: function () {
return 'Cargando, por favor espera...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página.';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return pageFrom + ' - ' + pageTo + ' de ' + totalRows + ' registros.';
},
formatSearch: function () {
return 'Buscar';
},
formatNoMatches: function () {
return 'No se han encontrado registros.';
},
formatRefresh: function () {
return 'Actualizar';
},
formatToggle: function () {
return 'Alternar';
},
formatColumns: function () {
return 'Columnas';
},
formatAllRows: function () {
return 'Todo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['es-SP']);
})(jQuery);
/**
* Bootstrap Table Estonian translation
 * Author: [email protected]
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['et-EE'] = {
formatLoadingMessage: function () {
return 'Päring käib, palun oota...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rida lehe kohta';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Näitan tulemusi ' + pageFrom + ' kuni ' + pageTo + ' - kokku ' + totalRows + ' tulemust';
},
formatSearch: function () {
return 'Otsi';
},
formatNoMatches: function () {
return 'Päringu tingimustele ei vastanud ühtegi tulemust';
},
formatPaginationSwitch: function () {
return 'Näita/Peida lehtedeks jagamine';
},
formatRefresh: function () {
return 'Värskenda';
},
formatToggle: function () {
return 'Lülita';
},
formatColumns: function () {
return 'Veerud';
},
formatAllRows: function () {
return 'Kõik';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['et-EE']);
})(jQuery);
/**
* Bootstrap Table Persian translation
* Author: MJ Vakili <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['fa-IR'] = {
formatLoadingMessage: function () {
return 'در حال بارگذاری, لطفا صبر کنید...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' رکورد در صفحه';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'نمایش ' + pageFrom + ' تا ' + pageTo + ' از ' + totalRows + ' ردیف';
},
formatSearch: function () {
return 'جستجو';
},
formatNoMatches: function () {
return 'رکوردی یافت نشد.';
},
formatPaginationSwitch: function () {
return 'نمایش/مخفی صفحه بندی';
},
formatRefresh: function () {
return 'به روز رسانی';
},
formatToggle: function () {
return 'تغییر نمایش';
},
formatColumns: function () {
return 'سطر ها';
},
formatAllRows: function () {
return 'همه';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['fa-IR']);
})(jQuery);
/**
* Bootstrap Table French (Belgium) translation
* Author: Julien Bisconti ([email protected])
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['fr-BE'] = {
formatLoadingMessage: function () {
return 'Chargement en cours...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' entrées par page';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
            return 'Affiche de ' + pageFrom + ' à ' + pageTo + ' sur ' + totalRows + ' lignes';
},
formatSearch: function () {
return 'Recherche';
},
formatNoMatches: function () {
return 'Pas de fichiers trouvés';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['fr-BE']);
})(jQuery);
/**
* Bootstrap Table French (France) translation
* Author: Dennis Hernández (http://djhvscf.github.io/Blog/)
* Modification: Tidalf (https://github.com/TidalfFR)
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['fr-FR'] = {
formatLoadingMessage: function () {
            return 'Chargement en cours, patientez, s\'il vous plaît ...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' lignes par page';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Affichage des lignes ' + pageFrom + ' à ' + pageTo + ' sur ' + totalRows + ' lignes au total';
},
formatSearch: function () {
return 'Rechercher';
},
formatNoMatches: function () {
return 'Aucun résultat trouvé';
},
formatRefresh: function () {
return 'Rafraîchir';
},
formatToggle: function () {
return 'Alterner';
},
formatColumns: function () {
return 'Colonnes';
},
formatAllRows: function () {
return 'Tous';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['fr-FR']);
})(jQuery);
/**
* Bootstrap Table Hebrew translation
* Author: legshooter
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['he-IL'] = {
formatLoadingMessage: function () {
return 'טוען, נא להמתין...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' שורות בעמוד';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'מציג ' + pageFrom + ' עד ' + pageTo + ' מ-' + totalRows + ' שורות';
},
formatSearch: function () {
return 'חיפוש';
},
formatNoMatches: function () {
return 'לא נמצאו רשומות תואמות';
},
formatPaginationSwitch: function () {
return 'הסתר/הצג מספור דפים';
},
formatRefresh: function () {
return 'רענן';
},
formatToggle: function () {
return 'החלף תצוגה';
},
formatColumns: function () {
return 'עמודות';
},
formatAllRows: function () {
return 'הכל';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['he-IL']);
})(jQuery);
/**
* Bootstrap Table Croatian translation
 * Author: Petra Štrbenac ([email protected])
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['hr-HR'] = {
formatLoadingMessage: function () {
return 'Molimo pričekajte ...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' broj zapisa po stranici';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Prikazujem ' + pageFrom + '. - ' + pageTo + '. od ukupnog broja zapisa ' + totalRows;
},
formatSearch: function () {
return 'Pretraži';
},
formatNoMatches: function () {
return 'Nije pronađen niti jedan zapis';
},
formatPaginationSwitch: function () {
return 'Prikaži/sakrij stranice';
},
formatRefresh: function () {
return 'Osvježi';
},
formatToggle: function () {
return 'Promijeni prikaz';
},
formatColumns: function () {
return 'Kolone';
},
formatAllRows: function () {
return 'Sve';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['hr-HR']);
})(jQuery);
/**
* Bootstrap Table Hungarian translation
* Author: Nagy Gergely <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['hu-HU'] = {
formatLoadingMessage: function () {
return 'Betöltés, kérem várjon...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rekord per oldal';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Megjelenítve ' + pageFrom + ' - ' + pageTo + ' / ' + totalRows + ' összesen';
},
formatSearch: function () {
return 'Keresés';
},
formatNoMatches: function () {
return 'Nincs találat';
},
formatPaginationSwitch: function () {
return 'Lapozó elrejtése/megjelenítése';
},
formatRefresh: function () {
return 'Frissítés';
},
formatToggle: function () {
return 'Összecsuk/Kinyit';
},
formatColumns: function () {
return 'Oszlopok';
},
formatAllRows: function () {
return 'Összes';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['hu-HU']);
})(jQuery);
/**
* Bootstrap Table Italian translation
* Author: Davide Renzi<[email protected]>
* Author: Davide Borsatto <[email protected]>
* Author: Alessio Felicioni <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['it-IT'] = {
formatLoadingMessage: function () {
return 'Caricamento in corso...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' elementi per pagina';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Pagina ' + pageFrom + ' di ' + pageTo + ' (' + totalRows + ' elementi)';
},
formatSearch: function () {
return 'Cerca';
},
formatNoMatches: function () {
return 'Nessun elemento trovato';
},
formatRefresh: function () {
return 'Aggiorna';
},
formatToggle: function () {
return 'Alterna';
},
formatColumns: function () {
return 'Colonne';
},
formatAllRows: function () {
return 'Tutto';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['it-IT']);
})(jQuery);
/**
* Bootstrap Table Japanese translation
* Author: Azamshul Azizy <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ja-JP'] = {
formatLoadingMessage: function () {
return '読み込み中です。少々お待ちください。';
},
formatRecordsPerPage: function (pageNumber) {
return 'ページ当たり最大' + pageNumber + '件';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return '全' + totalRows + '件から、'+ pageFrom + 'から' + pageTo + '件目まで表示しています';
},
formatSearch: function () {
return '検索';
},
formatNoMatches: function () {
return '該当するレコードが見つかりません';
},
formatPaginationSwitch: function () {
return 'ページ数を表示・非表示';
},
formatRefresh: function () {
return '更新';
},
formatToggle: function () {
return 'トグル';
},
formatColumns: function () {
return '列';
},
formatAllRows: function () {
return 'すべて';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ja-JP']);
})(jQuery);
/**
* Bootstrap Table Georgian translation
* Author: Levan Lotuashvili <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ka-GE'] = {
formatLoadingMessage: function() {
return 'იტვირთება, გთხოვთ მოიცადოთ...';
},
formatRecordsPerPage: function(pageNumber) {
return pageNumber + ' ჩანაწერი თითო გვერდზე';
},
formatShowingRows: function(pageFrom, pageTo, totalRows) {
return 'ნაჩვენებია ' + pageFrom + '-დან ' + pageTo + '-მდე ჩანაწერი ჯამური ' + totalRows + '-დან';
},
formatSearch: function() {
return 'ძებნა';
},
formatNoMatches: function() {
return 'მონაცემები არ არის';
},
formatPaginationSwitch: function() {
return 'გვერდების გადამრთველის დამალვა/გამოჩენა';
},
formatRefresh: function() {
return 'განახლება';
},
formatToggle: function() {
return 'ჩართვა/გამორთვა';
},
formatColumns: function() {
return 'სვეტები';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ka-GE']);
})(jQuery);
/**
* Bootstrap Table Korean translation
* Author: Yi Tae-Hyeong ([email protected])
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ko-KR'] = {
formatLoadingMessage: function () {
return '데이터를 불러오는 중입니다...';
},
formatRecordsPerPage: function (pageNumber) {
return '페이지 당 ' + pageNumber + '개 데이터 출력';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
            return '전체 ' + totalRows + '개 중 ' + pageFrom + '~' + pageTo + '번째 데이터 출력';
},
formatSearch: function () {
return '검색';
},
formatNoMatches: function () {
return '조회된 데이터가 없습니다.';
},
formatRefresh: function () {
return '새로 고침';
},
formatToggle: function () {
return '전환';
},
formatColumns: function () {
return '컬럼 필터링';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ko-KR']);
})(jQuery);
/**
* Bootstrap Table Malay translation
* Author: Azamshul Azizy <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ms-MY'] = {
formatLoadingMessage: function () {
return 'Permintaan sedang dimuatkan. Sila tunggu sebentar...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rekod setiap muka surat';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Sedang memaparkan rekod ' + pageFrom + ' hingga ' + pageTo + ' daripada jumlah ' + totalRows + ' rekod';
},
formatSearch: function () {
return 'Cari';
},
formatNoMatches: function () {
return 'Tiada rekod yang menyamai permintaan';
},
formatPaginationSwitch: function () {
return 'Tunjuk/sembunyi muka surat';
},
formatRefresh: function () {
return 'Muatsemula';
},
formatToggle: function () {
return 'Tukar';
},
formatColumns: function () {
return 'Lajur';
},
formatAllRows: function () {
return 'Semua';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ms-MY']);
})(jQuery);
/**
 * Bootstrap Table Norwegian translation
* Author: Jim Nordbø, [email protected]
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['nb-NO'] = {
formatLoadingMessage: function () {
return 'Oppdaterer, vennligst vent...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' poster pr side';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Viser ' + pageFrom + ' til ' + pageTo + ' av ' + totalRows + ' rekker';
},
formatSearch: function () {
return 'Søk';
},
formatNoMatches: function () {
return 'Ingen poster funnet';
},
formatRefresh: function () {
return 'Oppdater';
},
formatToggle: function () {
return 'Endre';
},
formatColumns: function () {
return 'Kolonner';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['nb-NO']);
})(jQuery);
/**
* Bootstrap Table Dutch translation
* Author: Your Name <[email protected]>
*/
(function($) {
'use strict';
$.fn.bootstrapTable.locales['nl-NL'] = {
formatLoadingMessage: function() {
return 'Laden, even geduld...';
},
formatRecordsPerPage: function(pageNumber) {
return pageNumber + ' records per pagina';
},
formatShowingRows: function(pageFrom, pageTo, totalRows) {
return 'Toon ' + pageFrom + ' tot ' + pageTo + ' van ' + totalRows + ' record' + ((totalRows > 1) ? 's' : '');
},
formatDetailPagination: function(totalRows) {
return 'Toon ' + totalRows + ' record' + ((totalRows > 1) ? 's' : '');
},
formatSearch: function() {
return 'Zoeken';
},
formatNoMatches: function() {
return 'Geen resultaten gevonden';
},
formatRefresh: function() {
return 'Vernieuwen';
},
formatToggle: function() {
return 'Omschakelen';
},
formatColumns: function() {
return 'Kolommen';
},
formatAllRows: function() {
return 'Alle';
},
formatPaginationSwitch: function() {
return 'Verberg/Toon paginatie';
},
formatExport: function() {
return 'Exporteer data';
},
formatClearFilters: function() {
return 'Verwijder filters';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['nl-NL']);
})(jQuery);
/**
* Bootstrap Table Polish translation
* Author: zergu <michal.zagdan @ gmail com>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['pl-PL'] = {
formatLoadingMessage: function () {
return 'Ładowanie, proszę czekać...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rekordów na stronę';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Wyświetlanie rekordów od ' + pageFrom + ' do ' + pageTo + ' z ' + totalRows;
},
formatSearch: function () {
return 'Szukaj';
},
formatNoMatches: function () {
return 'Niestety, nic nie znaleziono';
},
formatRefresh: function () {
return 'Odśwież';
},
formatToggle: function () {
return 'Przełącz';
},
formatColumns: function () {
return 'Kolumny';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['pl-PL']);
})(jQuery);
/**
* Bootstrap Table Brazilian Portuguese Translation
* Author: Eduardo Cerqueira<[email protected]>
* Update: João Mello<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['pt-BR'] = {
formatLoadingMessage: function () {
return 'Carregando, aguarde...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registros por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Exibindo ' + pageFrom + ' até ' + pageTo + ' de ' + totalRows + ' linhas';
},
formatSearch: function () {
return 'Pesquisar';
},
formatRefresh: function () {
return 'Recarregar';
},
formatToggle: function () {
return 'Alternar';
},
formatColumns: function () {
return 'Colunas';
},
formatPaginationSwitch: function () {
return 'Ocultar/Exibir paginação';
},
formatNoMatches: function () {
return 'Nenhum registro encontrado';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['pt-BR']);
})(jQuery);
/**
* Bootstrap Table Portuguese Portugal Translation
* Author: Burnspirit<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['pt-PT'] = {
formatLoadingMessage: function () {
return 'A carregar, por favor aguarde...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' registos por página';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'A mostrar ' + pageFrom + ' até ' + pageTo + ' de ' + totalRows + ' linhas';
},
formatSearch: function () {
return 'Pesquisa';
},
formatNoMatches: function () {
return 'Nenhum registo encontrado';
},
formatPaginationSwitch: function () {
return 'Esconder/Mostrar paginação';
},
formatRefresh: function () {
return 'Atualizar';
},
formatToggle: function () {
return 'Alternar';
},
formatColumns: function () {
return 'Colunas';
},
formatAllRows: function () {
return 'Tudo';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['pt-PT']);
})(jQuery);
/**
* Bootstrap Table Romanian translation
* Author: cristake <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ro-RO'] = {
formatLoadingMessage: function () {
return 'Se incarca, va rugam asteptati...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' inregistrari pe pagina';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Arata de la ' + pageFrom + ' pana la ' + pageTo + ' din ' + totalRows + ' randuri';
},
formatSearch: function () {
return 'Cauta';
},
formatNoMatches: function () {
return 'Nu au fost gasite inregistrari';
},
formatPaginationSwitch: function () {
return 'Ascunde/Arata paginatia';
},
formatRefresh: function () {
return 'Reincarca';
},
formatToggle: function () {
return 'Comuta';
},
formatColumns: function () {
return 'Coloane';
},
formatAllRows: function () {
return 'Toate';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ro-RO']);
})(jQuery);
/**
* Bootstrap Table Russian translation
* Author: Dunaevsky Maxim <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ru-RU'] = {
formatLoadingMessage: function () {
return 'Пожалуйста, подождите, идёт загрузка...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' записей на страницу';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Записи с ' + pageFrom + ' по ' + pageTo + ' из ' + totalRows;
},
formatSearch: function () {
return 'Поиск';
},
formatNoMatches: function () {
return 'Ничего не найдено';
},
formatRefresh: function () {
return 'Обновить';
},
formatToggle: function () {
return 'Переключить';
},
formatColumns: function () {
return 'Колонки';
},
formatClearFilters: function () {
return 'Очистить фильтры';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ru-RU']);
})(jQuery);
/**
* Bootstrap Table Slovak translation
* Author: Jozef Dúc<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['sk-SK'] = {
formatLoadingMessage: function () {
return 'Prosím čakajte ...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' záznamov na stranu';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Zobrazená ' + pageFrom + '. - ' + pageTo + '. položka z celkových ' + totalRows;
},
formatSearch: function () {
return 'Vyhľadávanie';
},
formatNoMatches: function () {
return 'Nenájdená žiadna vyhovujúca položka';
},
formatRefresh: function () {
return 'Obnoviť';
},
formatToggle: function () {
return 'Prepni';
},
formatColumns: function () {
return 'Stĺpce';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['sk-SK']);
})(jQuery);
/**
* Bootstrap Table Swedish translation
* Author: C Bratt <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['sv-SE'] = {
formatLoadingMessage: function () {
return 'Laddar, vänligen vänta...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' rader per sida';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Visa ' + pageFrom + ' till ' + pageTo + ' av ' + totalRows + ' rader';
},
formatSearch: function () {
return 'Sök';
},
formatNoMatches: function () {
return 'Inga matchande resultat funna.';
},
formatRefresh: function () {
return 'Uppdatera';
},
formatToggle: function () {
return 'Skifta';
},
formatColumns: function () {
return 'kolumn';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['sv-SE']);
})(jQuery);
/**
* Bootstrap Table Thai translation
* Author: Monchai S.<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['th-TH'] = {
formatLoadingMessage: function () {
return 'กำลังโหลดข้อมูล, กรุณารอสักครู่...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' รายการต่อหน้า';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'รายการที่ ' + pageFrom + ' ถึง ' + pageTo + ' จากทั้งหมด ' + totalRows + ' รายการ';
},
formatSearch: function () {
return 'ค้นหา';
},
formatNoMatches: function () {
return 'ไม่พบรายการที่ค้นหา !';
},
formatRefresh: function () {
return 'รีเฟรส';
},
formatToggle: function () {
return 'สลับมุมมอง';
},
formatColumns: function () {
return 'คอลัมน์';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['th-TH']);
})(jQuery);
/**
* Bootstrap Table Turkish translation
* Author: Emin Şen
* Author: Sercan Cakir <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['tr-TR'] = {
formatLoadingMessage: function () {
return 'Yükleniyor, lütfen bekleyin...';
},
formatRecordsPerPage: function (pageNumber) {
return 'Sayfa başına ' + pageNumber + ' kayıt.';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return totalRows + ' kayıttan ' + pageFrom + '-' + pageTo + ' arası gösteriliyor.';
},
formatSearch: function () {
return 'Ara';
},
formatNoMatches: function () {
return 'Eşleşen kayıt bulunamadı.';
},
formatRefresh: function () {
return 'Yenile';
},
formatToggle: function () {
return 'Değiştir';
},
formatColumns: function () {
return 'Sütunlar';
},
formatAllRows: function () {
return 'Tüm Satırlar';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['tr-TR']);
})(jQuery);
/**
* Bootstrap Table Ukrainian translation
* Author: Vitaliy Timchenko <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['uk-UA'] = {
formatLoadingMessage: function () {
return 'Завантаження, будь ласка, зачекайте...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' записів на сторінку';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Показано з ' + pageFrom + ' по ' + pageTo + '. Всього: ' + totalRows;
},
formatSearch: function () {
return 'Пошук';
},
formatNoMatches: function () {
return 'Не знайдено жодного запису';
},
formatRefresh: function () {
return 'Оновити';
},
formatToggle: function () {
return 'Змінити';
},
formatColumns: function () {
return 'Стовпці';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['uk-UA']);
})(jQuery);
/**
* Bootstrap Table Urdu translation
* Author: Malik <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['ur-PK'] = {
formatLoadingMessage: function () {
return 'براۓ مہربانی انتظار کیجئے';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' ریکارڈز فی صفہ ';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'دیکھیں ' + pageFrom + ' سے ' + pageTo + ' کے ' + totalRows + 'ریکارڈز';
},
formatSearch: function () {
return 'تلاش';
},
formatNoMatches: function () {
return 'کوئی ریکارڈ نہیں ملا';
},
formatRefresh: function () {
return 'تازہ کریں';
},
formatToggle: function () {
return 'تبدیل کریں';
},
formatColumns: function () {
return 'کالم';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['ur-PK']);
})(jQuery);
/**
* Bootstrap Table Vietnamese translation
* Author: Duc N. PHAM <[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['vi-VN'] = {
formatLoadingMessage: function () {
return 'Đang tải...';
},
formatRecordsPerPage: function (pageNumber) {
return pageNumber + ' bản ghi mỗi trang';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return 'Hiển thị từ trang ' + pageFrom + ' đến ' + pageTo + ' của ' + totalRows + ' bảng ghi';
},
formatSearch: function () {
return 'Tìm kiếm';
},
formatNoMatches: function () {
return 'Không có dữ liệu';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['vi-VN']);
})(jQuery);
/**
* Bootstrap Table Chinese translation
* Author: Zhixin Wen<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['zh-CN'] = {
formatLoadingMessage: function () {
return '正在努力地加载数据中,请稍候……';
},
formatRecordsPerPage: function (pageNumber) {
return '每页显示 ' + pageNumber + ' 条记录';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return '显示第 ' + pageFrom + ' 到第 ' + pageTo + ' 条记录,总共 ' + totalRows + ' 条记录';
},
formatSearch: function () {
return '搜索';
},
formatNoMatches: function () {
return '没有找到匹配的记录';
},
formatPaginationSwitch: function () {
return '隐藏/显示分页';
},
formatRefresh: function () {
return '刷新';
},
formatToggle: function () {
return '切换';
},
formatColumns: function () {
return '列';
},
formatExport: function () {
return '导出数据';
},
formatClearFilters: function () {
return '清空过滤';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['zh-CN']);
})(jQuery);
/**
 * Bootstrap Table Chinese (Taiwan) translation
* Author: Zhixin Wen<[email protected]>
*/
(function ($) {
'use strict';
$.fn.bootstrapTable.locales['zh-TW'] = {
formatLoadingMessage: function () {
return '正在努力地載入資料,請稍候……';
},
formatRecordsPerPage: function (pageNumber) {
return '每頁顯示 ' + pageNumber + ' 項記錄';
},
formatShowingRows: function (pageFrom, pageTo, totalRows) {
return '顯示第 ' + pageFrom + ' 到第 ' + pageTo + ' 項記錄,總共 ' + totalRows + ' 項記錄';
},
formatSearch: function () {
return '搜尋';
},
formatNoMatches: function () {
return '沒有找到符合的結果';
},
formatPaginationSwitch: function () {
return '隱藏/顯示分頁';
},
formatRefresh: function () {
return '重新整理';
},
formatToggle: function () {
return '切換';
},
formatColumns: function () {
return '列';
}
};
$.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['zh-TW']);
})(jQuery);
|
/*
* Copyright (C) 2015. The CloudKit Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function( factory ) {
if ( typeof define === "function" && define.amd ) {
define( ["jquery", "../jquery.validate"], factory );
} else {
factory( jQuery );
}
}(function( $ ) {
/*
* Translated default messages for the jQuery validation plugin.
* Locale: PL (Polish; język polski, polszczyzna)
*/
$.extend($.validator.messages, {
required: "To pole jest wymagane.",
remote: "Proszę o wypełnienie tego pola.",
email: "Proszę o podanie prawidłowego adresu email.",
url: "Proszę o podanie prawidłowego URL.",
date: "Proszę o podanie prawidłowej daty.",
dateISO: "Proszę o podanie prawidłowej daty (ISO).",
number: "Proszę o podanie prawidłowej liczby.",
digits: "Proszę o podanie samych cyfr.",
creditcard: "Proszę o podanie prawidłowej karty kredytowej.",
equalTo: "Proszę o podanie tej samej wartości ponownie.",
extension: "Proszę o podanie wartości z prawidłowym rozszerzeniem.",
maxlength: $.validator.format("Proszę o podanie nie więcej niż {0} znaków."),
minlength: $.validator.format("Proszę o podanie przynajmniej {0} znaków."),
rangelength: $.validator.format("Proszę o podanie wartości o długości od {0} do {1} znaków."),
range: $.validator.format("Proszę o podanie wartości z przedziału od {0} do {1}."),
max: $.validator.format("Proszę o podanie wartości mniejszej bądź równej {0}."),
min: $.validator.format("Proszę o podanie wartości większej bądź równej {0}.")
});
})); |
"""
ntpc9
OMP_NUM_THREADS 1
MKL_NUM_THREADS 1
nt
3.2839200710877776
[3.289062741678208, 3.2875119652599096, 3.277139882091433]
librosa
1.6728829271160066
[1.6732992688193917, 1.663632761221379, 1.6640413324348629]
scipy
3.2334349588491023
[3.255621672142297, 3.2345176991075277, 3.2378146476112306]
python_speech_features
3.7981791491620243
[3.7941275471821427, 3.8010904281400144, 3.7978039011359215]
"""
import numpy as np
import timeit
import paderbox as pb
import librosa
import scipy.signal
import os
import python_speech_features
from functools import partial
import socket
B = 8
T = 16000 * 5
X = np.random.normal(size=(B, T))
SIZE = 1024
SHIFT = 256
def setup_nt():
fn = partial(pb.transform.stft, size=SIZE, shift=SHIFT, fading=False, pad=False)
return X, fn
def setup_librosa():
# Librosa cache is off by default
# https://librosa.github.io/librosa/cache.html#enabling-the-cache
def fn(x_):
# Does not support independent axis
return [librosa.stft(x__, n_fft=SIZE, hop_length=SHIFT, center=False) for x__ in x_]
return X, fn
def setup_scipy():
fn = partial(scipy.signal.stft, nperseg=SIZE, noverlap=SIZE - SHIFT)
return X, fn
def setup_python_speech_features():
def fn1(x_):
frames = python_speech_features.sigproc.framesig(
x_, frame_len=SIZE, frame_step=SHIFT
)
return np.fft.rfft(frames, SIZE)
def fn2(x_):
return [fn1(x__) for x__ in x_]
return X, fn2
if __name__ == '__main__':
print(socket.gethostname())
print('OMP_NUM_THREADS', os.environ.get('OMP_NUM_THREADS'))
print('MKL_NUM_THREADS', os.environ.get('MKL_NUM_THREADS'))
print()
repeats = 100
for library in 'nt librosa scipy python_speech_features'.split():
print(library)
t = timeit.Timer(
'fn(x)',
setup=(
f'from __main__ import setup_{library}; '
f'x, fn = setup_{library}()'
)
)
print(t.timeit(number=repeats))
print(t.repeat(number=repeats))
print()
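        # Note (illustrative, not part of the original benchmark): per the timeit
        # docs, the minimum of the repeat list is the most robust summary, since
        # higher values are typically noise from other processes, e.g.:
        #   best = min(t.repeat(repeat=3, number=repeats))
        #   print(f'{library}: best of 3 runs = {best:.3f} s')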
|
const avatars = [
"/home/pringon/dev/workshops/frontend/vanilla/icons/alek.jpg",
"/home/pringon/dev/workshops/frontend/vanilla/icons/dan.png",
"/home/pringon/dev/workshops/frontend/vanilla/icons/nicky.jpg",
"/home/pringon/dev/workshops/frontend/vanilla/icons/velina.jpg",
];
function addAvatars(labels) {
return labels.map((label) => {
const avatarIndex = Math.floor(Math.random() * avatars.length);
return {
label,
avatar: avatars[avatarIndex],
};
});
}
|
/* 'this' is the runtime (execution) context of the function */
let log = console.log;
function bbb() {
log(this);
}
let objA = {
b: bbb,
c: {
d: bbb
}
};
bbb(); // this -> window
objA.b(); // this -> objA
objA.c.d(); // this -> objA.c |
var path = require("path");
/**
* OPTIONS /coursereserves/courses?query=courseListingId=="79702b52-0d7e-48ea-b7ef-a4e74289f000" not (id=="c62e7511-dbe4-44e5-8592-9777d409ed42")
*
* connection: keep-alive
* host: folio-snapshot-okapi.dev.folio.org
* proxy-connection: keep-alive
* accept: * / *
* access-control-request-method: GET
* access-control-request-headers: content-type,x-okapi-tenant,x-okapi-token
* origin: http://localhost:3001
* user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_1) AppleWebKit/537.36 (KHTML, like Gecko) Cypress/6.5.0 Chrome/87.0.4280.141 Electron/11.2.3 Safari/537.36
* sec-fetch-mode: cors
* sec-fetch-site: same-site
* sec-fetch-dest: empty
* referer: http://localhost:3001/cr/courses/c62e7511-dbe4-44e5-8592-9777d409ed42?sort=name
* accept-encoding: gzip
* accept-language: en-US
*/
module.exports = function (req, res) {
res.statusCode = 204;
res.setHeader("date", "Mon, 01 Mar 2021 17:54:56 GMT");
res.setHeader("connection", "keep-alive");
res.setHeader("access-control-allow-origin", "*");
res.setHeader("access-control-allow-methods", "PUT,PATCH,DELETE,GET,POST");
res.setHeader("access-control-allow-headers", "content-type,X-Okapi-Tenant,X-Okapi-Token,Authorization,X-Okapi-Request-Id,X-Okapi-Module-Id");
res.setHeader("x-yakbak-tape", path.basename(__filename, ".js"));
res.end();
return __filename;
};
|
def extractHakushakuToYousei(item):
"""
Parser for 'Hakushaku to Yousei'
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
if 'WATTT' in item['tags']:
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
return False
|
import {
createStore, combineReducers,
} from 'redux';
import app from './models/app/model';
import book from './models/book/model';
import DevTool from './DevTool';
import { destruct } from '../src/index';
const store = createStore(() => {}, {}, DevTool().instrument());
const { reducers, connect } = destruct(store)({ app, book });
store.replaceReducer(combineReducers(reducers));
export {
store,
connect,
};
|
# coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd">
# Copyright (c) 2003-2021 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
from __future__ import absolute_import
import unittest
import os
from groupdocs_signature_cloud import *
from test.test_context import TestContext
from test.test_file import TestFile
class TestVerifyBarcode(TestContext):
def test_verify_barcode_image(self):
test_file = TestFile.image_signed()
settings = self.populate_options(test_file)
response = self.sign_api.verify_signatures(VerifySignaturesRequest(settings))
self.check_response(response, test_file)
def test_verify_barcode_pdf(self):
test_file = TestFile.pdf_signed()
settings = self.populate_options(test_file)
response = self.sign_api.verify_signatures(VerifySignaturesRequest(settings))
self.check_response(response, test_file)
def test_verify_barcode_presentation(self):
test_file = TestFile.presentation_signed()
settings = self.populate_options(test_file)
response = self.sign_api.verify_signatures(VerifySignaturesRequest(settings))
self.check_response(response, test_file)
def test_verify_barcode_spreadsheet(self):
test_file = TestFile.spreadsheet_signed()
settings = self.populate_options(test_file)
response = self.sign_api.verify_signatures(VerifySignaturesRequest(settings))
self.check_response(response, test_file)
def test_verify_barcode_wordprocessing(self):
test_file = TestFile.wordprocessing_signed()
settings = self.populate_options(test_file)
response = self.sign_api.verify_signatures(VerifySignaturesRequest(settings))
self.check_response(response, test_file)
@staticmethod
def populate_options(testFile):
opts = VerifyBarcodeOptions()
opts.signature_type = 'Barcode'
opts.text = '123456789012'
opts.barcode_type = 'Code39Standard'
opts.match_type = 'Contains'
opts.page = 1
opts.all_pages = True
ps = PagesSetup()
ps.even_pages = False
ps.first_page = True
ps.last_page = False
ps.odd_pages = False
ps.page_numbers = [1]
opts.pages_setup = ps
settings = VerifySettings()
settings.file_info = testFile.ToFileInfo()
settings.options = [opts]
return settings
def check_response(self, response, test_file):
self.assertTrue(response)
self.assertTrue(response.file_info)
self.assertEqual(response.file_info.file_path, test_file.FilePath())
if __name__ == '__main__':
unittest.main()
|
"""Utility meter from sensors providing raw data."""
from datetime import date, timedelta
from decimal import Decimal, DecimalException
import logging
import voluptuous as vol
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
EVENT_HOMEASSISTANT_START,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.helpers import entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_time_change,
)
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.dt as dt_util
from .const import (
ATTR_VALUE,
BIMONTHLY,
CONF_METER,
CONF_METER_NET_CONSUMPTION,
CONF_METER_OFFSET,
CONF_METER_TYPE,
CONF_SOURCE_SENSOR,
CONF_TARIFF,
CONF_TARIFF_ENTITY,
DAILY,
DATA_UTILITY,
HOURLY,
MONTHLY,
QUARTER_HOURLY,
QUARTERLY,
SERVICE_CALIBRATE_METER,
SIGNAL_RESET_METER,
WEEKLY,
YEARLY,
)
_LOGGER = logging.getLogger(__name__)
ATTR_SOURCE_ID = "source"
ATTR_STATUS = "status"
ATTR_PERIOD = "meter_period"
ATTR_LAST_PERIOD = "last_period"
ATTR_LAST_RESET = "last_reset"
ATTR_TARIFF = "tariff"
ICON = "mdi:counter"
PRECISION = 3
PAUSED = "paused"
COLLECTING = "collecting"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the utility meter sensor."""
if discovery_info is None:
_LOGGER.error("This platform is only available through discovery")
return
meters = []
for conf in discovery_info:
meter = conf[CONF_METER]
conf_meter_source = hass.data[DATA_UTILITY][meter][CONF_SOURCE_SENSOR]
conf_meter_type = hass.data[DATA_UTILITY][meter].get(CONF_METER_TYPE)
conf_meter_offset = hass.data[DATA_UTILITY][meter][CONF_METER_OFFSET]
conf_meter_net_consumption = hass.data[DATA_UTILITY][meter][
CONF_METER_NET_CONSUMPTION
]
conf_meter_tariff_entity = hass.data[DATA_UTILITY][meter].get(
CONF_TARIFF_ENTITY
)
meters.append(
UtilityMeterSensor(
conf_meter_source,
conf.get(CONF_NAME),
conf_meter_type,
conf_meter_offset,
conf_meter_net_consumption,
conf.get(CONF_TARIFF),
conf_meter_tariff_entity,
)
)
async_add_entities(meters)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_CALIBRATE_METER,
{vol.Required(ATTR_VALUE): vol.Coerce(Decimal)},
"async_calibrate",
)
class UtilityMeterSensor(RestoreEntity, SensorEntity):
"""Representation of an utility meter sensor."""
def __init__(
self,
source_entity,
name,
meter_type,
meter_offset,
net_consumption,
tariff=None,
tariff_entity=None,
):
"""Initialize the Utility Meter sensor."""
self._sensor_source_id = source_entity
self._state = 0
self._last_period = 0
self._last_reset = dt_util.now()
self._collecting = None
if name:
self._name = name
else:
self._name = f"{source_entity} meter"
self._unit_of_measurement = None
self._period = meter_type
self._period_offset = meter_offset
self._sensor_net_consumption = net_consumption
self._tariff = tariff
self._tariff_entity = tariff_entity
@callback
def async_reading(self, event):
"""Handle the sensor state changes."""
old_state = event.data.get("old_state")
new_state = event.data.get("new_state")
if (
old_state is None
or new_state is None
or old_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]
or new_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]
):
return
self._unit_of_measurement = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
try:
diff = Decimal(new_state.state) - Decimal(old_state.state)
if (not self._sensor_net_consumption) and diff < 0:
                # Source sensor just rolled over for unknown reasons; ignore the negative diff
return
self._state += diff
except ValueError as err:
_LOGGER.warning("While processing state changes: %s", err)
except DecimalException as err:
_LOGGER.warning(
"Invalid state (%s > %s): %s", old_state.state, new_state.state, err
)
self.async_write_ha_state()
@callback
def async_tariff_change(self, event):
"""Handle tariff changes."""
new_state = event.data.get("new_state")
if new_state is None:
return
self._change_status(new_state.state)
def _change_status(self, tariff):
if self._tariff == tariff:
self._collecting = async_track_state_change_event(
self.hass, [self._sensor_source_id], self.async_reading
)
else:
if self._collecting:
self._collecting()
self._collecting = None
_LOGGER.debug(
"%s - %s - source <%s>",
self._name,
COLLECTING if self._collecting is not None else PAUSED,
self._sensor_source_id,
)
self.async_write_ha_state()
async def _async_reset_meter(self, event):
"""Determine cycle - Helper function for larger than daily cycles."""
now = dt_util.now().date()
if (
self._period == WEEKLY
and now != now - timedelta(days=now.weekday()) + self._period_offset
):
return
if (
self._period == MONTHLY
and now != date(now.year, now.month, 1) + self._period_offset
):
return
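        # The bimonthly/quarterly checks below map the current month onto the first
        # month of its block: (((month - 1) // 2) * 2 + 1) yields 1, 1, 3, 3, 5, 5, ...
        # for months 1..12, and the "// 3 * 3 + 1" variant yields 1, 1, 1, 4, 4, 4, ...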
if (
self._period == BIMONTHLY
and now
!= date(now.year, (((now.month - 1) // 2) * 2 + 1), 1) + self._period_offset
):
return
if (
self._period == QUARTERLY
and now
!= date(now.year, (((now.month - 1) // 3) * 3 + 1), 1) + self._period_offset
):
return
if self._period == YEARLY and now != date(now.year, 1, 1) + self._period_offset:
return
await self.async_reset_meter(self._tariff_entity)
async def async_reset_meter(self, entity_id):
"""Reset meter."""
if self._tariff_entity != entity_id:
return
_LOGGER.debug("Reset utility meter <%s>", self.entity_id)
self._last_reset = dt_util.now()
self._last_period = str(self._state)
self._state = 0
self.async_write_ha_state()
async def async_calibrate(self, value):
"""Calibrate the Utility Meter with a given value."""
_LOGGER.debug("Calibrate %s = %s", self._name, value)
self._state = value
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
if self._period == QUARTER_HOURLY:
for quarter in range(4):
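                # Schedule a reset at each quarter hour, shifted by the configured
                # offset: whole minutes within the 15-minute window plus the
                # leftover seconds.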
async_track_time_change(
self.hass,
self._async_reset_meter,
minute=(quarter * 15)
+ self._period_offset.seconds % (15 * 60) // 60,
second=self._period_offset.seconds % 60,
)
elif self._period == HOURLY:
async_track_time_change(
self.hass,
self._async_reset_meter,
minute=self._period_offset.seconds // 60,
second=self._period_offset.seconds % 60,
)
elif self._period in [DAILY, WEEKLY, MONTHLY, BIMONTHLY, QUARTERLY, YEARLY]:
async_track_time_change(
self.hass,
self._async_reset_meter,
hour=self._period_offset.seconds // 3600,
minute=self._period_offset.seconds % 3600 // 60,
second=self._period_offset.seconds % 3600 % 60,
)
async_dispatcher_connect(self.hass, SIGNAL_RESET_METER, self.async_reset_meter)
state = await self.async_get_last_state()
if state:
self._state = Decimal(state.state)
self._unit_of_measurement = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
self._last_period = state.attributes.get(ATTR_LAST_PERIOD)
self._last_reset = dt_util.parse_datetime(
state.attributes.get(ATTR_LAST_RESET)
)
if state.attributes.get(ATTR_STATUS) == COLLECTING:
                # Fake cancellation function to init the meter in a similar state
self._collecting = lambda: None
@callback
def async_source_tracking(event):
"""Wait for source to be ready, then start meter."""
if self._tariff_entity is not None:
_LOGGER.debug(
"<%s> tracks utility meter %s", self.name, self._tariff_entity
)
async_track_state_change_event(
self.hass, [self._tariff_entity], self.async_tariff_change
)
tariff_entity_state = self.hass.states.get(self._tariff_entity)
self._change_status(tariff_entity_state.state)
return
_LOGGER.debug("<%s> collecting from %s", self.name, self._sensor_source_id)
self._collecting = async_track_state_change_event(
self.hass, [self._sensor_source_id], self.async_reading
)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, async_source_tracking
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def extra_state_attributes(self):
"""Return the state attributes of the sensor."""
state_attr = {
ATTR_SOURCE_ID: self._sensor_source_id,
ATTR_STATUS: PAUSED if self._collecting is None else COLLECTING,
ATTR_LAST_PERIOD: self._last_period,
ATTR_LAST_RESET: self._last_reset,
}
if self._period is not None:
state_attr[ATTR_PERIOD] = self._period
if self._tariff is not None:
state_attr[ATTR_TARIFF] = self._tariff
return state_attr
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
|
"""Utilities for input validation"""
# Original authors from Scikit-Learn:
# Olivier Grisel
# Gael Varoquaux
# Andreas Mueller
# Lars Buitinck
# Alexandre Gramfort
# Nicolas Tresegnie
# Sylvain Marie
# License: BSD 3 clause
# This code originates from the Scikit-Learn library; it has since been
# modified to allow GPU acceleration.
# This code is under the BSD 3 clause license.
# The authors mentioned above do not endorse or promote this derived work.
from functools import wraps
import warnings
import numbers
import numpy as np
import scipy.sparse as sp
from distutils.version import LooseVersion
from inspect import signature, isclass, Parameter
import joblib
from ..exceptions import NonBLASDotWarning, PositiveSpectrumWarning
from ..exceptions import NotFittedError
from ..exceptions import DataConversionWarning
from ....thirdparty_adapters import check_array
FLOAT_DTYPES = (np.float64, np.float32, np.float16)
# Silenced by default to reduce verbosity. Turn on at runtime for
# performance profiling.
warnings.simplefilter('ignore', NonBLASDotWarning)
def _deprecate_positional_args(f):
"""Decorator for methods that issues warnings for positional arguments
Using the keyword-only argument syntax in pep 3102, arguments after the
* will issue a warning when passed as a positional argument.
Parameters
----------
f : function
function to check arguments on
"""
sig = signature(f)
kwonly_args = []
all_args = []
for name, param in sig.parameters.items():
if param.kind == Parameter.POSITIONAL_OR_KEYWORD:
all_args.append(name)
elif param.kind == Parameter.KEYWORD_ONLY:
kwonly_args.append(name)
@wraps(f)
def inner_f(*args, **kwargs):
extra_args = len(args) - len(all_args)
if extra_args > 0:
# ignore first 'self' argument for instance methods
args_msg = ['{}={}'.format(name, arg)
for name, arg in zip(kwonly_args[:extra_args],
args[-extra_args:])]
warnings.warn("Pass {} as keyword args. From version 0.25 "
"passing these as positional arguments will "
"result in an error".format(", ".join(args_msg)),
FutureWarning)
kwargs.update({k: arg for k, arg in zip(sig.parameters, args)})
return f(**kwargs)
return inner_f
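# Illustrative sketch (hypothetical function, not part of this module) of what the
# decorator enforces: arguments declared after the bare ``*`` must be passed by
# keyword, and positional use emits a FutureWarning while still being forwarded.
#
#     @_deprecate_positional_args
#     def _scale(X, *, factor=1.0):
#         return [x * factor for x in X]
#
#     _scale([1, 2], 2.0)          # warns, treated as _scale([1, 2], factor=2.0)
#     _scale([1, 2], factor=2.0)   # preferred form, no warning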
def _assert_all_finite(X, allow_nan=False, msg_dtype=None):
"""Like assert_all_finite, but only for ndarray."""
# validation is also imported in extmath
from .extmath import _safe_accumulator_op
X = np.asanyarray(X)
# First try an O(n) time, O(1) space solution for the common case that
# everything is finite; fall back to O(n) space np.isfinite to prevent
# false positives from overflow in sum method. The sum is also calculated
# safely to reduce dtype induced overflows.
is_float = X.dtype.kind in 'fc'
if is_float and (np.isfinite(_safe_accumulator_op(np.sum, X))):
pass
elif is_float:
msg_err = "Input contains {} or a value too large for {!r}."
if (allow_nan and np.isinf(X).any() or
not allow_nan and not np.isfinite(X).all()):
type_err = 'infinity' if allow_nan else 'NaN, infinity'
            raise ValueError(
                msg_err.format(
                    type_err,
                    msg_dtype if msg_dtype is not None else X.dtype
                )
            )
@_deprecate_positional_args
def assert_all_finite(X, *, allow_nan=False):
"""Throw a ValueError if X contains NaN or infinity.
Parameters
----------
X : array or sparse matrix
allow_nan : bool
"""
_assert_all_finite(X.data if sp.issparse(X) else X, allow_nan)
@_deprecate_positional_args
def as_float_array(X, *, copy=True, force_all_finite=True):
"""Converts an array-like to an array of floats.
The new dtype will be np.float32 or np.float64, depending on the original
type. The function can create a copy or modify the argument depending
on the argument copy.
Parameters
----------
X : {array-like, sparse matrix}
copy : bool, optional
If True, a copy of X will be created. If False, a copy may still be
returned if X's dtype is not a floating point type.
force_all_finite : boolean or 'allow-nan', (default=True)
Whether to raise an error on np.inf, np.nan, pd.NA in X. The
possibilities are:
- True: Force all values of X to be finite.
- False: accepts np.inf, np.nan, pd.NA in X.
- 'allow-nan': accepts only np.nan and pd.NA values in X. Values cannot
be infinite.
.. versionadded:: 0.20
``force_all_finite`` accepts the string ``'allow-nan'``.
.. versionchanged:: 0.23
Accepts `pd.NA` and converts it into `np.nan`
Returns
-------
XT : {array, sparse matrix}
        An array of floats (np.float32 or np.float64).
"""
if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray)
and not sp.issparse(X)):
return check_array(X, accept_sparse=['csr', 'csc', 'coo'],
dtype=np.float64, copy=copy,
force_all_finite=force_all_finite, ensure_2d=False)
elif sp.issparse(X) and X.dtype in [np.float32, np.float64]:
return X.copy() if copy else X
elif X.dtype in [np.float32, np.float64]: # is numpy array
return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X
else:
if X.dtype.kind in 'uib' and X.dtype.itemsize <= 4:
return_dtype = np.float32
else:
return_dtype = np.float64
return X.astype(return_dtype)
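# Illustrative examples (not part of the original module): bool/int8/int16/int32
# input is promoted to float32, int64/uint64 to float64, and existing
# float32/float64 arrays are returned unchanged unless ``copy=True``.
#
#     as_float_array(np.array([1, 2, 3], dtype=np.int32)).dtype   # float32
#     as_float_array(np.array([1, 2, 3], dtype=np.int64)).dtype   # float64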
def _is_arraylike(x):
"""Returns whether the input is array-like"""
return (hasattr(x, '__len__') or
hasattr(x, 'shape') or
hasattr(x, '__array__'))
def _num_samples(x):
"""Return number of samples in array-like x."""
message = 'Expected sequence or array-like, got %s' % type(x)
if hasattr(x, 'fit') and callable(x.fit):
# Don't get num_samples from an ensembles length!
raise TypeError(message)
if not hasattr(x, '__len__') and not hasattr(x, 'shape'):
if hasattr(x, '__array__'):
x = np.asarray(x)
else:
raise TypeError(message)
if hasattr(x, 'shape') and x.shape is not None:
if len(x.shape) == 0:
raise TypeError("Singleton array %r cannot be considered"
" a valid collection." % x)
# Check that shape is returning an integer or default to len
# Dask dataframes may not return numeric shape[0] value
if isinstance(x.shape[0], numbers.Integral):
return x.shape[0]
try:
return len(x)
except TypeError:
raise TypeError(message)
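# Illustrative behaviour (not part of the original module):
#     _num_samples(np.zeros((5, 3)))   # 5 (first dimension of the shape)
#     _num_samples([10, 20, 30])       # 3 (falls back to len())
#     _num_samples(np.array(1.0))      # raises TypeError (0-d singleton array)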
def check_memory(memory):
"""Check that ``memory`` is joblib.Memory-like.
joblib.Memory-like means that ``memory`` can be converted into a
joblib.Memory instance (typically a str denoting the ``location``)
or has the same interface (has a ``cache`` method).
Parameters
----------
memory : None, str or object with the joblib.Memory interface
Returns
-------
memory : object with the joblib.Memory interface
Raises
------
ValueError
If ``memory`` is not joblib.Memory-like.
"""
if memory is None or isinstance(memory, str):
if LooseVersion(joblib.__version__) < '0.12':
memory = joblib.Memory(cachedir=memory, verbose=0)
else:
memory = joblib.Memory(location=memory, verbose=0)
elif not hasattr(memory, 'cache'):
raise ValueError("'memory' should be None, a string or have the same"
" interface as joblib.Memory."
" Got memory='{}' instead.".format(memory))
return memory
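# Illustrative sketch (hypothetical cache path, not part of the original module):
#     check_memory(None)             # joblib.Memory with no caching location
#     check_memory('/tmp/skcache')   # joblib.Memory caching under that directory
#     check_memory(object())         # raises ValueError (no ``cache`` method)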
def check_consistent_length(*arrays):
"""Check that all arrays have consistent first dimensions.
Checks whether all objects in arrays have the same shape or length.
Parameters
----------
*arrays : list or tuple of input objects.
Objects that will be checked for consistent length.
"""
lengths = [_num_samples(X) for X in arrays if X is not None]
uniques = np.unique(lengths)
if len(uniques) > 1:
raise ValueError("Found input variables with inconsistent numbers of"
" samples")
def _make_indexable(iterable):
"""Ensure iterable supports indexing or convert to an indexable variant.
Convert sparse matrices to csr and other non-indexable iterable to arrays.
Let `None` and indexable objects (e.g. pandas dataframes) pass unchanged.
Parameters
----------
iterable : {list, dataframe, array, sparse} or None
Object to be converted to an indexable iterable.
"""
if sp.issparse(iterable):
return iterable.tocsr()
elif hasattr(iterable, "__getitem__") or hasattr(iterable, "iloc"):
return iterable
elif iterable is None:
return iterable
return np.array(iterable)
def indexable(*iterables):
"""Make arrays indexable for cross-validation.
Checks consistent length, passes through None, and ensures that everything
can be indexed by converting sparse matrices to csr and converting
non-interable objects to arrays.
Parameters
----------
*iterables : lists, dataframes, arrays, sparse matrices
List of objects to ensure sliceability.
"""
result = [_make_indexable(X) for X in iterables]
check_consistent_length(*result)
return result
def _ensure_sparse_format(spmatrix, accept_sparse, dtype, copy,
force_all_finite, accept_large_sparse):
"""Convert a sparse matrix to a given format.
Checks the sparse format of spmatrix and converts if necessary.
Parameters
----------
spmatrix : scipy sparse matrix
Input to validate and convert.
accept_sparse : string, boolean or list/tuple of strings
String[s] representing allowed sparse matrix formats ('csc',
'csr', 'coo', 'dok', 'bsr', 'lil', 'dia'). If the input is sparse but
not in the allowed format, it will be converted to the first listed
format. True allows the input to be any format. False means
that a sparse matrix input will raise an error.
dtype : string, type or None
Data type of result. If None, the dtype of the input is preserved.
copy : boolean
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean or 'allow-nan', (default=True)
Whether to raise an error on np.inf, np.nan, pd.NA in X. The
possibilities are:
- True: Force all values of X to be finite.
- False: accepts np.inf, np.nan, pd.NA in X.
- 'allow-nan': accepts only np.nan and pd.NA values in X. Values cannot
be infinite.
.. versionadded:: 0.20
``force_all_finite`` accepts the string ``'allow-nan'``.
.. versionchanged:: 0.23
Accepts `pd.NA` and converts it into `np.nan`
Returns
-------
spmatrix_converted : scipy sparse matrix.
Matrix that is ensured to have an allowed type.
"""
if dtype is None:
dtype = spmatrix.dtype
changed_format = False
if isinstance(accept_sparse, str):
accept_sparse = [accept_sparse]
# Indices dtype validation
_check_large_sparse(spmatrix, accept_large_sparse)
if accept_sparse is False:
raise TypeError('A sparse matrix was passed, but dense '
'data is required. Use X.toarray() to '
'convert to a dense numpy array.')
elif isinstance(accept_sparse, (list, tuple)):
if len(accept_sparse) == 0:
raise ValueError("When providing 'accept_sparse' "
"as a tuple or list, it must contain at "
"least one string value.")
# ensure correct sparse format
if spmatrix.format not in accept_sparse:
# create new with correct sparse
spmatrix = spmatrix.asformat(accept_sparse[0])
changed_format = True
elif accept_sparse is not True:
# any other type
raise ValueError("Parameter 'accept_sparse' should be a string, "
"boolean or list of strings. You provided "
"'accept_sparse={}'.".format(accept_sparse))
if dtype != spmatrix.dtype:
# convert dtype
spmatrix = spmatrix.astype(dtype)
elif copy and not changed_format:
# force copy
spmatrix = spmatrix.copy()
if force_all_finite:
if not hasattr(spmatrix, "data"):
warnings.warn("Can't check %s sparse matrix for nan or inf."
% spmatrix.format, stacklevel=2)
else:
_assert_all_finite(spmatrix.data,
allow_nan=force_all_finite == 'allow-nan')
return spmatrix
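# Illustrative behaviour (not part of the original module): a COO input with
# accept_sparse=['csr', 'csc'] is converted to CSR (the first allowed format),
# accept_sparse=True keeps the original format, and accept_sparse=False raises
# TypeError asking for a dense array instead.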
def _ensure_no_complex_data(array):
if hasattr(array, 'dtype') and array.dtype is not None \
and hasattr(array.dtype, 'kind') and array.dtype.kind == "c":
raise ValueError("Complex data not supported\n"
"{}\n".format(array))
def _check_large_sparse(X, accept_large_sparse=False):
"""Raise a ValueError if X has 64bit indices and accept_large_sparse=False
"""
if not accept_large_sparse:
supported_indices = ["int32"]
if X.getformat() == "coo":
index_keys = ['col', 'row']
elif X.getformat() in ["csr", "csc", "bsr"]:
index_keys = ['indices', 'indptr']
else:
return
for key in index_keys:
indices_datatype = getattr(X, key).dtype
if (indices_datatype not in supported_indices):
raise ValueError("Only sparse matrices with 32-bit integer"
" indices are accepted. Got %s indices."
% indices_datatype)
@_deprecate_positional_args
def check_X_y(X, y, accept_sparse=False, *, accept_large_sparse=True,
dtype="numeric", order=None, copy=False, force_all_finite=True,
ensure_2d=True, allow_nd=False, multi_output=False,
ensure_min_samples=1, ensure_min_features=1, y_numeric=False,
estimator=None):
"""Input validation for standard estimators.
Checks X and y for consistent length, enforces X to be 2D and y 1D. By
default, X is checked to be non-empty and containing only finite values.
Standard input checks are also applied to y, such as checking that y
does not have np.nan or np.inf targets. For multi-label y, set
multi_output=True to allow 2D and sparse y. If the dtype of X is
object, attempt converting to float, raising on failure.
Parameters
----------
X : nd-array, list or sparse matrix
Input data.
y : nd-array, list or sparse matrix
Labels.
accept_sparse : string, boolean or list of string (default=False)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. If the input is sparse but not in the allowed format,
it will be converted to the first listed format. True allows the input
to be any format. False means that a sparse matrix input will
raise an error.
accept_large_sparse : bool (default=True)
        If a CSR, CSC, COO or BSR sparse matrix is supplied and accepted by
        accept_sparse, accept_large_sparse=False will cause it to be accepted
        only if its indices are stored with a 32-bit dtype.
.. versionadded:: 0.20
dtype : string, type, list of types or None (default="numeric")
Data type of result. If None, the dtype of the input is preserved.
If "numeric", dtype is preserved unless array.dtype is object.
        If dtype is a list of types, conversion to the first type is only
performed if the dtype of the input is not in the list.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean or 'allow-nan', (default=True)
Whether to raise an error on np.inf, np.nan, pd.NA in X. This parameter
does not influence whether y can have np.inf, np.nan, pd.NA values.
The possibilities are:
- True: Force all values of X to be finite.
- False: accepts np.inf, np.nan, pd.NA in X.
- 'allow-nan': accepts only np.nan or pd.NA values in X. Values cannot
be infinite.
.. versionadded:: 0.20
``force_all_finite`` accepts the string ``'allow-nan'``.
.. versionchanged:: 0.23
Accepts `pd.NA` and converts it into `np.nan`
ensure_2d : boolean (default=True)
Whether to raise a value error if X is not 2D.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
multi_output : boolean (default=False)
Whether to allow 2D y (array or sparse matrix). If false, y will be
validated as a vector. y cannot have np.nan or np.inf values if
multi_output=True.
ensure_min_samples : int (default=1)
Make sure that X has a minimum number of samples in its first
axis (rows for a 2D array).
ensure_min_features : int (default=1)
Make sure that the 2D array has some minimum number of features
(columns). The default value of 1 rejects empty datasets.
This check is only enforced when X has effectively 2 dimensions or
is originally 1D and ``ensure_2d`` is True. Setting to 0 disables
this check.
y_numeric : boolean (default=False)
Whether to ensure that y has a numeric type. If dtype of y is object,
it is converted to float64. Should only be used for regression
algorithms.
estimator : str or estimator instance (default=None)
If passed, include the name of the estimator in warning messages.
Returns
-------
X_converted : object
The converted and validated X.
y_converted : object
The converted and validated y.
"""
if y is None:
raise ValueError("y cannot be None")
X = check_array(X, accept_sparse=accept_sparse,
accept_large_sparse=accept_large_sparse,
dtype=dtype, order=order, copy=copy,
force_all_finite=force_all_finite,
ensure_2d=ensure_2d, allow_nd=allow_nd,
ensure_min_samples=ensure_min_samples,
ensure_min_features=ensure_min_features,
estimator=estimator)
if multi_output:
y = check_array(y, accept_sparse='csr', force_all_finite=True,
ensure_2d=False, dtype=None)
else:
y = column_or_1d(y, warn=True)
_assert_all_finite(y)
if y_numeric and y.dtype.kind == 'O':
y = y.astype(np.float64)
check_consistent_length(X, y)
return X, y
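# Illustrative usage of check_X_y (a minimal sketch, not part of the original
# module; the toy data below is made up purely for demonstration):
# >>> X_checked, y_checked = check_X_y([[1, 2], [3, 4], [5, 6]], [0, 1, 0])
# >>> X_checked.shape, y_checked.shape
# ((3, 2), (3,))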
@_deprecate_positional_args
def column_or_1d(y, *, warn=False):
""" Ravel column or 1d numpy array, else raises an error
Parameters
----------
y : array-like
warn : boolean, default False
To control display of warnings.
Returns
-------
y : array
"""
y = np.asarray(y)
shape = np.shape(y)
if len(shape) == 1:
return np.ravel(y)
if len(shape) == 2 and shape[1] == 1:
if warn:
warnings.warn("A column-vector y was passed when a 1d array was"
" expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning, stacklevel=2)
return np.ravel(y)
raise ValueError(
"y should be a 1d array, "
"got an array of shape {} instead.".format(shape))
def check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
Parameters
----------
seed : None | int | instance of RandomState
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, numbers.Integral):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
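# Example (illustrative only):
# >>> rng = check_random_state(0)          # int seed -> new RandomState
# >>> isinstance(rng, np.random.RandomState)
# True
# >>> check_random_state(rng) is rng       # an existing RandomState passes through
# True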
def has_fit_parameter(estimator, parameter):
"""Checks whether the estimator's fit method supports the given parameter.
Parameters
----------
estimator : object
An estimator to inspect.
parameter : str
The searched parameter.
Returns
-------
is_parameter: bool
Whether the parameter was found to be a named parameter of the
estimator's fit method.
Examples
--------
>>> from sklearn.svm import SVC
>>> has_fit_parameter(SVC(), "sample_weight")
True
"""
return parameter in signature(estimator.fit).parameters
@_deprecate_positional_args
def check_symmetric(array, *, tol=1E-10, raise_warning=True,
raise_exception=False):
"""Make sure that array is 2D, square and symmetric.
If the array is not symmetric, then a symmetrized version is returned.
Optionally, a warning or exception is raised if the matrix is not
symmetric.
Parameters
----------
array : nd-array or sparse matrix
Input object to check / convert. Must be two-dimensional and square,
otherwise a ValueError will be raised.
tol : float
Absolute tolerance for equivalence of arrays. Default = 1E-10.
raise_warning : boolean (default=True)
If True then raise a warning if conversion is required.
raise_exception : boolean (default=False)
If True then raise an exception if array is not symmetric.
Returns
-------
array_sym : ndarray or sparse matrix
Symmetrized version of the input array, i.e. the average of array
and array.transpose(). If sparse, then duplicate entries are first
summed and zeros are eliminated.
"""
if (array.ndim != 2) or (array.shape[0] != array.shape[1]):
raise ValueError("array must be 2-dimensional and square. "
"shape = {0}".format(array.shape))
if sp.issparse(array):
diff = array - array.T
# only csr, csc, and coo have `data` attribute
if diff.format not in ['csr', 'csc', 'coo']:
diff = diff.tocsr()
symmetric = np.all(abs(diff.data) < tol)
else:
symmetric = np.allclose(array, array.T, atol=tol)
if not symmetric:
if raise_exception:
raise ValueError("Array must be symmetric")
if raise_warning:
warnings.warn("Array is not symmetric, and will be converted "
"to symmetric by average with its transpose.",
stacklevel=2)
if sp.issparse(array):
conversion = 'to' + array.format
array = getattr(0.5 * (array + array.T), conversion)()
else:
array = 0.5 * (array + array.T)
return array
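# Example (illustrative only): a non-symmetric dense array is replaced by the
# average of itself and its transpose; raise_warning=False silences the warning.
# >>> check_symmetric(np.array([[0., 1.], [2., 0.]]), raise_warning=False)
# array([[0. , 1.5],
#        [1.5, 0. ]])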
@_deprecate_positional_args
def check_is_fitted(estimator, attributes=None, *, msg=None, all_or_any=all):
"""Perform is_fitted validation for estimator.
Checks if the estimator is fitted by verifying the presence of
fitted attributes (ending with a trailing underscore) and otherwise
raises a NotFittedError with the given message.
This utility is meant to be used internally by estimators themselves,
typically in their own predict / transform methods.
Parameters
----------
estimator : estimator instance.
estimator instance for which the check is performed.
attributes : str, list or tuple of str, default=None
Attribute name(s) given as string or a list/tuple of strings
Eg.: ``["coef_", "estimator_", ...], "coef_"``
        If `None`, `estimator` is considered fitted if there exists an
        attribute that ends with an underscore and does not start with a
        double underscore.
msg : string
The default error message is, "This %(name)s instance is not fitted
yet. Call 'fit' with appropriate arguments before using this
estimator."
For custom messages if "%(name)s" is present in the message string,
it is substituted for the estimator name.
Eg. : "Estimator, %(name)s, must be fitted before sparsifying".
all_or_any : callable, {all, any}, default all
Specify whether all or any of the given attributes must exist.
Returns
-------
None
Raises
------
NotFittedError
If the attributes are not found.
"""
if isclass(estimator):
raise TypeError("{} is a class, not an instance.".format(estimator))
if msg is None:
msg = ("This %(name)s instance is not fitted yet. Call 'fit' with "
"appropriate arguments before using this estimator.")
if not hasattr(estimator, 'fit'):
raise TypeError("%s is not an estimator instance." % (estimator))
if attributes is not None:
if not isinstance(attributes, (list, tuple)):
attributes = [attributes]
attrs = all_or_any([hasattr(estimator, attr) for attr in attributes])
else:
attrs = [v for v in vars(estimator)
if v.endswith("_") and not v.startswith("__")]
if not attrs:
raise NotFittedError(msg % {'name': type(estimator).__name__})
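# Example (illustrative only; assumes scikit-learn estimators are importable):
# >>> from sklearn.linear_model import LogisticRegression
# >>> clf = LogisticRegression()
# >>> check_is_fitted(clf)
# Traceback (most recent call last):
# ...
# NotFittedError: This LogisticRegression instance is not fitted yet. Call
# 'fit' with appropriate arguments before using this estimator.
# >>> clf.fit([[0.], [1.]], [0, 1])
# LogisticRegression()
# >>> check_is_fitted(clf)                 # no exception once fitted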
def check_non_negative(X, whom):
"""
Check if there is any negative value in an array.
Parameters
----------
X : array-like or sparse matrix
Input data.
whom : string
Who passed X to this function.
"""
# avoid X.min() on sparse matrix since it also sorts the indices
if sp.issparse(X):
if X.format in ['lil', 'dok']:
X = X.tocsr()
if X.data.size == 0:
X_min = 0
else:
X_min = X.data.min()
else:
X_min = X.min()
if X_min < 0:
raise ValueError("Negative values in data passed to %s" % whom)
def check_scalar(x, name, target_type, *, min_val=None, max_val=None):
"""Validate scalar parameters type and value.
Parameters
----------
x : object
The scalar parameter to validate.
name : str
The name of the parameter to be printed in error messages.
target_type : type or tuple
Acceptable data types for the parameter.
min_val : float or int, optional (default=None)
The minimum valid value the parameter can take. If None (default) it
is implied that the parameter does not have a lower bound.
max_val : float or int, optional (default=None)
The maximum valid value the parameter can take. If None (default) it
is implied that the parameter does not have an upper bound.
Raises
    ------
TypeError
If the parameter's type does not match the desired type.
ValueError
If the parameter's value violates the given bounds.
"""
if not isinstance(x, target_type):
raise TypeError('`{}` must be an instance of {}, not {}.'
.format(name, target_type, type(x)))
if min_val is not None and x < min_val:
raise ValueError('`{}`= {}, must be >= {}.'.format(name, x, min_val))
if max_val is not None and x > max_val:
raise ValueError('`{}`= {}, must be <= {}.'.format(name, x, max_val))
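# Example (illustrative only; the parameter name "n_estimators" is arbitrary):
# >>> check_scalar(5, "n_estimators", int, min_val=1)     # valid, returns None
# >>> check_scalar(0.5, "n_estimators", int, min_val=1)
# Traceback (most recent call last):
# ...
# TypeError: `n_estimators` must be an instance of <class 'int'>, not <class 'float'>.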
def _check_psd_eigenvalues(lambdas, enable_warnings=False):
"""Check the eigenvalues of a positive semidefinite (PSD) matrix.
Checks the provided array of PSD matrix eigenvalues for numerical or
conditioning issues and returns a fixed validated version. This method
should typically be used if the PSD matrix is user-provided (e.g. a
Gram matrix) or computed using a user-provided dissimilarity metric
(e.g. kernel function), or if the decomposition process uses approximation
methods (randomized SVD, etc.).
It checks for three things:
- that there are no significant imaginary parts in eigenvalues (more than
1e-5 times the maximum real part). If this check fails, it raises a
``ValueError``. Otherwise all non-significant imaginary parts that may
remain are set to zero. This operation is traced with a
``PositiveSpectrumWarning`` when ``enable_warnings=True``.
- that eigenvalues are not all negative. If this check fails, it raises a
``ValueError``
- that there are no significant negative eigenvalues with absolute value
more than 1e-10 (1e-6) and more than 1e-5 (5e-3) times the largest
      positive eigenvalue in double (single) precision. If this check fails,
it raises a ``ValueError``. Otherwise all negative eigenvalues that may
remain are set to zero. This operation is traced with a
``PositiveSpectrumWarning`` when ``enable_warnings=True``.
Finally, all the positive eigenvalues that are too small (with a value
smaller than the maximum eigenvalue divided by 1e12) are set to zero.
This operation is traced with a ``PositiveSpectrumWarning`` when
``enable_warnings=True``.
Parameters
----------
lambdas : array-like of shape (n_eigenvalues,)
Array of eigenvalues to check / fix.
enable_warnings : bool, default=False
When this is set to ``True``, a ``PositiveSpectrumWarning`` will be
raised when there are imaginary parts, negative eigenvalues, or
extremely small non-zero eigenvalues. Otherwise no warning will be
raised. In both cases, imaginary parts, negative eigenvalues, and
extremely small non-zero eigenvalues will be set to zero.
Returns
-------
lambdas_fixed : ndarray of shape (n_eigenvalues,)
A fixed validated copy of the array of eigenvalues.
Examples
--------
>>> _check_psd_eigenvalues([1, 2]) # nominal case
array([1, 2])
>>> _check_psd_eigenvalues([5, 5j]) # significant imag part
Traceback (most recent call last):
...
ValueError: There are significant imaginary parts in eigenvalues (1
of the maximum real part). Either the matrix is not PSD, or there was
an issue while computing the eigendecomposition of the matrix.
>>> _check_psd_eigenvalues([5, 5e-5j]) # insignificant imag part
array([5., 0.])
>>> _check_psd_eigenvalues([-5, -1]) # all negative
Traceback (most recent call last):
...
ValueError: All eigenvalues are negative (maximum is -1). Either the
matrix is not PSD, or there was an issue while computing the
eigendecomposition of the matrix.
>>> _check_psd_eigenvalues([5, -1]) # significant negative
Traceback (most recent call last):
...
ValueError: There are significant negative eigenvalues (0.2 of the
maximum positive). Either the matrix is not PSD, or there was an issue
while computing the eigendecomposition of the matrix.
>>> _check_psd_eigenvalues([5, -5e-5]) # insignificant negative
array([5., 0.])
>>> _check_psd_eigenvalues([5, 4e-12]) # bad conditioning (too small)
array([5., 0.])
"""
lambdas = np.array(lambdas)
is_double_precision = lambdas.dtype == np.float64
# note: the minimum value available is
# - single-precision: np.finfo('float32').eps = 1.2e-07
# - double-precision: np.finfo('float64').eps = 2.2e-16
# the various thresholds used for validation
# we may wish to change the value according to precision.
significant_imag_ratio = 1e-5
significant_neg_ratio = 1e-5 if is_double_precision else 5e-3
significant_neg_value = 1e-10 if is_double_precision else 1e-6
small_pos_ratio = 1e-12
# Check that there are no significant imaginary parts
if not np.isreal(lambdas).all():
max_imag_abs = np.abs(np.imag(lambdas)).max()
max_real_abs = np.abs(np.real(lambdas)).max()
if max_imag_abs > significant_imag_ratio * max_real_abs:
raise ValueError(
"There are significant imaginary parts in eigenvalues (%g "
"of the maximum real part). Either the matrix is not PSD, or "
"there was an issue while computing the eigendecomposition "
"of the matrix."
% (max_imag_abs / max_real_abs))
# warn about imaginary parts being removed
if enable_warnings:
warnings.warn("There are imaginary parts in eigenvalues (%g "
"of the maximum real part). Either the matrix is not"
" PSD, or there was an issue while computing the "
"eigendecomposition of the matrix. Only the real "
"parts will be kept."
% (max_imag_abs / max_real_abs),
PositiveSpectrumWarning)
# Remove all imaginary parts (even if zero)
lambdas = np.real(lambdas)
# Check that there are no significant negative eigenvalues
max_eig = lambdas.max()
if max_eig < 0:
raise ValueError("All eigenvalues are negative (maximum is %g). "
"Either the matrix is not PSD, or there was an "
"issue while computing the eigendecomposition of "
"the matrix." % max_eig)
else:
min_eig = lambdas.min()
if (min_eig < -significant_neg_ratio * max_eig
and min_eig < -significant_neg_value):
raise ValueError("There are significant negative eigenvalues (%g"
" of the maximum positive). Either the matrix is "
"not PSD, or there was an issue while computing "
"the eigendecomposition of the matrix."
% (-min_eig / max_eig))
elif min_eig < 0:
# Remove all negative values and warn about it
if enable_warnings:
warnings.warn("There are negative eigenvalues (%g of the "
"maximum positive). Either the matrix is not "
"PSD, or there was an issue while computing the"
" eigendecomposition of the matrix. Negative "
"eigenvalues will be replaced with 0."
% (-min_eig / max_eig),
PositiveSpectrumWarning)
lambdas[lambdas < 0] = 0
# Check for conditioning (small positive non-zeros)
too_small_lambdas = (0 < lambdas) & (lambdas < small_pos_ratio * max_eig)
if too_small_lambdas.any():
if enable_warnings:
warnings.warn("Badly conditioned PSD matrix spectrum: the largest "
"eigenvalue is more than %g times the smallest. "
"Small eigenvalues will be replaced with 0."
"" % (1 / small_pos_ratio),
PositiveSpectrumWarning)
lambdas[too_small_lambdas] = 0
return lambdas
def _check_sample_weight(sample_weight, X, dtype=None):
"""Validate sample weights.
Note that passing sample_weight=None will output an array of ones.
Therefore, in some cases, you may want to protect the call with:
if sample_weight is not None:
sample_weight = _check_sample_weight(...)
Parameters
----------
sample_weight : {ndarray, Number or None}, shape (n_samples,)
Input sample weights.
X : nd-array, list or sparse matrix
Input data.
    dtype : dtype
dtype of the validated `sample_weight`.
If None, and the input `sample_weight` is an array, the dtype of the
input is preserved; otherwise an array with the default numpy dtype
       is allocated. If `dtype` is not one of `float32`, `float64`,
`None`, the output will be of dtype `float64`.
Returns
-------
sample_weight : ndarray, shape (n_samples,)
Validated sample weight. It is guaranteed to be "C" contiguous.
"""
n_samples = _num_samples(X)
if dtype is not None and dtype not in [np.float32, np.float64]:
dtype = np.float64
if sample_weight is None:
sample_weight = np.ones(n_samples, dtype=dtype)
elif isinstance(sample_weight, numbers.Number):
sample_weight = np.full(n_samples, sample_weight, dtype=dtype)
else:
if dtype is None:
dtype = [np.float64, np.float32]
sample_weight = check_array(
sample_weight, accept_sparse=False, ensure_2d=False, dtype=dtype,
order="C"
)
if sample_weight.ndim != 1:
raise ValueError("Sample weights must be 1D array or scalar")
if sample_weight.shape != (n_samples,):
raise ValueError("sample_weight.shape == {}, expected {}!"
.format(sample_weight.shape, (n_samples,)))
return sample_weight
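# Example (illustrative only; _check_sample_weight is a private helper):
# >>> X = np.zeros((3, 2))
# >>> _check_sample_weight(None, X)        # None becomes an array of ones
# array([1., 1., 1.])
# >>> _check_sample_weight(2.0, X)         # a scalar is broadcast to all samples
# array([2., 2., 2.])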
def _allclose_dense_sparse(x, y, rtol=1e-7, atol=1e-9):
"""Check allclose for sparse and dense data.
Both x and y need to be either sparse or dense, they
can't be mixed.
Parameters
----------
x : array-like or sparse matrix
First array to compare.
y : array-like or sparse matrix
Second array to compare.
rtol : float, optional
relative tolerance; see numpy.allclose
atol : float, optional
absolute tolerance; see numpy.allclose. Note that the default here is
more tolerant than the default for numpy.testing.assert_allclose, where
atol=0.
"""
if sp.issparse(x) and sp.issparse(y):
x = x.tocsr()
y = y.tocsr()
x.sum_duplicates()
y.sum_duplicates()
return (np.array_equal(x.indices, y.indices) and
np.array_equal(x.indptr, y.indptr) and
np.allclose(x.data, y.data, rtol=rtol, atol=atol))
elif not sp.issparse(x) and not sp.issparse(y):
return np.allclose(x, y, rtol=rtol, atol=atol)
raise ValueError("Can only compare two sparse matrices, not a sparse "
"matrix and an array")
def _check_fit_params(X, fit_params, indices=None):
"""Check and validate the parameters passed during `fit`.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Data array.
fit_params : dict
Dictionary containing the parameters passed at fit.
indices : array-like of shape (n_samples,), default=None
Indices to be selected if the parameter has the same size as `X`.
Returns
-------
fit_params_validated : dict
Validated parameters. We ensure that the values support indexing.
"""
from . import _safe_indexing
fit_params_validated = {}
for param_key, param_value in fit_params.items():
if (not _is_arraylike(param_value) or
_num_samples(param_value) != _num_samples(X)):
# Non-indexable pass-through (for now for backward-compatibility).
# https://github.com/scikit-learn/scikit-learn/issues/15805
fit_params_validated[param_key] = param_value
else:
# Any other fit_params should support indexing
# (e.g. for cross-validation).
fit_params_validated[param_key] = _make_indexable(param_value)
fit_params_validated[param_key] = _safe_indexing(
fit_params_validated[param_key], indices
)
return fit_params_validated
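# Example (illustrative only; _check_fit_params is a private helper):
# >>> X = np.zeros((4, 2))
# >>> params = {'sample_weight': np.array([1., 2., 3., 4.]), 'verbose': True}
# >>> out = _check_fit_params(X, params, indices=[0, 2])
# >>> out['sample_weight']                 # indexed along with X
# array([1., 3.])
# >>> out['verbose']                       # non-indexable values pass through
# True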
|
import itertools
import warnings
# 3rd-party modules
import holoviews as hv
from holoviews import opts, dim
from holoviews.operation.datashader import datashade, bundle_graph
import networkx as nx
import pandas as pd
# My handwritten modules
from .s3_utils import savefig
from . import knn
from . import sourmash_utils
# don't warn me about too many figures open
import matplotlib.pyplot as plt
plt.rcParams.update({'figure.max_open_warning': 0})
KSIZES = 9, 12, 15, 21
LOG2SKETCHSIZES = 10, 12, 14, 16
MOLECULES = 'dna', 'protein'
COLOR_COLS = ['species', 'cell_label', ]
PALETTES = dict(species='Set2', cell_label='tab20')
SKETCH_ID_TEMPLATE = 'molecule-{molecule}_ksize-{ksize}_log2sketchsize-{log2sketchsize}'
N_NEIGHBORS = 5
def build_graph_and_plot(data, metadata, n_neighbors, color_cols, palettes,
figure_folder, figure_prefix, title):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
graph = knn.nearest_neighbor_graph(data, metadata,
n_neighbors=n_neighbors,
color_cols=color_cols,
palettes=palettes)
pos = nx.spring_layout(graph, seed=0)
for label in color_cols:
fig, ax = plt.subplots()
with warnings.catch_warnings():
warnings.simplefilter("ignore")
knn.draw_graph(graph, edge_color='black', label_col=label, pos=pos)
ax.set_title(title)
figure_suffix = f'graph_nneighbors-{n_neighbors}_colorby-{label}'
png = f'{figure_folder}/{figure_prefix}_{figure_suffix}.png'
savefig(fig, png, dpi=150)
return graph, pos
def get_similarity_graphs(csv_template, metadata, figure_folder,
groupby='species', ksizes=KSIZES,
log2sketchsizes=LOG2SKETCHSIZES, molecules=MOLECULES,
sketch_id_template=SKETCH_ID_TEMPLATE,
n_neighbors=N_NEIGHBORS, plaidplot=False,
palettes=PALETTES, color_cols=COLOR_COLS,
verbose=False, make_within_groupby_graphs=False):
"""Read similarity csvs and create holoviews graphs
Parameters
----------
csv_template : str
format-string to insert molecule, ksize, and log2sketchsize values
into to get csv. e.g.:
'similarities_molecule-{molecule}_ksize-{ksize}_log2sketchsize-{log2sketchsize}.csv'
metadata : pandas.DataFrame
Sample-by-feature metadata encoding additional information about
samples, such as species, cell type label, or tissue
groupby : str
Which column of the metadata to groupby to get sub-graphs for
ksizes : tuple of int
Which k-mer sizes to look for similarity files for,
default (9, 12, 15, 21)
log2sketchsizes : tuple of int
Which log2 sketch sizes to look for similarity files for,
default (10, 12, 14, 16)
molecules : tuple of str
Which molecules to use, default both 'dna' and 'protein'
sketch_id_template : str
String to use as a unique identifier for the sketch, e.g.
'molecule-{molecule}_ksize-{ksize}_log2sketchsize-{log2sketchsize}'
plaidplot : bool
If true, make a clustered heatmap with the sides labeled with the
color_cols
palettes : dict
Column name (must be in 'metadata') to palette name mapping
color_cols : list
Column names in 'metadata' to color by
Returns
-------
graph_dict : dict of holoviews.Graph
(molecule, ksize, log2sketchsize) : holoviews.Graph mapping for all
similarity matrices found. To be used by 'draw_holoviews_graphs'
"""
# Strip the final slash because it makes s3 stuff weird
figure_folder = figure_folder.rstrip('/')
iterable = itertools.product(molecules, ksizes, log2sketchsizes)
graph_dict = {}
categories = metadata[color_cols]
for molecule, ksize, log2sketchsize in iterable:
template_kwargs = dict(molecule=molecule, ksize=ksize,
log2sketchsize=log2sketchsize)
sketch_id = sketch_id_template.format(**template_kwargs)
if verbose:
print(sketch_id.replace('-', ": ").replace("_", ", "))
csv = csv_template.format(**template_kwargs)
try:
similarities = pd.read_csv(csv)
except FileNotFoundError:
warnings.warn(f"file {csv} not found")
# File doesn't exist yet
continue
similarities.index = similarities.columns
if verbose:
print(f"\tsimilarities.shape: {similarities.shape}")
title = f"molecule: {molecule}, ksize: {ksize}, " \
f"log2sketchsize: {log2sketchsize}"
if plaidplot:
try:
g = sourmash_utils.plaidplot(similarities,
metric='cosine',
row_categories=categories,
col_categories=categories,
row_palette=palettes,
col_palette=palettes)
g.fig.suptitle(title)
png = f'{figure_folder}/{sketch_id}_plaidplot.png'
savefig(g, png, dpi=150)
except FloatingPointError:
warnings.warn("\tCouldn't compute linkage -- no plaidplot " \
"generated")
graph, pos = build_graph_and_plot(similarities, metadata,
n_neighbors, color_cols, palettes,
figure_folder,
sketch_id, title)
# hv.extension('matplotlib')
graph_hv = hv.Graph.from_networkx(graph, pos)
graph_hv = graph_hv.opts(node_size=10, edge_line_width=1, cmap='Set2',
node_color=dim(groupby),
node_line_color='gray')
bundled = bundle_graph(graph_hv)
# hv.save(bundled, '.pdf', backend='matplotlib')
graph_dict[(molecule, ksize, log2sketchsize)] = bundled
if make_within_groupby_graphs:
# make within-group (e.g. within-species) graphs
for species, df in metadata.groupby(groupby):
data = similarities.loc[df.index, df.index]
figure_prefix = f"{sketch_id}_{species}"
graph_title = f"{title} ({species})"
build_graph_and_plot(
data, df, n_neighbors, color_cols, palettes, figure_folder,
figure_prefix, graph_title)
return graph_dict
def draw_holoviews_graphs(graph_dict):
# use first key to determine default settings
first_key = list(graph_dict.keys())[0]
molecule, ksize, log2sketchsize = first_key
hv.extension('bokeh')
defaults = dict(width=400, height=400, padding=0.1)
hv.opts.defaults(
opts.EdgePaths(**defaults), opts.Graph(**defaults),
opts.Nodes(**defaults))
kdims = [
hv.Dimension(('molecule', "molecule"), default=molecule),
hv.Dimension(('ksize', "k-mer size"), default=ksize),
        hv.Dimension(('log2_num_hashes', r"$\log_2$ num hashes"),
                     default=log2sketchsize),
]
kwargs = dict(width=800, height=800, xaxis=None, yaxis=None)
opts.defaults(opts.Nodes(**kwargs), opts.Graph(**kwargs))
kwargs = dict(node_size=10, edge_line_width=1, cmap='Set2',
node_color=dim("species"),
node_line_color='gray', width=600, height=600, xaxis=None,
yaxis=None)
holomap = hv.HoloMap(graph_dict, kdims=kdims)
holomap.opts(opts.Graph(**kwargs))
return holomap
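# Example driver (a sketch only -- 'metadata.csv' and the 'figures' folder
# below are hypothetical placeholders, not paths used by this module):
# metadata = pd.read_csv('metadata.csv', index_col=0)
# graph_dict = get_similarity_graphs(
#     'similarities_' + SKETCH_ID_TEMPLATE + '.csv', metadata,
#     figure_folder='figures', n_neighbors=N_NEIGHBORS)
# holomap = draw_holoviews_graphs(graph_dict)
# hv.save(holomap, 'similarity_graphs.html')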
|
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.metadataConfig = exports.lyricConfig = exports.cliOptions = void 0;
const yargs_1 = __importDefault(require("yargs"));
const helpers_1 = require("yargs/helpers");
const core_config_1 = require("../core/core-config");
const debug_1 = require("../core/debug");
const config_file_1 = require("./config-file");
const options = (0, yargs_1.default)((0, helpers_1.hideBin)(process.argv))
.parserConfiguration({
"short-option-groups": false,
})
.option('cover', {
alias: 'c',
type: 'boolean',
default: false,
    description: 'Whether to save the cover art as a separate file',
})
.option('debug', {
alias: 'd',
type: 'boolean',
default: false,
    description: 'Whether to enable debug mode with more verbose log output',
})
.option('source', {
alias: 's',
type: 'string',
default: 'thb-wiki',
choices: ['thb-wiki', 'local-mp3', 'local-json'],
    description: 'Set the data source',
})
.option('lyric', {
alias: 'l',
type: 'boolean',
default: false,
    description: 'Whether to write lyrics (increases run time)',
})
.option('lyric-type', {
alias: 'lt',
type: 'string',
default: 'original',
choices: ['original', 'translated', 'mixed'],
    description: 'Lyric type: original, translated, or mixed',
})
.option('lyric-output', {
alias: 'lo',
type: 'string',
default: 'metadata',
choices: ['metadata', 'lrc'],
    description: 'Lyric output: write into the song metadata or save as an lrc file',
})
.option('translation-separator', {
alias: 'ts',
type: 'string',
default: ' // ',
    description: 'Separator used between original and translated lines in mixed-lyric mode',
})
.option('lyric-time', {
alias: 'lt',
type: 'boolean',
default: true,
    description: 'Whether to include lyric timestamps',
})
.option('batch', {
alias: 'b',
type: 'string',
    description: 'Whether to use batch mode; the value is the path where the batch run starts',
})
.option('batch-depth', {
alias: 'bd',
type: 'number',
default: 1,
    description: 'Folder depth used in batch mode',
})
.option('separator', {
type: 'string',
default: core_config_1.DefaultMetadataSeparator,
    description: 'Separator for mp3 metadata fields',
})
.option('timeout', {
type: 'number',
default: 30,
    description: 'Timeout for a single run',
})
.option('retry', {
type: 'number',
default: 3,
    description: 'Maximum number of automatic retries after a timeout',
})
.option('interactive', {
alias: 'i',
type: 'boolean',
default: true,
    description: 'Whether to allow interactive prompts',
})
.parseSync();
(0, debug_1.setDebug)(options.debug);
const configFile = (0, config_file_1.loadConfigFile)();
if (configFile !== null) {
(0, debug_1.log)('config file: ', configFile);
const { lyric, ...restConfig } = configFile;
if (lyric !== undefined) {
if (options.lyricOutput === undefined) {
options.lyricOutput = lyric.output;
}
if (options.lyricType === undefined) {
options.lyricType = lyric.type;
}
options.translationSeparator = lyric.translationSeparator;
}
Object.assign(options, restConfig);
}
const lyric = {
type: options.lyricType,
output: options.lyricOutput,
time: options.lyricTime,
translationSeparator: options.translationSeparator,
};
const metadata = {
lyric: options.lyric ? lyric : undefined,
separator: options.separator,
timeout: options.timeout,
retry: options.retry,
};
(0, debug_1.log)(options);
(0, debug_1.log)(metadata);
(0, config_file_1.saveConfigFile)({ ...metadata, lyric });
exports.cliOptions = options;
exports.lyricConfig = lyric;
exports.metadataConfig = metadata;
|
/* eslint-disable */
var path = require("path");
var express = require("express");
var webpack = require("webpack");
var config = require("./webpack.config");
var app = express();
var compiler = webpack(config);
var serverPort = process.env.PORT || 3000;
app.use(require("webpack-dev-middleware")(compiler, {
publicPath: config.output.publicPath
}));
app.use(require("webpack-hot-middleware")(compiler));
app.use(express.static('assets'));
app.get("*", function(req, res) {
res.sendFile(path.join(__dirname, "index.html"));
});
app.listen(serverPort, "localhost", function (err) {
if (err) {
console.log(err);
return;
}
console.log("Listening at http://localhost:" + serverPort);
});
|
/*!
* fullpage.js Reset Scroll Overflow 0.0.3 for fullPage.js v3
* https://github.com/alvarotrigo/fullPage.js
*
* @license This code has been bought from www.alvarotrigo.com/fullPage/extensions/ and it is not free to use or distribute.
* Copyright (C) 2016 alvarotrigo.com - A project by Alvaro Trigo
*/
window.fp_scrollOverflowResetExtension=function(){var s=this,o=window.fp_utils,t=window.fp_utils.$,e=window.fullpage_api.getFullpageData(),n=e.options,l=e.internals;s.reset=function(){var e=o.hasClass(s.prevDestiny,"fp-slide");if(s.prevDestiny&&(1==n.scrollOverflowReset||"slides"==n.scrollOverflowReset&&e||"sections"==n.scrollOverflowReset&&!e)){var l=t(".fp-scrollable",s.prevDestiny);null!=l&&l.forEach(function(e){var l=e.fp_iscrollInstance;null!=l&&l.scrollTo(0,0)})}},s.setPrevious=function(e){s.prevDestiny=e},s.c=l.c;var r=s["common".charAt(0)];return"complete"===document.readyState&&r("scrollOverflowReset"),window.addEventListener("load",function(){r("scrollOverflowReset")}),s}; |
/**
* Created by Игорь on 14.09.2018.
*/
var cache=require('../../lib/cache');
var request=require('request');
var moment=require('moment')
var path=require('path');
var zlib = require('zlib');
var isWin = /^win/.test(process.platform);
/*if(!isWin){
var mongoose = require('mongoose');
var Account=mongoose.model('Account');
var VirtualAccount=mongoose.model('VirtualAccount');
}*/
function activateBookkeep(store) {
return new Promise(async function(resolve,reject){
if(false){
            if(cache.stores[store._id] && cache.stores[store._id].dbDataBookkeep && cache.stores[store._id].dbDataBookkeep.exp && moment().unix()< cache.stores[store._id].dbDataBookkeep.exp){
zlib.unzip(cache.stores[store._id].dbDataBookkeep.data, function(err, buffer) {
console.log('from buffer cached dbDataBookkeep')
if (!err) {
try{
return resolve(JSON.parse(buffer))
}catch(err){reject(err)}
}else{
reject(err)
}
});
return;
}
            let o;
            try{
                let data = await getAccounts(store)
                o={
                    accounts:data[0],
                    virtualAccounts:data[1]
                }
zlib.deflate(JSON.stringify(o), function(err, buffer) {
if (!err) {
if(!cache.stores){cache.stores={}}
if(!cache.stores[store._id]){cache.stores[store._id]={}}
if(!cache.stores[store._id].dbDataBookkeep){cache.stores[store._id].dbDataBookkeep={}}
cache.stores[store._id].dbDataBookkeep.data=buffer;
let seconds = (store.cache && store.cache.dbDataBookkeep)?store.cache.dbDataBookkeep:100000;
console.log('seconds',seconds)
cache.stores[store._id].dbDataBookkeep.exp=moment().add(seconds, 'seconds').unix()
}else {
console.log(err)
}
});
}catch(err){
return reject(err)
}
return resolve(o);
}else{
if(cache.stores[store._id] && cache.stores[store._id].dbDataBookkeep && cache.stores[store._id].dbDataBookkeep.exp && moment().unix()< cache.stores[store._id].dbDataBookkeep.exp){
zlib.unzip(cache.stores[store._id].dbDataBookkeep.data, function(err, buffer) {
console.log('from buffer cached dbDataBookkeep')
if (!err) {
try{
return resolve(JSON.parse(buffer))
}catch(err){reject(err)}
}else{
return reject(err)
}
});
return
}
let url =store.bookkeepHost+"/api/bookkeep/getAccounts?store="+store._id+"&subDomain="+store.subDomain+"&lang="+store.lang;
console.log(url)
let headers = {
"accept-charset" : "ISO-8859-1,utf-8;q=0.7,*;q=0.3",
"accept" : "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8;application/json;charset=utf-8,*/*",
"user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2",
"accept-encoding" : "gzip",
'Accept-Encoding': 'gzip',
};
let options = {
uri: url,
json: true,
method: 'GET',
timeout: 1500,
headers:headers
/* href: '',
pathname: '/'*/
};
let req = request.get(options);
req.on('response', function(res) {
var chunks = [];
res.on('data', function(chunk) {
chunks.push(chunk);
});
res.on('end', function() {
var buffer = Buffer.concat(chunks);
var encoding = res.headers['content-encoding'];
//console.log(buffer.toString())
zlib.unzip(buffer, function(err, unzipbuffer) {
if (!err) {
try{
let results =JSON.parse(unzipbuffer.toString('utf8'));
var o={
accounts:results[0],
virtualAccounts:results[1],
}
zlib.deflate(JSON.stringify(o), function(err, buffer) {
if (!err) {
if(!cache.stores){cache.stores={}}
if(!cache.stores[store._id]){cache.stores[store._id]={}}
if(!cache.stores[store._id].dbDataBookkeep){cache.stores[store._id].dbDataBookkeep={}}
cache.stores[store._id].dbDataBookkeep.data=buffer;
let seconds = (store.cache && store.cache.dbDataBookkeep)?store.cache.dbDataBookkeep:100000;
console.log('seconds',seconds)
cache.stores[store._id].dbDataBookkeep.exp=moment().add(seconds, 'seconds').unix()
}else {
console.log(err)
}
});
return resolve(o);
}catch(err){
console.log('error in try',err)
return reject(err)
}
}else{
console.log('err',err)
reject(err)
}
});
});
res.on('error', function(err) {
console.log('msg',err)
//reject(err)
})
});
}
})
}
module.exports = activateBookkeep;
|
import cv2
import numpy as np
# Read the image in color mode (flag 1 = cv2.IMREAD_COLOR); imread returns None if the file cannot be read
img = cv2.imread("face1.jpg", 1)
print(img) |
(window.webpackJsonp=window.webpackJsonp||[]).push([[62],{222:function(t,a,s){"use strict";s.r(a);var r=s(0),e=Object(r.a)({},function(){this.$createElement;this._self._c;return this._m(0)},[function(){var t=this,a=t.$createElement,s=t._self._c||a;return s("div",{staticClass:"content"},[s("h1",{attrs:{id:"props"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#props","aria-hidden":"true"}},[t._v("#")]),t._v(" Props")]),t._v(" "),s("h4",{attrs:{id:"_1、和props相关"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_1、和props相关","aria-hidden":"true"}},[t._v("#")]),t._v(" 1、和props相关")]),t._v(" "),s("ul",[s("li",[t._v("只读,不准修改")]),t._v(" "),s("li",[t._v("componentWillReceiveProps / 生命周期")]),t._v(" "),s("li",[t._v("propTypes")]),t._v(" "),s("li",[t._v("defaultProps")]),t._v(" "),s("li",[t._v("children")])]),t._v(" "),s("h4",{attrs:{id:"_2、类型检查"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_2、类型检查","aria-hidden":"true"}},[t._v("#")]),t._v(" 2、类型检查")]),t._v(" "),s("div",{staticClass:"language- extra-class"},[s("pre",{pre:!0,attrs:{class:"language-text"}},[s("code",[t._v("import PropTypes from 'prop-types';\n\nList.propTypes = {\n list:PropTypes.array/object/number/string\n}\n")])])]),s("h4",{attrs:{id:"_3、默认值"}},[s("a",{staticClass:"header-anchor",attrs:{href:"#_3、默认值","aria-hidden":"true"}},[t._v("#")]),t._v(" 3、默认值")]),t._v(" "),s("div",{staticClass:"language- extra-class"},[s("pre",{pre:!0,attrs:{class:"language-text"}},[s("code",[t._v("List.defaultProps = {\n list:[{},{},{}]\n}\n")])])])])}],!1,null,null,null);a.default=e.exports}}]); |
from numpy import *
from pylab import *
import postgkyl
import scipy.optimize
style.use('../code/postgkyl.mplstyle')
def func(an, f0, f1):
g0 = an[0]; g1 = an[1]
rhs0 = ((exp(g1+g0))/(g1)-(exp(g0-g1))/(g1))/(sqrt(2.0))
rhs1 = (sqrt(3)*(((exp(g0)*g1-exp(g0))*exp(g1))/(g1**2)+((exp(g0)*g1+exp(g0))*exp(-g1))/(g1**2)))/(sqrt(2))
return rhs0-f0, rhs1-f1
def fitExp(f0, f1):
aout = scipy.optimize.fsolve(func, [1.0, 0.01], args=(f0, f1))
return aout[0], aout[1]
def getExpRecon(pre, fr):
d = postgkyl.GData("%s-relax_neut_%d.bp" % (pre, fr))
q = d.getValues()
f0 = q[0,:,0]
f1 = q[0,:,2]
gcoeff = zeros((f0.shape[0],2), float)
for i in range(f0.shape[0]):
gcoeff[i][0], gcoeff[i][1] = fitExp(f0[i], f1[i])
return gcoeff
def evalLin(f0, f1, X):
return f0/sqrt(2.0) + sqrt(3.0/2.0)*f1*X
def evalExp(g0, g1, X):
return exp(g0+g1*X)
vlo = -6.0
vup = 6.0
d = postgkyl.GData("r1-relax_neut_100.bp")
q = d.getValues()
f0 = q[0,:,0]
f1 = q[0,:,2]
X = linspace(-1, 1, 20) # for interpolation
dx = 12/f0.shape[0]
figure(1)
for i in range(f0.shape[0]):
v = linspace(-6.0+i*dx, -6.0+(i+1)*dx, X.shape[0])
plot(v, evalLin(f0[i], f1[i], X), 'r-')
for i in range(2, 6):
g0, g1 = fitExp(f0[i], f1[i])
v = linspace(-6.0+i*dx, -6.0+(i+1)*dx, X.shape[0])
plot(v, evalExp(g0, g1, X), 'k-')
|
/* @flow */
import { inBrowser } from './dom'
import { saveScrollPosition } from './scroll'
// Check whether the browser supports history.pushState
export const supportsPushState = inBrowser && (function () {
const ua = window.navigator.userAgent
  // These stock Android browsers claim support but have a buggy pushState, so treat them as unsupported
if (
(ua.indexOf('Android 2.') !== -1 || ua.indexOf('Android 4.0') !== -1) &&
ua.indexOf('Mobile Safari') !== -1 &&
ua.indexOf('Chrome') === -1 &&
ua.indexOf('Windows Phone') === -1
) {
return false
}
  // the 'in' operator checks whether pushState exists on window.history
return window.history && 'pushState' in window.history
})()
// use User Timing api (if present) for more accurate key precision
// Prefer window.performance when available: performance.now() gives sub-millisecond precision relative to page load
// Unlike Date.now(), performance.now() increases at a constant rate and is not affected by adjustments to the system clock
const Time = inBrowser && window.performance && window.performance.now
? window.performance
: Date
// Generate a unique key at initialization
let _key: string = genKey()
// Use the current timestamp as the key
function genKey (): string {
  // Convert to a string, keeping three decimal places
return Time.now().toFixed(3)
}
export function getStateKey () {
return _key
}
// Update the key (getStateKey above is the getter, this is the setter)
export function setStateKey (key: string) {
_key = key
}
export function pushState (url?: string, replace?: boolean) {
  // Save the current scroll position
saveScrollPosition()
// try...catch the pushState call to get around Safari
// DOM Exception 18 where it limits to 100 pushState calls
const history = window.history
try {
    // replace or push
if (replace) {
history.replaceState({ key: _key }, '', url)
} else {
      // For push, generate a new key to identify the new history entry; it is used later on popstate
_key = genKey()
history.pushState({ key: _key }, '', url)
}
} catch (e) {
    // location.assign is the function form of setting location.href
window.location[replace ? 'replace' : 'assign'](url)
}
}
// Calling pushState with the second argument set to true performs a replace
export function replaceState (url?: string) {
pushState(url, true)
}
|
import dataclasses
import pandas as pd
from lib.akibasouken.scrape.anime import Anime
def long_text_to_df(anime: Anime) -> pd.DataFrame:
data = {
'anime_id': anime.anime_id,
**dataclasses.asdict(anime.long_text),
}
return pd.Series(data).to_frame().T
|