id (string) | text (string) | dataset_id (string) |
---|---|---|
/htmd-pdb2pqr-0.0.2.tar.gz/htmd-pdb2pqr-0.0.2/pdb2pqr/main.py
|
__date__ = "5 April 2010"
__author__ = "Todd Dolinsky, Nathan Baker, Jens Nielsen, Paul Czodrowski, Jan Jensen, Samir Unni, Yong Huang"
from optparse import OptionParser, OptionGroup
import os
import sys
import time
from io import StringIO
from .src import utilities
from .src.errors import PDB2PQRError
from .src.hydrogens import *
# NOTE: names such as HEADER, Definition, Protein, Routines and Forcefield
# appear to come from additional wildcard imports in the original package
# (e.g. from .src.pdb, .src.definitions) that are not shown here.
__version__ = "FIXME"
from . import extensions
def getOldHeader(pdblist):
oldHeader = StringIO()
headerTypes = (HEADER, TITLE, COMPND, SOURCE,
KEYWDS, EXPDTA, AUTHOR, REVDAT,
JRNL, REMARK, SPRSDE, NUMMDL)
for pdbObj in pdblist:
if not isinstance(pdbObj,headerTypes):
break
oldHeader.write(str(pdbObj))
oldHeader.write('\n')
return oldHeader.getvalue()
def printPQRHeader(pdblist,
atomlist,
reslist,
charge,
ff,
warnings,
ph_calc_method,
pH,
ffout,
cl_args,
include_old_header = False):
"""
Print the header for the PQR file
Parameters:
atomlist: A list of atoms that were unable to have
charges assigned (list)
reslist: A list of residues with non-integral charges
(list)
charge: The total charge on the protein (float)
ff: The forcefield name (string)
warnings: A list of warnings generated from routines (list)
pH : pH value, if any. (float)
ffout : ff used for naming scheme (string)
cl_args : the command line argument used when running pdb2pqr (string)
Returns
header: The header for the PQR file (string)
"""
if ff is None:
ff = 'User force field'
else:
ff = ff.upper()
header = "REMARK 1 PQR file generated by PDB2PQR (Version %s)\n" % __version__
header = header + "REMARK 1\n"
if cl_args is not None:
header += "REMARK 1 Command line used to generate this file:\n"
header += "REMARK 1 " + cl_args + "\n"
header = header + "REMARK 1\n"
header = header + "REMARK 1 Forcefield Used: %s\n" % ff
if not ffout is None:
header = header + "REMARK 1 Naming Scheme Used: %s\n" % ffout
header = header + "REMARK 1\n"
if ph_calc_method is not None:
header = header + "REMARK 1 pKas calculated by %s and assigned using pH %.2f\n" % (ph_calc_method, pH)
header = header + "REMARK 1\n"
for warning in warnings:
header = header + "REMARK 5 " + warning
header = header + "REMARK 5\n"
if len(atomlist) != 0:
header += "REMARK 5 WARNING: PDB2PQR was unable to assign charges\n"
header += "REMARK 5 to the following atoms (omitted below):\n"
for atom in atomlist:
header += "REMARK 5 %i %s in %s %i\n" % \
(atom.get("serial"), atom.get("name"), \
atom.get("residue").get("name"), \
atom.get("residue").get("resSeq"))
header += "REMARK 5 This is usually due to the fact that this residue is not\n"
header += "REMARK 5 an amino acid or nucleic acid; or, there are no parameters\n"
header += "REMARK 5 available for the specific protonation state of this\n"
header += "REMARK 5 residue in the selected forcefield.\n"
header += "REMARK 5\n"
if len(reslist) != 0:
header += "REMARK 5 WARNING: Non-integral net charges were found in\n"
header += "REMARK 5 the following residues:\n"
for residue in reslist:
header += "REMARK 5 %s - Residue Charge: %.4f\n" % \
(residue, residue.getCharge())
header += "REMARK 5\n"
header += "REMARK 6 Total charge on this protein: %.4f e\n" % charge
header += "REMARK 6\n"
if include_old_header:
header += "REMARK 7 Original PDB header follows\n"
header += "REMARK 7\n"
header += getOldHeader(pdblist)
return header
def runPDB2PQR(pdblist, ff,
outname = "",
ph = None,
verbose = False,
selectedExtensions = [],
extensionOptions = utilities.ExtraOptions(),
ph_calc_method = None,
ph_calc_options = None,
clean = False,
neutraln = False,
neutralc = False,
ligand = None,
assign_only = False,
chain = False,
drop_water = False,
debump = True,
opt = True,
typemap = False,
userff = None,
usernames = None,
ffout = None,
holdList = None,
commandLine = None,
include_old_header = False):
"""
Run the PDB2PQR Suite
Arguments:
pdblist: The list of objects that was read from the PDB file
given as input (list)
ff: The name of the forcefield (string)
Keyword Arguments:
outname: The name of the desired output file
ph: The desired ph of the system (float)
verbose: When True, script will print information to stdout
When False, no detailed information will be printed (bool)
selectedExtensions: List of extensions to run
extensionOptions: optionParser-like option object that is passed to each extension.
ph_calc_method: pKa calculation method ("propka","propka31","pdb2pka")
ph_calc_options: optionParser like option object for propka30.
clean: only return original PDB file in aligned format.
neutraln: Make the N-terminus of this protein neutral
neutralc: Make the C-terminus of this protein neutral
ligand: Calculate the parameters for the ligand in mol2 format at the given path.
assign_only: Only assign charges and radii - do not add atoms, debump, or optimize.
chain: Keep the chain ID in the output PQR file
drop_water: Remove water molecules from output
debump: When True, debump heavy atoms (bool)
opt: When True, run hydrogen optimization (bool)
typemap: Create Typemap output.
userff: The user created forcefield file to use. Overrides ff.
usernames: The user created names file to use. Required if using userff.
ffout: Instead of using the standard canonical naming scheme for residue and atom names, +
use the names from the given forcefield
commandLine: command line used (if any) to launch the program. Included in output header.
include_old_header: Include most of the PDB header in output.
holdList: A list of residues not to be optimized, as [(resid, chain, icode)]
Returns
header: The PQR file header (string)
lines: The PQR file atoms (list)
missedligandresidues: A list of ligand residue names whose charges could
not be assigned (list)
protein: The protein object
"""
pkaname = ""
lines = []
Lig = None
atomcount = 0 # Count the number of ATOM records in pdb
outroot = utilities.getPQRBaseFileName(outname)
if ph_calc_method == 'propka':
pkaname = outroot + ".propka"
#TODO: What? Shouldn't it be up to propka on how to handle this?
if os.path.isfile(pkaname):
os.remove(pkaname)
start = time.time()
if verbose:
print("Beginning PDB2PQR...\n")
myDefinition = Definition()
if verbose:
print("Parsed Amino Acid definition file.")
if drop_water:
# Remove the waters
pdblist_new = []
for record in pdblist:
if isinstance(record, (HETATM, ATOM, SIGATM, SEQADV)):
if record.resName in WAT.water_residue_names:
continue
pdblist_new.append(record)
pdblist = pdblist_new
# Check for the presence of a ligand! This code is taken from pdb2pka/pka.py
if not ligand is None:
from pdb2pka.ligandclean import ligff
myProtein, myDefinition, Lig = ligff.initialize(myDefinition, ligand, pdblist, verbose)
for atom in myProtein.getAtoms():
if atom.type == "ATOM":
atomcount += 1
else:
myProtein = Protein(pdblist, myDefinition)
if verbose:
print("Created protein object -")
print("\tNumber of residues in protein: %s" % myProtein.numResidues())
print("\tNumber of atoms in protein : %s" % myProtein.numAtoms())
myRoutines = Routines(myProtein, verbose)
for residue in myProtein.getResidues():
multoccupancy = 0
for atom in residue.getAtoms():
if atom.altLoc != "":
multoccupancy = 1
txt = "Warning: multiple occupancies found: %s in %s\n" % (atom.name, residue)
# sys.stdout.write(txt)
if multoccupancy == 1:
myRoutines.warnings.append("WARNING: multiple occupancies found in %s,\n" % (residue))
myRoutines.warnings.append(" at least one of the instances is being ignored.\n")
myRoutines.setTermini(neutraln, neutralc)
myRoutines.updateBonds()
if clean:
header = ""
lines = myProtein.printAtoms(myProtein.getAtoms(), chain)
# Process the extensions
for ext in selectedExtensions:
module = extensions.extDict[ext]
#TODO: figure out a way to do this without crashing...
#tempRoutines = copy.deepcopy(myRoutines)
module.run_extension(myRoutines, outroot, extensionOptions)
if verbose:
print("Total time taken: %.2f seconds\n" % (time.time() - start))
#Be sure to include None for missed ligand residues
return dict(header=header, lines=lines, missedligands=None)
#remove any future need to convert to lower case
if not ff is None:
ff = ff.lower()
if not ffout is None:
ffout = ffout.lower()
if not assign_only:
# It is OK to process ligands with no ATOM records in the pdb
if atomcount == 0 and Lig != None:
pass
else:
myRoutines.findMissingHeavy()
myRoutines.updateSSbridges()
if debump:
myRoutines.debumpProtein()
if ph_calc_method == 'propka':
myRoutines.runPROPKA(ph, ff, outroot, pkaname, ph_calc_options, version=30)
elif ph_calc_method == 'propka31':
myRoutines.runPROPKA(ph, ff, outroot, pkaname, ph_calc_options, version=31)
elif ph_calc_method == 'pdb2pka':
myRoutines.runPDB2PKA(ph, ff, pdblist, ligand, verbose, ph_calc_options)
myRoutines.addHydrogens()
myhydRoutines = hydrogenRoutines(myRoutines)
if debump:
myRoutines.debumpProtein()
if opt:
myhydRoutines.setOptimizeableHydrogens()
# TONI fixing residues - myhydRoutines has a reference to myProtein, so i'm altering it in place
myRoutines.holdResidues(holdList)
myhydRoutines.initializeFullOptimization()
myhydRoutines.optimizeHydrogens()
else:
myhydRoutines.initializeWaterOptimization()
myhydRoutines.optimizeHydrogens()
# Special for GLH/ASH, since both conformations were added
myhydRoutines.cleanup()
else: # Special case for HIS if using assign-only
for residue in myProtein.getResidues():
if isinstance(residue, HIS):
myRoutines.applyPatch("HIP", residue)
myRoutines.setStates()
myForcefield = Forcefield(ff, myDefinition, userff, usernames)
hitlist, misslist = myRoutines.applyForcefield(myForcefield)
ligsuccess = 0
if not ligand is None:
# If this is independent, we can assign charges and radii here
for residue in myProtein.getResidues():
if isinstance(residue, LIG):
templist = []
Lig.make_up2date(residue)
for atom in residue.getAtoms():
atom.ffcharge = Lig.ligand_props[atom.name]["charge"]
atom.radius = Lig.ligand_props[atom.name]["radius"]
if atom in misslist:
misslist.pop(misslist.index(atom))
templist.append(atom)
charge = residue.getCharge()
if abs(charge - int(charge)) > 0.001:
# Ligand parameterization failed
myRoutines.warnings.append("WARNING: PDB2PQR could not successfully parameterize\n")
myRoutines.warnings.append(" the desired ligand; it has been left out of\n")
myRoutines.warnings.append(" the PQR file.\n")
myRoutines.warnings.append("\n")
# remove the ligand
myProtein.residues.remove(residue)
for myChain in myProtein.chains:
if residue in myChain.residues: myChain.residues.remove(residue)
else:
ligsuccess = 1
# Mark these atoms as hits
hitlist = hitlist + templist
# Temporary fix; if ligand was successful, pull all ligands from misslist
if ligsuccess:
templist = misslist[:]
for atom in templist:
if isinstance(atom.residue, (Amino, Nucleic)):
continue
misslist.remove(atom)
# Create the Typemap
if typemap:
typemapname = "%s-typemap.html" % outroot
myProtein.createHTMLTypeMap(myDefinition, typemapname)
# Grab the protein charge
reslist, charge = myProtein.getCharge()
# If we want a different naming scheme, use that
if not ffout is None:
scheme = ffout
userff = None # Currently not supported
if scheme != ff:
myNameScheme = Forcefield(scheme, myDefinition, userff)
else:
myNameScheme = myForcefield
myRoutines.applyNameScheme(myNameScheme)
header = printPQRHeader(pdblist, misslist, reslist, charge, ff,
myRoutines.getWarnings(), ph_calc_method, ph, ffout, commandLine,
include_old_header=include_old_header)
lines = myProtein.printAtoms(hitlist, chain)
# Determine if any of the atoms in misslist were ligands
missedligandresidues = []
for atom in misslist:
if isinstance(atom.residue, (Amino, Nucleic)):
continue
if atom.resName not in missedligandresidues:
missedligandresidues.append(atom.resName)
# Process the extensions
for ext in selectedExtensions:
module = extensions.extDict[ext]
#TODO: figure out a way to do this without crashing...
#tempRoutines = copy.deepcopy(myRoutines)
module.run_extension(myRoutines, outroot, extensionOptions)
if verbose:
print("Total time taken: %.2f seconds\n" % (time.time() - start))
return dict(header=header, lines=lines,
missedligands=missedligandresidues,
protein=myProtein, routines=myRoutines)
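# Illustrative sketch (not part of the original module): a minimal programmatic
# call to runPDB2PQR. It assumes getPDBFile/readPDB are available from the
# package's wildcard imports and that "1abc.pdb" is a hypothetical local file.
#
#   pdbfile = getPDBFile("1abc.pdb")
#   pdblist, errlist = readPDB(pdbfile)
#   result = runPDB2PQR(pdblist, "parse", outname="1abc.pqr", ph=7.0)
#   with open("1abc.pqr", "w") as handle:
#       handle.write(result['header'])
#       handle.writelines(result['lines'])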
def mainCommand(argv):
"""
Main driver for running program from the command line.
"""
fieldNames = ('amber','charmm','parse', 'tyl06','peoepb','swanson')
validForcefields = []
validForcefields.extend(fieldNames)
validForcefields.extend((x.upper() for x in fieldNames))
description = 'This module takes a PDB file as input and performs ' +\
'optimizations before yielding a new PQR-style file in PQR_OUTPUT_PATH.\n' +\
'If PDB_PATH is an ID it will automatically be obtained from the PDB archive.'
usage = 'Usage: %prog [options] PDB_PATH PQR_OUTPUT_PATH'
parser = OptionParser(description=description, usage=usage, version='%prog (Version ' + __version__ + ')')
group = OptionGroup(parser,"Manditory options", "One of the following options must be used.")
group.add_option('--ff', dest='ff', metavar='FIELD_NAME', choices=validForcefields,
help='The forcefield to use - currently amber, ' +
'charmm, parse, tyl06, peoepb and swanson ' +
'are supported.')
group.add_option('--userff', dest='userff', metavar='USER_FIELD_FILE',
help='The user-created forcefield file to use. Requires --usernames and overrides --ff.')
group.add_option('--clean', dest='clean', action='store_true', default=False,
help='Do no optimization, atom addition, or parameter assignment, ' +
'just return the original PDB file in aligned format. ' +
'Overrides --ff and --userff')
parser.add_option_group(group)
group = OptionGroup(parser,"General options")
group.add_option('--nodebump', dest='debump', action='store_false', default=True,
help='Do not perform the debumping operation')
group.add_option('--noopt', dest='opt', action='store_false', default=True,
help='Do not perform hydrogen optimization')
group.add_option('--chain', dest='chain', action='store_true', default=False,
help='Keep the chain ID in the output PQR file')
group.add_option('--assign-only', dest='assign_only', action='store_true', default=False,
help='Only assign charges and radii - do not add atoms, debump, or optimize.')
group.add_option('--ffout', dest='ffout', metavar='FIELD_NAME',choices=validForcefields,
help='Instead of using the standard canonical naming scheme for residue and atom names, ' +
'use the names from the given forcefield - currently amber, ' +
'charmm, parse, tyl06, peoepb and swanson are supported.')
group.add_option('--usernames', dest='usernames', metavar='USER_NAME_FILE',
help='The user created names file to use. Required if using --userff')
group.add_option('--apbs-input', dest='input', action='store_true', default=False,
help='Create a template APBS input file based on the generated PQR file. Also creates a Python ' +
'pickle for using these parameters in other programs.')
group.add_option('--ligand', dest='ligand', metavar='PATH',
help='Calculate the parameters for the ligand in mol2 format at the given path. ' +
'Pdb2pka must be compiled.')
group.add_option('--whitespace', dest='whitespace', action='store_true', default=False,
help='Insert whitespaces between atom name and residue name, between x and y, and between y and z.')
group.add_option('--typemap', dest='typemap', action='store_true', default=False,
help='Create Typemap output.')
group.add_option('--neutraln', dest='neutraln', action='store_true', default=False,
help='Make the N-terminus of this protein neutral (default is charged). '
'Requires PARSE force field.')
group.add_option('--neutralc', dest='neutralc', action='store_true', default=False,
help='Make the C-terminus of this protein neutral (default is charged). '
'Requires PARSE force field.')
group.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False,
help='Print information to stdout.')
group.add_option('--drop-water', dest='drop_water', action='store_true', default=False,
help='Drop waters before processing protein. Currently recognized and deleted are the following water types: %s' % ', '.join(WAT.water_residue_names))
group.add_option('--include-header', dest='include_header', action='store_true', default=False,
help='Include pdb header in pqr file. '
'WARNING: The resulting PQR file will not work with APBS versions prior to 1.5')
parser.add_option_group(group)
pka_group = OptionGroup(parser,"pH options")
pka_group.add_option('--ph-calc-method', dest='ph_calc_method', metavar='PH_METHOD', choices=('propka', 'propka31', 'pdb2pka'),
help='Method used to calculate pH values. If a pH calculation method is selected, for each'
' titratable residue pH values will be calculated and the residue potentially modified'
' after comparison with the pH value supplied by --with-ph. Valid options are: '
'propka - Use PROPKA to calculate pH values. Actual PROPKA results will be output to <output-path>.propka.\n'
'propka31 - Use PROPKA 3.1 to calculate pH values. Actual PROPKA results will be output to <output-path>.propka.\n'
'pdb2pka - (EXPERIMENTAL) Use PDB2PKA to calculate pH values. Requires the use of the PARSE force field.'
' Warning: Larger residues can take a very long time to run using this method. ')
pka_group.add_option('--with-ph', dest='ph', action='store', type='float', default=7.0,
help='pH values to use when applying the results of the selected pH calculation method.'
' Defaults to %default')
parser.add_option_group(pka_group)
pdb2pka_group = OptionGroup(parser,"PDB2PKA method options")
pdb2pka_group.add_option('--pdb2pka-out', dest='pdb2pka_out', action='store', default='pdb2pka_output',
help='Output directory for PDB2PKA results. Defaults to %default')
pdb2pka_group.add_option('--pdb2pka-resume', dest='pdb2pka_resume', action="store_true", default=False,
help='Resume run from state saved in output directory.')
pdb2pka_group.add_option('--pdie', dest='pdb2pka_pdie', default=8,type='int',
help='Protein dielectric constant. Defaults to %default')
pdb2pka_group.add_option('--sdie', dest='pdb2pka_sdie', default=80, type='int',
help='Solvent dielectric constant. Defaults to %default')
# pdb2pka_group.add_option('--maps', dest='maps', default=None, type='int',
# help='<1 for using provided 3D maps; 2 for generating new maps>')
# pdb2pka_group.add_option('--xdiel', dest='xdiel', default=None, type='str',
# help='<xdiel maps>')
# pdb2pka_group.add_option('--ydiel', dest='ydiel', default=None, type='str',
# help='<ydiel maps>')
# pdb2pka_group.add_option('--zdiel', dest='zdiel', default=None, type='str',
# help='<zdiel maps>')
# pdb2pka_group.add_option('--kappa', dest='kappa', default=None, type='str',
# help='<ion-accessibility map>')
# pdb2pka_group.add_option('--smooth', dest='sd', default=None, type='float',
# help='<st.dev [A] of Gaussian smoothing of 3D maps at the boundary, bandwidth=3 st.dev>')
#
# Cut off energy for calculating non-charged-charged interaction energies
#
pdb2pka_group.add_option('--pairene',dest='pdb2pka_pairene',type='float',default=1.0,
help='Cutoff energy in kT for calculating non charged-charged interaction energies. Default: %default')
parser.add_option_group(pdb2pka_group)
propka_group = OptionGroup(parser,"PROPKA method options")
propka_group.add_option("--propka-reference", dest="propka_reference", default="neutral", choices=('neutral','low-pH'),
help="Setting which reference to use for stability calculations. See PROPKA 3.0 documentation.")
propka_group.add_option('--propka-verbose', dest='propka_verbose', action='store_true', default=False,
help='Print extra PROPKA information to stdout. '
'WARNING: This produces an incredible amount of output.')
parser.add_option_group(propka_group)
extensions.setupExtensionsOptions(parser)
(options, args) = parser.parse_args(argv[1:])
commandLine = ' '.join(argv[1:])
if len(args) != 2:
parser.error('Incorrect number (%d) of arguments!\nargs: %s' % (len(args), args))
if options.assign_only or options.clean:
# --assign-only and --clean imply no debumping and no hydrogen optimization
options.debump = options.opt = False
userfffile = None
usernamesfile = None
if not options.clean:
if not options.usernames is None:
try:
usernamesfile = open(options.usernames, 'rU')
except IOError:
parser.error('Unable to open user names file %s' % options.usernames)
if not options.userff is None:
try:
userfffile = open(options.userff, 'rU')
except IOError:
parser.error('Unable to open user force field file %s' % options.userff)
if options.usernames is None:
parser.error('--usernames must be specified if using --userff')
else:
if options.ff is None:
parser.error('One of the mandatory options was not specified.\n' +
'Please specify either --ff, --userff, or --clean')
if getFFfile(options.ff) == '':
parser.error('Unable to find parameter files for forcefield %s!' % options.ff)
if options.ph < 0.0 or options.ph > 14.0:
parser.error('%.1f is not a valid pH! Please choose a pH between 0.0 and 14.0.' % options.ph)
ph_calc_options = None
if options.ph_calc_method == 'propka':
ph_calc_options = utilities.createPropkaOptions(options.ph,
verbose=options.propka_verbose,
reference=options.propka_reference)
elif options.ph_calc_method == 'propka31':
import propka.lib
ph_calc_options, _ = propka.lib.loadOptions('--quiet')
elif options.ph_calc_method == 'pdb2pka':
if options.ff.lower() != 'parse':
parser.error('PDB2PKA requires the PARSE force field.')
ph_calc_options = {'output_dir': options.pdb2pka_out,
'clean_output': not options.pdb2pka_resume,
'pdie': options.pdb2pka_pdie,
'sdie': options.pdb2pka_sdie,
'pairene': options.pdb2pka_pairene}
if options.ligand is not None:
try:
options.ligand = open(options.ligand, 'rU')
except IOError:
parser.error('Unable to find ligand file %s!' % options.ligand)
if options.neutraln and (options.ff is None or options.ff.lower() != 'parse'):
parser.error('--neutraln option only works with PARSE forcefield!')
if options.neutralc and (options.ff is None or options.ff.lower() != 'parse'):
parser.error('--neutralc option only works with PARSE forcefield!')
text = """
--------------------------
PDB2PQR - a Python-based structural conversion utility
--------------------------
Please cite your use of PDB2PQR as:
Dolinsky TJ, Nielsen JE, McCammon JA, Baker NA.
PDB2PQR: an automated pipeline for the setup, execution,
and analysis of Poisson-Boltzmann electrostatics calculations.
Nucleic Acids Research 32 W665-W667 (2004).
"""
sys.stdout.write(text)
path = args[0]
pdbFile = getPDBFile(path)
pdblist, errlist = readPDB(pdbFile)
if len(pdblist) == 0 and len(errlist) == 0:
parser.error("Unable to find file %s!" % path)
if len(errlist) != 0 and options.verbose:
print("Warning: %s is a non-standard PDB file.\n" % path)
print(errlist)
outpath = args[1]
options.outname = outpath
#In case no extensions were specified or no extensions exist.
if not hasattr(options, 'active_extensions') or options.active_extensions is None:
options.active_extensions = []
#I see no point in hiding options from extensions.
extensionOpts = options
#TODO: The ideal would be to pass a file like object for the second
# argument and add a third for names then
# get rid of the userff and username arguments to this function.
# This would also do away with the redundent checks and such in
# the Forcefield constructor.
try:
pqr = runPDB2PQR(pdblist,
options.ff,
outname = options.outname,
ph = options.ph,
verbose = options.verbose,
selectedExtensions = options.active_extensions,
ph_calc_method = options.ph_calc_method,
ph_calc_options = ph_calc_options,
extensionOptions = extensionOpts,
clean = options.clean,
neutraln = options.neutraln,
neutralc = options.neutralc,
ligand = options.ligand,
assign_only = options.assign_only,
chain = options.chain,
drop_water = options.drop_water,
debump = options.debump,
opt = options.opt,
typemap = options.typemap,
userff = userfffile,
usernames = usernamesfile,
ffout = options.ffout,
commandLine = commandLine,
include_old_header = options.include_header)
header, lines = pqr['header'], pqr['lines']
except PDB2PQRError as er:
print(er)
sys.exit(2)
# Print the PQR file
outfile = open(outpath,"w")
outfile.write(header)
# Adding whitespaces if --whitespace is in the options
for line in lines:
if options.whitespace:
if line[0:4] == 'ATOM':
newline = line[0:6] + ' ' + line[6:16] + ' ' + line[16:38] + ' ' + line[38:46] + ' ' + line[46:]
outfile.write(newline)
elif line[0:6] == 'HETATM':
newline = line[0:6] + ' ' + line[6:16] + ' ' + line[16:38] + ' ' + line[38:46] + ' ' + line[46:]
outfile.write(newline)
else:
outfile.write(line)
outfile.close()
if options.input:
from .src import inputgen
from .src import psize
method = "mg-auto"
size = psize.Psize()
size.parseInput(outpath)
size.runPsize(outpath)
asynch = 0 # No asynch files here!
input = inputgen.Input(outpath, size, method, asynch, potdx=True)
input.printInputFiles()
input.dumpPickle()
# Follow https://chriswarrick.com/blog/2014/09/15/python-apps-the-right-way-entry_points-and-scripts/
def main(args=None):
"""The main routine."""
if args is None:
args = sys.argv
mainCommand(args)
if __name__ == "__main__":
main()
|
PypiClean
|
/py-pde-0.32.2.tar.gz/py-pde-0.32.2/examples/jupyter/Discretized Fields.ipynb
|
```
import sys
sys.path.append('../..') # add the pde package to the python path
import pde
# define a simple grid
grid = pde.UnitGrid([32, 32])
grid.plot()
# define scalar field, initially filled with zeros
field = pde.ScalarField(grid)
field.average
# do computations on the field
field += 1
field.average
# define a scalar field initialized with random colored noise and plot it
scalar = pde.ScalarField.random_colored(grid, exponent=-2)
scalar.plot(colorbar=True);
# apply operators to the field
smoothed = scalar.smooth(1)
laplace = smoothed.laplace(bc='natural')
laplace.plot(colorbar=True);
# initialize a vector field and plot it
vector = pde.VectorField.random_colored(grid, exponent=-4)
vector.plot(method='streamplot');
# plot the first component of the vector field
vector[0].plot()
```
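A possible next step, sketched here as an illustration rather than as part of the original notebook (it assumes the `scalar` field defined above is still in scope), is to apply further differential operators from the same interface:
```
# compute the gradient of the smoothed scalar field; the result is a VectorField
gradient = scalar.smooth(1).gradient(bc='natural')
gradient.plot(method='quiver');
```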
|
PypiClean
|
/diffpy.utils-3.1.0.tar.gz/diffpy.utils-3.1.0/src/diffpy/utils/wx/gridutils.py
|
import wx
def getSelectionRows(grid):
"""Indices of the rows that have any cell selected.
"""
rows = grid.GetNumberRows()
rset = set()
if grid.GetSelectedCols():
rset.update(range(rows))
rset.update(grid.GetSelectedRows())
for r, c in grid.GetSelectedCells():
rset.add(r)
blocks = zip(grid.GetSelectionBlockTopLeft(),
grid.GetSelectionBlockBottomRight())
for tl, br in blocks:
rset.update(range(tl[0], br[0] + 1))
rv = sorted(rset)
return rv
def getSelectionColumns(grid):
"""Indices of columns that have any cell selected.
"""
cols = grid.GetNumberCols()
cset = set()
if grid.GetSelectedRows():
cset.update(range(cols))
cset.update(grid.GetSelectedCols())
for r, c in grid.GetSelectedCells():
cset.add(c)
blocks = zip(grid.GetSelectionBlockTopLeft(),
grid.GetSelectionBlockBottomRight())
for tl, br in blocks:
cset.update(range(tl[1], br[1] + 1))
rv = sorted(cset)
return rv
def getSelectedCells(grid):
"""Get list of (row, col) pairs of all selected cells.
Unlike grid.GetSelectedCells this returns them all no matter
how they were selected.
"""
rows = grid.GetNumberRows()
cols = grid.GetNumberCols()
allrows = range(rows)
allcols = range(cols)
rcset = set()
for r in grid.GetSelectedRows():
rcset.update(zip(cols * [r], allcols))
for c in grid.GetSelectedCols():
rcset.update(zip(allrows, rows * [c]))
blocks = zip(grid.GetSelectionBlockTopLeft(),
grid.GetSelectionBlockBottomRight())
for tl, br in blocks:
brows = range(tl[0], br[0] + 1)
bcols = range(tl[1], br[1] + 1)
rcset.update((r, c) for r in brows for c in bcols)
rcset.update(grid.GetSelectedCells())
rv = sorted(rcset)
return rv
def limitSelectionToRows(grid, indices):
'''Limit selection to the specified row indices.
No action for empty indices.
grid -- instance of wx.grid.Grid
indices -- list of row indices to be selected, must be sorted and unique.
No return value.
'''
import bisect
if not indices: return
rowblocks = _indicesToBlocks(indices)
cindices = getSelectionColumns(grid) or [grid.GetGridCursorCol()]
colblocks = _indicesToBlocks(cindices)
grid.ClearSelection()
for rlo, rhi in rowblocks:
for clo, chi in colblocks:
grid.SelectBlock(rlo, clo, rhi, chi, True)
# move cursor to the selected area
krow = bisect.bisect_left(indices, grid.GetGridCursorRow())
krow = min(krow, len(indices) - 1)
kcol = bisect.bisect_left(cindices, grid.GetGridCursorCol())
kcol = min(kcol, len(cindices) - 1)
grid.SetGridCursor(indices[krow], cindices[kcol])
return
def quickResizeColumns(grid, indices):
"""Resize the columns that were recently affected by cell changes.
This is faster than the normal grid AutoSizeColumns, since the latter loops
over the entire grid. In addition, this will not cause a
EVT_GRID_CMD_CELL_CHANGE event to be thrown, which can cause recursion.
This method will only increase column size.
"""
# Get the columns and maximum text width in each one
dc = wx.ScreenDC()
maxSize = {}
for (i, j) in indices:
if j not in maxSize:
renderer = grid.GetCellRenderer(i, j)
attr = grid.GetOrCreateCellAttr(i, j)
size = renderer.GetBestSize(grid, attr, dc, i, j).width
size += 10 # Need a small buffer
maxSize[j] = size
grid.BeginBatch()
for (j, size) in maxSize.items():
if size > grid.GetColSize(j):
grid.SetColSize(j, size)
grid.EndBatch()
return
# Local Helpers --------------------------------------------------------------
def _indicesToBlocks(indices):
'''Convert a list of integer indices to a list of (start, stop) tuples.
The (start, stop) tuple defines a continuous block, where the stop index
is included in the block.
indices -- list of integer indices, must be unique and sorted.
Return a list of (start, stop) tuples.
'''
rngs = []
i0 = -100
for i in indices:
if i > i0 + 1:
rngs.append([i, i])
else:
rngs[-1][-1] = i
i0 = i
rv = [tuple(ij) for ij in rngs]
return rv
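# Illustrative example (not part of the original module): contiguous runs are
# collapsed into inclusive (start, stop) blocks.
#   _indicesToBlocks([1, 2, 3, 7, 8, 12])  -->  [(1, 3), (7, 8), (12, 12)]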
# End of file
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/network/latest/get_route.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetRouteResult',
'AwaitableGetRouteResult',
'get_route',
]
@pulumi.output_type
class GetRouteResult:
"""
Route resource.
"""
def __init__(__self__, address_prefix=None, etag=None, has_bgp_override=None, id=None, name=None, next_hop_ip_address=None, next_hop_type=None, provisioning_state=None, type=None):
if address_prefix and not isinstance(address_prefix, str):
raise TypeError("Expected argument 'address_prefix' to be a str")
pulumi.set(__self__, "address_prefix", address_prefix)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if has_bgp_override and not isinstance(has_bgp_override, bool):
raise TypeError("Expected argument 'has_bgp_override' to be a bool")
pulumi.set(__self__, "has_bgp_override", has_bgp_override)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if next_hop_ip_address and not isinstance(next_hop_ip_address, str):
raise TypeError("Expected argument 'next_hop_ip_address' to be a str")
pulumi.set(__self__, "next_hop_ip_address", next_hop_ip_address)
if next_hop_type and not isinstance(next_hop_type, str):
raise TypeError("Expected argument 'next_hop_type' to be a str")
pulumi.set(__self__, "next_hop_type", next_hop_type)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> Optional[str]:
"""
The destination CIDR to which the route applies.
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter
def etag(self) -> str:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="hasBgpOverride")
def has_bgp_override(self) -> Optional[bool]:
"""
A value indicating whether this route overrides overlapping BGP routes regardless of LPM.
"""
return pulumi.get(self, "has_bgp_override")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nextHopIpAddress")
def next_hop_ip_address(self) -> Optional[str]:
"""
The IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
"""
return pulumi.get(self, "next_hop_ip_address")
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> str:
"""
The type of Azure hop the packet should be sent to.
"""
return pulumi.get(self, "next_hop_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the route resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetRouteResult(GetRouteResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRouteResult(
address_prefix=self.address_prefix,
etag=self.etag,
has_bgp_override=self.has_bgp_override,
id=self.id,
name=self.name,
next_hop_ip_address=self.next_hop_ip_address,
next_hop_type=self.next_hop_type,
provisioning_state=self.provisioning_state,
type=self.type)
def get_route(resource_group_name: Optional[str] = None,
route_name: Optional[str] = None,
route_table_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRouteResult:
"""
Use this data source to access information about an existing resource.
:param str resource_group_name: The name of the resource group.
:param str route_name: The name of the route.
:param str route_table_name: The name of the route table.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['routeName'] = route_name
__args__['routeTableName'] = route_table_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:network/latest:getRoute', __args__, opts=opts, typ=GetRouteResult).value
return AwaitableGetRouteResult(
address_prefix=__ret__.address_prefix,
etag=__ret__.etag,
has_bgp_override=__ret__.has_bgp_override,
id=__ret__.id,
name=__ret__.name,
next_hop_ip_address=__ret__.next_hop_ip_address,
next_hop_type=__ret__.next_hop_type,
provisioning_state=__ret__.provisioning_state,
type=__ret__.type)
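# Illustrative sketch (not part of the generated SDK): looking up an existing
# route; the resource names below are placeholders.
#
#   route = get_route(resource_group_name="example-rg",
#                     route_table_name="example-route-table",
#                     route_name="example-route")
#   pulumi.export("nextHopType", route.next_hop_type)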
|
PypiClean
|
/okpy-1.18.1.tar.gz/okpy-1.18.1/client/sources/doctest/__init__.py
|
from client import exceptions as ex
from client.sources.common import importing
from client.sources.doctest import models
import logging
import os
import traceback
log = logging.getLogger(__name__)
def load(file, name, assign):
"""Loads doctests from a specified filepath.
PARAMETERS:
file -- str; a filepath to a Python module containing doctests.
name -- str; optional parameter that specifies a particular function in
the file. If omitted, all doctests will be included.
assign -- the Assignment being graded; its cmd_args (verbose, interactive,
timeout) are used to configure each loaded doctest.
RETURNS:
Test
"""
if not os.path.isfile(file) or not file.endswith('.py'):
raise ex.LoadingException('Cannot import doctests from {}'.format(file))
try:
module = importing.load_module(file)
except Exception:
# Assume that part of the traceback includes frames from importlib.
# Begin printing the traceback after the last line involving importlib.
# TODO(albert): Try to find a cleaner way to do this. Also, might want
# to move this to a more general place.
print('Traceback (most recent call last):')
stacktrace = traceback.format_exc().split('\n')
start = 0
for i, line in enumerate(stacktrace):
if 'importlib' in line:
start = i + 1
print('\n'.join(stacktrace[start:]))
raise ex.LoadingException('Error importing file {}'.format(file))
if name:
return {name: _load_test(file, module, name, assign)}
else:
return _load_tests(file, module, assign)
def _load_tests(file, module, assign):
"""Recursively find doctests from all objects in MODULE."""
tests = {}
def _load_tests_from_obj(obj, attribute_path):
for attr in dir(obj):
to_test = getattr(obj, attr)
if callable(to_test) and getattr(to_test, '__module__', None) == module.__name__:
path = attribute_path + [attr]
name = '.'.join(path)
tests[name] = _load_test(file, module, name, assign)
_load_tests_from_obj(to_test, path)
_load_tests_from_obj(module, [])
return tests
def _load_test(file, module, name, assign):
namespace = module
for attr in name.split('.'):
if not hasattr(namespace, attr):
raise ex.LoadingException('Module {} has no attribute {}'.format(
module.__name__, name))
namespace = getattr(namespace, attr)
func = namespace
if not callable(func):
raise ex.LoadingException('Attribute {} is not a function'.format(name))
docstring = func.__doc__ if func.__doc__ else ''
try:
return models.Doctest(file, assign.cmd_args.verbose, assign.cmd_args.interactive,
assign.cmd_args.timeout, name=name, points=1,
docstring=docstring)
except ex.SerializeException:
raise ex.LoadingException('Unable to load doctest for {} '
'from {}'.format(name, file))
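# Illustrative sketch (not part of the original module): loading every doctest
# from a student module; `assignment` is assumed to be an okpy Assignment whose
# cmd_args carry the verbose/interactive/timeout settings used above.
#
#   tests = load('hw01.py', None, assignment)
#   for name in sorted(tests):
#       print(name)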
|
PypiClean
|
/intel_tensorflow_avx512-2.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/tensorflow/python/tpu/tpu_feed.py
|
import itertools
import numpy as np
from tensorflow.python.compiler.xla.experimental import xla_sharding
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.tpu import tpu_name_util
from tensorflow.python.tpu import tpu_sharding
from tensorflow.python.tpu.ops import tpu_ops
from tensorflow.python.util import nest
def partition_or_replicate_on_host(tensor, dims):
"""Partitions or replicates the input tensor.
The ops inside this function are placed on the host side.
Args:
tensor: The input tensor which will be partitioned or replicated.
dims: A list of integers describing how to partition the input tensor.
Returns:
An iterator of `Tensor`s or a list of partitioned tensors.
"""
if dims is None:
return itertools.repeat(tensor)
dims = np.array(dims)
output = [tensor]
shape_list = np.array(tensor.shape.as_list())
quotients, remainders = np.divmod(shape_list, dims)
for axis, (quotient, remainder, dim, original_size) in enumerate(
zip(quotients, remainders, dims, shape_list)):
if dim <= 1:
continue
if remainder > 0:
# For each dimension, when it cannot be evenly partitioned, XLA assumes
# tensors are partitioned in a greedy manner by using
# ceil_ratio(size/dim) first. E.g. 2D tensor with shape (5, 14) and dims
# are (2, 4). Since 5 % 2 = 1 and 14 % 4 = 2, [5, 14] =>
# [[(3, 4), (3, 4), (3, 4), (3, 2)],
# [(2, 4), (2, 4), (2, 4), (2, 2)]]
ceil_ratio = quotient + 1
num_full_slots, left_over = np.divmod(original_size, ceil_ratio)
num_or_size_splits = [ceil_ratio] * num_full_slots + [left_over]
if len(num_or_size_splits) < dim:
num_or_size_splits += [0] * (dim - len(num_or_size_splits))
new_output = []
for x in output:
new_output.append(
array_ops.split(
x, num_or_size_splits=num_or_size_splits, axis=axis))
output = new_output
else:
output = [array_ops.split(x, int(dim), axis=axis) for x in output]
output = nest.flatten(output)
return output
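# Illustrative sketch (not part of the original module): splitting a [4, 8]
# tensor into two column blocks yields two [4, 4] tensors, while dims=None
# would return an endlessly repeating iterator of the unmodified tensor.
#
#   x = array_ops.ones([4, 8])
#   parts = partition_or_replicate_on_host(x, dims=[1, 2])
#   # parts is a flat list of two tensors, each of shape [4, 4]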
def _tag_sharding_attribute_for_dequeued_tensor(tensor, dims):
"""Tags appropriate XLA sharding attribute to the dequeued tensor.
The sharding attribute of the dequeued tensor will be a tuple.
Args:
tensor: The dequeued tensor on TPU.
dims: A list of integers describing how the tensor is partitioned.
Returns:
The same tensor with the xla_sharding attribute.
"""
if dims is None:
return xla_sharding.replicate(tensor, assign_tuple_sharding=True)
elif np.prod(dims) == 1:
return xla_sharding.assign_device(tensor, 0, assign_tuple_sharding=True)
else:
tile_assignment = np.arange(np.prod(dims)).reshape(dims)
return xla_sharding.tile(
tensor=tensor,
tile_assignment=tile_assignment,
assign_tuple_sharding=True)
def tag_sharding_attribute_for_dequeued_tensors(dequeues, dims):
"""Tags appropriate XLA sharding attribute to the dequeued tensors.
Args:
dequeues: A list of dequeued tensors on TPU.
dims: A list of integers describing how the tensors are partitioned.
Returns:
The same dequeues with appropriate xla_sharding attribute.
"""
nest.assert_shallow_structure(dequeues, dims)
return nest.map_structure_up_to(
dequeues, _tag_sharding_attribute_for_dequeued_tensor, dequeues, dims)
class InfeedQueue(object):
"""A helper object to build a device infeed queue.
The InfeedQueue builds the host-side and device-side Ops to enqueue and
dequeue elements, respectively, and ensures that their types and
shapes match.
"""
def __init__(self,
number_of_tuple_elements=None,
tuple_types=None,
tuple_shapes=None,
shard_dimensions=None,
number_of_partitions=None,
name=None):
"""Creates a new InfeedQueue with the given configuration.
The configuration need not be fully specified at creation since it
can be modified subsequently by methods that set the values
explicitly or infer them from the shapes of inputs.
Args:
number_of_tuple_elements: the number of Tensors fed atomically through the
queue, must be present unless it can be inferred from other arguments.
tuple_types: if not None, a list of types of the elements of the queue.
tuple_shapes: if not None, a list of shapes of the elements of the queue.
shard_dimensions: if not None, a list of dimensions on which the
elements of the queue should be sharded during automatic
parallelization.
number_of_partitions: if > 1, the infeed dequeue shape will contain
the full shape that includes all partitions and add corresponding XLA
annotation on the infeed dequeue op. In this case, the infeed is still
data parallel that feeds per-core batch size to each core while the XLA
computation may be partitioned. As XLA requires infeed dequeue shape to
be per-replica shape, thus we need number_of_partitions here to
calculate the per-replica unpartitioned shape.
name: the name of the queue.
Raises:
ValueError: if number_of_tuple_elements <= 0; or
number_of_tuple_arguments, tuple_types, tuple_shapes, and
shard_dimensions are all None; or the length of tuple_types,
tuple_shapes, or shard_dimensions is not equal to
number_of_tuple_elements; or any element of shard_dimensions
can't be converted to a Dimension.
TypeError: if any element of tuple_types or tuple_shapes can't
be converted to a dtype or TensorShape, respectively.
"""
self._frozen = False
self._generated_enqueue_ops = False
self._generated_dequeue_op = False
self._name = "InfeedQueue" if name is None else name
if number_of_partitions is None:
self._number_of_partitions = 1
else:
self._number_of_partitions = number_of_partitions
if number_of_tuple_elements is None:
if tuple_types is not None:
number_of_tuple_elements = len(tuple_types)
elif tuple_shapes is not None:
number_of_tuple_elements = len(tuple_shapes)
elif shard_dimensions is not None:
number_of_tuple_elements = len(shard_dimensions)
else:
raise ValueError(
"number of tuple elements cannot be inferred from InfeedQueue "
"constructor")
if number_of_tuple_elements <= 0:
raise ValueError(f"number_of_tuple_elements {number_of_tuple_elements} "
"must be > 0")
# Make an empty sharding policy for each tuple element.
self._sharding_policies = [
tpu_sharding.ShardingPolicy() for _ in range(number_of_tuple_elements)
]
if tuple_types is not None:
self.set_tuple_types(tuple_types)
else:
self._tuple_types = None
if tuple_shapes is not None:
self.set_tuple_shapes(tuple_shapes)
else:
self._tuple_shapes = None
if shard_dimensions is not None:
self.set_shard_dimensions(shard_dimensions)
self._validate()
def _validate(self):
"""Checks that the configuration is self-consistent.
Raises:
ValueError: if the shapes and sharding policies don't match.
"""
if self.tuple_shapes is not None:
for (policy, shape) in zip(self._sharding_policies, self._tuple_shapes):
# Raise an error if the policy is incompatible with the shape.
_ = policy.get_sharded_shape(shape)
@property
def number_of_tuple_elements(self):
"""Returns the number of InfeedQueue tuple elements."""
return len(self._sharding_policies)
@property
def tuple_types(self):
"""Returns the types of the InfeedQueue tuple elements."""
return self._tuple_types
def set_tuple_types(self, tuple_types):
"""Sets the type of each element of the queue.
tuple_types must be a list of length
self.number_of_tuple_elements, and each element must be
convertible to a dtype.
Args:
tuple_types: the types of each queue element.
Raises:
ValueError: if tuple_types is not of length
self.number_of_tuple_elements.
TypeError: if an element of tuple_types cannot be converted to a
dtype.
"""
if len(tuple_types) != self.number_of_tuple_elements:
raise ValueError(
f"tuple_types is {str(tuple_types)}, but must be a list of "
f"length {self.number_of_tuple_elements}"
)
if self._frozen:
for (frozen, updated) in zip(self._tuple_types, tuple_types):
if frozen != updated:
raise ValueError(
"Trying to update InfeedQueue with frozen configuration with an "
f"incompatible type. Frozen types are {str(self._tuple_types)}, "
f"updated types are {str(tuple_types)}")
else:
try:
self._tuple_types = [dtypes.as_dtype(t) for t in tuple_types]
except (TypeError) as e:
raise TypeError(
f"tuple_types is {str(tuple_types)}, but must be a list of "
f"elements each convertible to dtype: got error {str(e)}") from e
@property
def tuple_shapes(self):
"""Returns the shapes of the InfeedQueue tuple elements."""
return self._tuple_shapes
def set_tuple_shapes(self, tuple_shapes):
"""Sets the shape of each element of the queue.
tuple_shapes must be a list of length
self.number_of_tuple_elements, and each element must be
convertible to a TensorShape.
Args:
tuple_shapes: the shapes of each queue element.
Raises:
ValueError: if tuple_shapes is not of length
self.number_of_tuple_elements.
TypeError: if an element of tuple_shapes cannot be converted to
a TensorShape.
"""
if len(tuple_shapes) != self.number_of_tuple_elements:
raise ValueError(
f"tuple_shapes is {str(tuple_shapes)}, but must be a list of "
f"length {self.number_of_tuple_elements}"
)
try:
tuple_shapes = [tensor_shape.as_shape(shape) for shape in tuple_shapes]
except (ValueError, TypeError) as e:
raise TypeError(
f"tuple_shapes is {str(tuple_shapes)}, but must be a list of "
"elements each convertible to TensorShape: got error "
f"{str(e)}") from e
if self._frozen:
for (frozen, updated) in zip(self._tuple_shapes, tuple_shapes):
if frozen != updated:
raise ValueError(
"Trying to update InfeedQueue with frozen configuration with an "
"incompatible shape. Frozen shapes are "
f"{str(self._tuple_shapes)}, updated shapes are "
f"{str(tuple_shapes)}")
else:
self._tuple_shapes = tuple_shapes
self._validate()
@property
def sharding_policies(self):
"""Returns the sharding policies of the InfeedQueue tuple elements."""
return self._sharding_policies
@property
def shard_dimensions(self):
"""Gets the shard dimension of each tuple element.
Returns:
A list of length number_of_tuple_elements, where each list entry
is the shard dimension of that tuple element or None if the
shard dimension has not been set.
"""
# The number of shards is always the same for all the policies.
return [policy.shard_dimension for policy in self._sharding_policies]
def set_shard_dimensions(self, shard_dimensions):
"""Sets the shard_dimension of each element of the queue.
shard_dimensions must be a list of length
self.number_of_tuple_elements, and each element must be
convertible to a Dimension compatible with self.tuple_shapes.
Args:
shard_dimensions: the dimensions of each queue element.
Raises:
ValueError: if shard_dimensions is not of length
self.number_of_tuple_elements; or an element of
shard_dimensions cannot be converted to a Dimension; or an
element of shard_dimensions is a Dimension that is out of
range for the corresponding tuple element shape.
"""
if len(shard_dimensions) != self.number_of_tuple_elements:
raise ValueError(f"shard_dimensions is {str(shard_dimensions)}, but must "
f"be a list of length {self.number_of_tuple_elements}")
for (policy, dimension) in zip(self._sharding_policies, shard_dimensions):
policy.set_shard_dimension(dimension)
self._validate()
@property
def number_of_shards(self):
"""Gets the number of shards to use for the InfeedQueue.
Returns:
Number of shards or None if the number of shards has not been set.
"""
# The number of shards is always the same for all the policies.
return self._sharding_policies[0].number_of_shards
def set_number_of_shards(self, number_of_shards):
"""Sets the number of shards to use for the InfeedQueue.
Args:
number_of_shards: number of ways to shard the InfeedQueue.
Raises:
ValueError: if number_of_shards is not > 0; or the policies have
been frozen and number_of_shards was already set to something
else.
"""
for policy in self._sharding_policies:
policy.set_number_of_shards(number_of_shards)
policy.set_number_of_partitions(self._number_of_partitions)
self._validate()
def set_configuration_from_input_tensors(self, input_tensors):
"""Sets the shapes and types of the queue tuple elements.
input_tensors is a list of Tensors whose types and shapes are used
to set the queue configuration.
Args:
input_tensors: list of Tensors of the same types and shapes as
the desired queue Tuple.
Raises:
ValueError: if input_tensors is not a list of length
self.number_of_tuple_elements
"""
if len(input_tensors) != self.number_of_tuple_elements:
raise ValueError(f"input_tensors is {str(input_tensors)}, but should be "
f"a list of {self.number_of_tuple_elements} Tensors")
self.set_tuple_shapes([t.shape for t in input_tensors])
self.set_tuple_types([t.dtype for t in input_tensors])
def set_configuration_from_sharded_input_tensors(self, input_tensors):
"""Sets the shapes and types of the queue tuple elements.
input_tensors is a list of lists of Tensors whose types and shapes are used
to set the queue configuration. The length of the outer list is the number
of shards required, and each inner list is the tuple of Tensors to use to
determine the types and shapes of the corresponding shard. This method
depends on the shard dimension, and calling it freezes the shard policy.
Args:
input_tensors: list of lists of Tensors. The outer list length corresponds
to the desired number of shards, and each inner list is the size
and shape of the desired configuration of the corresponding shard.
Raises:
ValueError: if any inner list is not a list of length
self.number_of_tuple_elements; or the inner lists do not combine to
form a consistent unsharded shape.
TypeError: if the types of the Tensors in the inner lists do not match.
"""
if not self._frozen:
# Unset the tuple shapes in case the configuration becomes
# transiently inconsistent.
self._tuple_shapes = None
number_of_shards = len(input_tensors)
self.set_number_of_shards(number_of_shards)
for t in input_tensors:
if len(t) != self.number_of_tuple_elements:
raise ValueError(
f"input_tensors is {str(input_tensors)} but must be a list of "
"lists, where each inner list has length "
f"number_of_tuple_elements={self.number_of_tuple_elements}")
# Transpose the inputs to make a list of shard shapes for each tuple
# element.
sharded_shapes = [[t[i].shape
for t in input_tensors]
for i in range(self.number_of_tuple_elements)]
# For each tuple, get the unsharded shape using that tuple's policy.
unsharded_shapes = [
policy.get_unsharded_shape(s)
for (policy, s) in zip(self._sharding_policies, sharded_shapes)
]
self.set_tuple_shapes(unsharded_shapes)
for i in range(1, self.number_of_shards):
for (t1, t2) in zip(input_tensors[0], input_tensors[i]):
if t1.dtype != t2.dtype:
raise TypeError(
"types of the tuple elements of input_tensors "
f"{str(input_tensors)} are not consistent")
self.set_tuple_types([t.dtype for t in input_tensors[0]])
def freeze(self):
"""Freezes the InfeedQueue so it can no longer be modified.
The configuration is implicitly frozen before any host-side or
device-side Ops are generated. The configuration cannot be frozen
until the types and shapes of the tuple elements have been set.
Raises:
ValueError: if the types or shapes of the tuple elements have not been
set.
"""
self._frozen = True
if self._tuple_types is None:
raise ValueError(
"Can't freeze an InfeedQueue without setting all tuple types.")
if self._tuple_shapes is None:
raise ValueError(
"Can't freeze an InfeedQueue without setting all tuple shapes.")
for shape in self._tuple_shapes:
if shape.dims is None:
raise ValueError(
"Can't freeze an InfeedQueue without setting all tuple shapes.")
for policy in self._sharding_policies:
policy.freeze()
self._validate()
def generate_dequeue_op(self, tpu_device=0):
"""Generates the device-side Op to dequeue a tuple from the queue.
Implicitly freezes the queue configuration if it is not already
frozen, which will raise errors if the shapes and types have not
been fully specified.
Args:
tpu_device: The TPU device ordinal where the infeed instruction should be
placed. If None, no explicit placement will be performed, and it is up
to the user to call this API from within a proper TPU device scope.
The XLA code will fail if the TPU dequeue instruction is not bound to
any device.
Returns:
A list of Outputs corresponding to a shard of infeed dequeued
into XLA, suitable for use within a replicated block.
Raises:
ValueError: if the types or shapes of the tuple elements have not been
set; or if a dequeue op has already been generated.
"""
self.freeze()
if self._generated_dequeue_op and not ops.inside_function():
raise ValueError("Can't generate two dequeue Ops from the same queue")
self._generated_dequeue_op = True
full_name = "%s/dequeue" % self._name
sharded_shapes = [
policy.get_unpartitioned_shape(policy.get_sharded_shape(shape))
for (shape, policy) in zip(self._tuple_shapes, self._sharding_policies)
]
if tpu_device is not None:
with ops.device(tpu_name_util.core(tpu_device)):
dequeue_op = tpu_ops.infeed_dequeue_tuple(
dtypes=self._tuple_types, shapes=sharded_shapes, name=full_name)
else:
dequeue_op = tpu_ops.infeed_dequeue_tuple(
dtypes=self._tuple_types, shapes=sharded_shapes, name=full_name)
if self._number_of_partitions <= 1:
return dequeue_op
partitions = [
policy.get_unpartitioned_shape([1] * shape.ndims).as_list()
for (shape, policy) in zip(self._tuple_shapes, self._sharding_policies)
]
return tag_sharding_attribute_for_dequeued_tensors(dequeue_op, partitions)
def _generate_enqueue_op(self,
inputs,
name_prefix,
index,
device=None,
tpu_ordinal=-1):
"""Generate a host-side Op to enqueue a tuple to the queue.
If device is None the inputs are all required to have the same
device specification, and the enqueue Op is colocated with
inputs[0]. Otherwise the enqueue Op is placed on 'device'.
Args:
inputs: a list of Tensors with the types and shapes of the tuple elements.
name_prefix: the base name for the Op.
index: the shard index, used to uniquify the Op name.
device: device to place the Op on, or None if it should be
colocated with the inputs.
tpu_ordinal: ordinal of the TPU device on the host to use for
infeed if device is a CPU device. Should be set to -1 if device
is a TPU device.
Returns:
An Op corresponding to a shard of infeed enqueued at the host,
suitable for use within a replicated block.
Raises:
ValueError: if device is None and inputs do not all have the
same device specification.
"""
full_name = "%s/%d" % (name_prefix, index)
shapes = [t.shape for t in inputs]
if device is None:
devices = [t.device for t in inputs]
for i in range(1, self.number_of_tuple_elements):
if devices[0] != devices[i]:
raise ValueError(
f"input devices for shard {index} are {str(devices)}, but should "
"all be the same")
with ops.colocate_with(inputs[0]):
return tpu_ops.infeed_enqueue_tuple(
inputs=inputs,
shapes=shapes,
name=full_name,
device_ordinal=tpu_ordinal)
else:
with ops.device(device):
return tpu_ops.infeed_enqueue_tuple(
inputs=inputs,
shapes=shapes,
name=full_name,
device_ordinal=tpu_ordinal)
def generate_enqueue_ops(self,
sharded_inputs,
tpu_ordinal_function=None,
placement_function=None):
"""Generates the host-side Ops to enqueue the shards of a tuple.
sharded_inputs is a list, one for each shard, of lists of
Tensors. sharded_inputs[i] is the tuple of Tensors to use to feed
shard i of the queue. Returns the host-side Ops that must be run to
enqueue the sharded tuple. The Op for shard i is colocated with the inputs
for shard i.
Implicitly freezes the queue configuration if it is not already
frozen. If the configuration has already been frozen, and is not
compatible with the types and shapes of sharded_inputs, an error
will be raised.
Args:
sharded_inputs: a list of lists of Tensors. The length of the outer list
determines the number of shards. Each inner list indicates the types
and shapes of the tuples in the corresponding shard.
tpu_ordinal_function: if not None, a function that takes the
shard index as input and returns the ordinal of the TPU device
the shard's infeed should be placed on. tpu_ordinal_function must be
set if the inputs are placed on CPU devices.
      placement_function: if not None, a function that takes the shard index as
        input and returns the host device on which the enqueue op should be
        placed.
Returns:
A list of host-side Ops, one for each shard, that when executed together
will enqueue a full-size element of infeed.
Raises:
ValueError: if the queue configuration has previously been frozen and the
shapes of the elements of sharded_inputs are not compatible with the
frozen configuration; or if the shapes of the elements of sharded_inputs
don't form a consistent unsharded tuple; or if the elements of a tuple
have different device constraints.
TypeError: if the queue configuration has previously been frozen and the
types of the elements of sharded_inputs are not compatible with the
frozen configuration; or if the types of the elements of sharded_inputs
don't form a consistent unsharded tuple.
"""
self.set_configuration_from_sharded_input_tensors(sharded_inputs)
self.freeze()
if self._generated_enqueue_ops and not ops.inside_function():
raise ValueError("Can't generate two enqueue Ops from the same queue")
self._generated_enqueue_ops = True
if tpu_ordinal_function is None:
tpu_ordinal_function = lambda index: -1
name_prefix = "%s/enqueue" % self._name
return [
self._generate_enqueue_op(
shard,
name_prefix,
index,
tpu_ordinal=tpu_ordinal_function(index),
device=placement_function(index) if placement_function else None)
for (shard, index) in zip(sharded_inputs, range(self.number_of_shards))
]
# TODO(misard) Generalize this to the case of systems that don't
# have 8 devices per host, and figure out what to do with
# model-parallelism.
def _default_placement_function(self, index):
return "/task:%d/device:CPU:0" % (index / 8)
def _default_ordinal_function(self, index):
return index % 8
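  # Illustrative note on the defaults above (an observation, not part of the
  # original API): with the assumed 8 devices per host, shard index 11 maps to
  # the host device "/task:1/device:CPU:0" (11 / 8 -> task 1) and to TPU
  # ordinal 3 (11 % 8).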
# TODO(b/36470756) remove this from tutorials once we have a better story
# for automatic placement of input pipelines.
def split_inputs_and_generate_enqueue_ops(self,
inputs,
device_assignment=None,
placement_function=None,
tpu_ordinal_function=None):
"""POORLY-PERFORMING ON MULTI-HOST SYSTEMS.
Generates the host-side Ops to enqueue a tuple.
This method performs poorly because it takes an entire input on a single
host, splits it, and distributes it to all of the cores. It is present only
to simplify tutorial examples.
inputs is a list of Tensors to use to feed the queue. Each input is split
into self.number_of_shards shards. Returns an Op for each shard to enqueue
the shard. The Op for shard i is placed on device placement_function(i).
Implicitly freezes the queue configuration if it is not already
frozen. If the configuration has already been frozen, and is not
compatible with the types and shapes of inputs, an error
will be raised.
Args:
inputs: a list of Tensors which indicates the types and shapes of the
queue tuple.
device_assignment: if not `None`, a TPU `DeviceAssignment`. If
device_assignment is not `None`, but `placement_function` and
        `tpu_ordinal_function` are None, then `device_assignment` will be used to
place infeeds on the first k TPU shards, where k is the number of shards
in the queue. If all three are `None`, then default placement and
ordinal functions are used.
placement_function: if not None, a function that takes the shard
index as input and returns a device string indicating which
device the shard's infeed should be placed on. If placement_function
and tpu_ordinal_function are None, inputs are sharded round-robin
across the devices in the system.
tpu_ordinal_function: if not None, a function that takes the
shard index as input and returns the ordinal of the TPU device
the shard's infeed should be placed on. If placement_function
and tpu_ordinal_function are None, inputs are sharded round-robin
across the devices in the system.
Returns:
A list of host-side Ops, one for each shard, that when executed together
will enqueue a full-size element of infeed.
Raises:
ValueError: if the queue configuration has previously been frozen and the
shapes of the elements of inputs are not compatible with the frozen
configuration.
TypeError: if the queue configuration has previously been frozen and the
types of the elements of inputs are not compatible with the frozen
configuration.
"""
if device_assignment is None:
if placement_function is None:
placement_function = self._default_placement_function
if tpu_ordinal_function is None:
tpu_ordinal_function = self._default_ordinal_function
else:
def _placement_function_from_map(index):
return device_assignment.host_device(replica=index)
def _ordinal_function_from_map(index):
return device_assignment.tpu_ordinal(replica=index)
if placement_function is None:
placement_function = _placement_function_from_map
if tpu_ordinal_function is None:
tpu_ordinal_function = _ordinal_function_from_map
self.set_configuration_from_input_tensors(inputs)
self.freeze()
if self._generated_enqueue_ops and not ops.inside_function():
raise ValueError("Can't generate two enqueue Ops from the same queue")
self._generated_enqueue_ops = True
split_name_prefix = "%s/split" % self._name
if self.number_of_shards == 1:
transposed_sharded_inputs = [[inp] for inp in inputs]
else:
def split_fn(inp, num_shards, axis, name):
with ops.colocate_with(inp):
return array_ops.split(inp, num_shards, axis=axis, name=name)
transposed_sharded_inputs = [
split_fn(
inp,
self.number_of_shards,
axis=policy.shard_dimension,
name="%s/%d" % (split_name_prefix, index))
for (inp, policy, index) in zip(inputs, self._sharding_policies,
range(self.number_of_tuple_elements))
]
sharded_inputs = [[shard[i]
for shard in transposed_sharded_inputs]
for i in range(self.number_of_shards)]
name_prefix = "%s/enqueue" % self._name
return [
self._generate_enqueue_op(
shard,
name_prefix,
index,
device=placement_function(index),
tpu_ordinal=tpu_ordinal_function(index))
for (shard, index) in zip(sharded_inputs, range(self.number_of_shards))
]
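  # A minimal usage sketch (illustration only; `features` and `labels` are
  # hypothetical tensors, not names from this module):
  #   queue = InfeedQueue(number_of_tuple_elements=2)
  #   enqueue_ops = queue.split_inputs_and_generate_enqueue_ops(
  #       [features, labels])
  #   # and, inside the replicated TPU computation:
  #   per_core_inputs = queue.generate_dequeue_op()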
class _PartitionedInfeedQueue(InfeedQueue):
"""A helper object to build a device infeed queue with input partition.
Args:
number_of_tuple_elements: the number of Tensors fed atomically through the
queue, must be present unless it can be inferred from other arguments.
device_assignment: A TPU `DeviceAssignment` which is used to place all the
partitions to different TPU infeed queues.
host_id: The id of the host machine.
input_partition_dims: A nested list/tuple of integers. Each inner
list/tuple describes how to partition the corresponding input tensor.
tuple_types: If not None, a list of types of the elements of the queue.
tuple_shapes: If not None, a list of shapes of the elements of the queue.
name: The name of the queue.
"""
def __init__(self,
number_of_tuple_elements,
device_assignment,
host_id,
input_partition_dims=None,
tuple_types=None,
tuple_shapes=None,
name=None):
super(_PartitionedInfeedQueue, self).__init__(
number_of_tuple_elements=number_of_tuple_elements,
tuple_types=tuple_types,
tuple_shapes=None,
shard_dimensions=None,
name="PartitionedInfeedQueue" if name is None else name)
self._input_partition_dims = input_partition_dims
self._host_id = host_id
self._device_assignment = device_assignment
def generate_dequeue_op(self, tpu_device=0):
"""Generate TPU dequeue ops.
Args:
tpu_device: The TPU device ordinal where the infeed instruction should be
placed.
Returns:
A list of Outputs corresponding to a partition of infeed dequeued
into XLA, suitable for use within a replicated block.
Raises:
ValueError: if the types or shapes of the tuple elements have not been
set; or if a dequeue op has already been generated.
"""
self.freeze()
if self._generated_dequeue_op and not ops.inside_function():
raise ValueError("Can't generate two dequeue Ops from the same queue")
self._generated_dequeue_op = True
full_name = "%s/dequeue" % self._name
sharded_shapes = [
policy.get_sharded_shape(shape)
for (shape, policy) in zip(self._tuple_shapes, self._sharding_policies)
]
with ops.device(tpu_name_util.core(tpu_device)):
values = tpu_ops.infeed_dequeue_tuple(
dtypes=self._tuple_types, shapes=sharded_shapes, name=full_name)
return tag_sharding_attribute_for_dequeued_tensors(
values, self._input_partition_dims)
def generate_enqueue_ops(self, sharded_inputs): # pytype: disable=signature-mismatch # overriding-parameter-count-checks
"""Generates the host-side Ops to enqueue the partitioned inputs.
sharded_inputs is a list, one for each replica, of lists of
Tensors. sharded_inputs[i] is the tuple of Tensors to use to feed
replica i.
sharded_inputs[i][j] is partitioned by self._input_partition_dims[j].
For example, if sharded_inputs[i][j] is a 2-D Tensor:
[[A, B, C, D],
[E ,F, G, H]]
self._input_partition_dims[j] is [2, 4].
sharded_inputs[i][j] will be partitioned and flattened into:
[A, B, C, D, E, F, G, H] and fed into the logical core ids:
[0, 1, 2, 3, 4, 5, 6, 7] respectively.
Args:
sharded_inputs: a list of lists of Tensors. The length of the
outer list determines the number of shards. Each inner list indicates
the types and shapes of the tuples in the corresponding shard.
Returns:
A list of host-side Ops, one for each shard, that when executed together
will enqueue a full-size element of infeed.
Raises:
ValueError: if the queue configuration has previously been frozen and the
shapes of the elements of sharded_inputs are not compatible with the
frozen configuration; or if the shapes of the elements of sharded_inputs
don't form a consistent unsharded tuple; or if the elements of a tuple
have different device constraints; or if the partition dims are invalid.
TypeError: if the queue configuration has previously been frozen and the
types of the elements of sharded_inputs are not compatible with the
frozen configuration; or if the types of the elements of sharded_inputs
don't form a consistent unsharded tuple.
"""
self.set_configuration_from_sharded_input_tensors(sharded_inputs)
number_of_replicas = len(sharded_inputs)
number_of_tuple_elements = len(sharded_inputs[0])
assert len(self._input_partition_dims) == number_of_tuple_elements
enqueue_ops = []
for replica_index in range(number_of_replicas):
flattened_inputs = sharded_inputs[replica_index]
inputs_part_dims_flat = nest.flatten_up_to(flattened_inputs,
self._input_partition_dims)
inputs_parted_iters = [
iter(self._check_dims_and_partition_or_replicate_on_host(x, dims))
for x, dims in zip(sharded_inputs[replica_index],
inputs_part_dims_flat)
]
# Find the replica_id of the host's logical core 0.
# The self._host_id is guaranteed to contain the logical core 0,
# even when num_cores_per_replica > num_cores_per_host -- the function
      # caller makes sure that this host_id will be receiving data (it calls
      # input_fn).
replica_id = self._device_assignment.lookup_replicas(
task_id=self._host_id, logical_core=0)[replica_index]
for logical_core in range(self._device_assignment.num_cores_per_replica):
        # Places different partitions on different logical cores.
# Since there can be multiple hosts per replica, we need to find
# the actual host (device) of this logical core.
device = self._device_assignment.host_device(
replica=replica_id, logical_core=logical_core)
with ops.device(device):
ordinal = self._device_assignment.tpu_ordinal(
replica=replica_id, logical_core=logical_core)
infeed_inputs = []
for it in inputs_parted_iters:
input_for_device = next(it, None)
if input_for_device is not None:
infeed_inputs.append(input_for_device)
if infeed_inputs:
enqueue_ops.append(
tpu_ops.infeed_enqueue_tuple(
inputs=infeed_inputs,
shapes=[x.shape for x in infeed_inputs],
name="enqueue/replica_{0}/input_{1}".format(
replica_index, logical_core),
device_ordinal=ordinal))
return enqueue_ops
def _check_input_partition_dims(self, tensor, dims):
"""Checks that input partition dims are valid for the `Tensor`.
Args:
tensor: Input tensor for partitioning.
      dims: A list of integers describing how to partition the input tensor.
Raises:
ValueError: If the tensor can't be partitioned by dims or the
num_cores_per_replica doesn't match the number of
        partitions (dims.prod()).
"""
# No partitioning specified, so don't perform further checks.
if dims is None:
return
dims = np.array(dims)
if (dims < 1).any():
raise ValueError("All input partition dims must be >= 1.")
# No partitioning, so don't perform further checks.
if dims.prod() == 1:
return
if dims.prod() != self._device_assignment.num_cores_per_replica:
raise ValueError(
"The product of each input partition dim should equal to "
"num_cores_per_replica. (dim = {}, num_cores_per_replica "
"= {})".format(dims, self._device_assignment.num_cores_per_replica))
if dims.shape[0] != tensor.shape.ndims:
raise ValueError(
"Input partition dims must have the same number of dimensions "
"as the `Tensor` to be partitioned. (tensor shape = {}, input "
"partition dims = {}).".format(tensor.shape.as_list(), dims))
tensor.shape.assert_is_fully_defined()
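  # For illustration (not from the original code): with
  # num_cores_per_replica == 8, dims = [2, 4] is valid for a rank-2 tensor
  # with a fully defined shape, whereas dims = [2, 3] raises because
  # 2 * 3 != num_cores_per_replica.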
def _check_dims_and_partition_or_replicate_on_host(self, tensor, dims):
"""Checks dims and partitions or replicates the input tensor.
The ops inside this function are placed on the host side.
Args:
tensor: The input tensor which will be partitioned or replicated.
      dims: A list of integers describing how to partition the input tensor.
Returns:
An iterator of `Tensor`s or a list of partitioned tensors.
"""
self._check_input_partition_dims(tensor, dims)
return partition_or_replicate_on_host(tensor, dims)
|
PypiClean
|
/spyder-terminal-1.2.2.tar.gz/spyder-terminal-1.2.2/spyder_terminal/server/static/components/minimist/index.js
|
module.exports = function (args, opts) {
if (!opts) opts = {};
var flags = { bools : {}, strings : {}, unknownFn: null };
if (typeof opts['unknown'] === 'function') {
flags.unknownFn = opts['unknown'];
}
if (typeof opts['boolean'] === 'boolean' && opts['boolean']) {
flags.allBools = true;
} else {
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
flags.bools[key] = true;
});
}
var aliases = {};
Object.keys(opts.alias || {}).forEach(function (key) {
aliases[key] = [].concat(opts.alias[key]);
aliases[key].forEach(function (x) {
aliases[x] = [key].concat(aliases[key].filter(function (y) {
return x !== y;
}));
});
});
[].concat(opts.string).filter(Boolean).forEach(function (key) {
flags.strings[key] = true;
if (aliases[key]) {
flags.strings[aliases[key]] = true;
}
});
var defaults = opts['default'] || {};
var argv = { _ : [] };
Object.keys(flags.bools).forEach(function (key) {
setArg(key, defaults[key] === undefined ? false : defaults[key]);
});
var notFlags = [];
if (args.indexOf('--') !== -1) {
notFlags = args.slice(args.indexOf('--')+1);
args = args.slice(0, args.indexOf('--'));
}
function argDefined(key, arg) {
return (flags.allBools && /^--[^=]+$/.test(arg)) ||
flags.strings[key] || flags.bools[key] || aliases[key];
}
function setArg (key, val, arg) {
if (arg && flags.unknownFn && !argDefined(key, arg)) {
if (flags.unknownFn(arg) === false) return;
}
var value = !flags.strings[key] && isNumber(val)
? Number(val) : val
;
setKey(argv, key.split('.'), value);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), value);
});
}
function setKey (obj, keys, value) {
var o = obj;
for (var i = 0; i < keys.length-1; i++) {
var key = keys[i];
if (key === '__proto__') return;
if (o[key] === undefined) o[key] = {};
if (o[key] === Object.prototype || o[key] === Number.prototype
|| o[key] === String.prototype) o[key] = {};
if (o[key] === Array.prototype) o[key] = [];
o = o[key];
}
var key = keys[keys.length - 1];
if (key === '__proto__') return;
if (o === Object.prototype || o === Number.prototype
|| o === String.prototype) o = {};
if (o === Array.prototype) o = [];
if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') {
o[key] = value;
}
else if (Array.isArray(o[key])) {
o[key].push(value);
}
else {
o[key] = [ o[key], value ];
}
}
function aliasIsBoolean(key) {
return aliases[key].some(function (x) {
return flags.bools[x];
});
}
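    // Main parsing loop: classify each raw argument as `--key=value`,
    // `--no-key`, `--key [value]`, a cluster of short flags such as `-abc`,
    // or a positional value pushed onto argv._ .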
for (var i = 0; i < args.length; i++) {
var arg = args[i];
if (/^--.+=/.test(arg)) {
// Using [\s\S] instead of . because js doesn't support the
// 'dotall' regex modifier. See:
// http://stackoverflow.com/a/1068308/13216
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
var key = m[1];
var value = m[2];
if (flags.bools[key]) {
value = value !== 'false';
}
setArg(key, value, arg);
}
else if (/^--no-.+/.test(arg)) {
var key = arg.match(/^--no-(.+)/)[1];
setArg(key, false, arg);
}
else if (/^--.+/.test(arg)) {
var key = arg.match(/^--(.+)/)[1];
var next = args[i + 1];
if (next !== undefined && !/^-/.test(next)
&& !flags.bools[key]
&& !flags.allBools
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, next, arg);
i++;
}
else if (/^(true|false)$/.test(next)) {
setArg(key, next === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
else if (/^-[^-]+/.test(arg)) {
var letters = arg.slice(1,-1).split('');
var broken = false;
for (var j = 0; j < letters.length; j++) {
var next = arg.slice(j+2);
if (next === '-') {
setArg(letters[j], next, arg)
continue;
}
if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) {
setArg(letters[j], next.split('=')[1], arg);
broken = true;
break;
}
if (/[A-Za-z]/.test(letters[j])
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
setArg(letters[j], next, arg);
broken = true;
break;
}
if (letters[j+1] && letters[j+1].match(/\W/)) {
setArg(letters[j], arg.slice(j+2), arg);
broken = true;
break;
}
else {
setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg);
}
}
var key = arg.slice(-1)[0];
if (!broken && key !== '-') {
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
&& !flags.bools[key]
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, args[i+1], arg);
i++;
}
else if (args[i+1] && /^(true|false)$/.test(args[i+1])) {
setArg(key, args[i+1] === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
}
else {
if (!flags.unknownFn || flags.unknownFn(arg) !== false) {
argv._.push(
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
);
}
if (opts.stopEarly) {
argv._.push.apply(argv._, args.slice(i + 1));
break;
}
}
}
Object.keys(defaults).forEach(function (key) {
if (!hasKey(argv, key.split('.'))) {
setKey(argv, key.split('.'), defaults[key]);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), defaults[key]);
});
}
});
if (opts['--']) {
argv['--'] = new Array();
notFlags.forEach(function(key) {
argv['--'].push(key);
});
}
else {
notFlags.forEach(function(key) {
argv._.push(key);
});
}
return argv;
};
function hasKey (obj, keys) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
o = (o[key] || {});
});
var key = keys[keys.length - 1];
return key in o;
}
function isNumber (x) {
if (typeof x === 'number') return true;
if (/^0x[0-9a-f]+$/i.test(x)) return true;
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/apimanagement/v20191201preview/tag_by_operation.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['TagByOperation']
class TagByOperation(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_id: Optional[pulumi.Input[str]] = None,
operation_id: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
tag_id: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Tag Contract details.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] api_id: API revision identifier. Must be unique in the current API Management service instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:param pulumi.Input[str] operation_id: Operation identifier within an API. Must be unique in the current API Management service instance.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] service_name: The name of the API Management service.
:param pulumi.Input[str] tag_id: Tag identifier. Must be unique in the current API Management service instance.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if api_id is None and not opts.urn:
raise TypeError("Missing required property 'api_id'")
__props__['api_id'] = api_id
if operation_id is None and not opts.urn:
raise TypeError("Missing required property 'operation_id'")
__props__['operation_id'] = operation_id
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
if tag_id is None and not opts.urn:
raise TypeError("Missing required property 'tag_id'")
__props__['tag_id'] = tag_id
__props__['display_name'] = None
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/latest:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:TagByOperation"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:TagByOperation")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(TagByOperation, __self__).__init__(
'azure-nextgen:apimanagement/v20191201preview:TagByOperation',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'TagByOperation':
"""
Get an existing TagByOperation resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return TagByOperation(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
Tag name.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type for API Management resource.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
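# A minimal usage sketch (every value below is a placeholder, not taken from
# this module):
#   tag = TagByOperation("example-tag",
#                        api_id="echo-api",
#                        operation_id="create-resource",
#                        resource_group_name="rg-example",
#                        service_name="apim-example",
#                        tag_id="awesome-tag")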
|
PypiClean
|
/conan-server-2.0.10.tar.gz/conan-server-2.0.10/conan/internal/api/new/meson_exe.py
|
from conan.internal.api.new.cmake_lib import source_cpp, source_h, test_main
conanfile_exe = """from conan import ConanFile
from conan.tools.meson import MesonToolchain, Meson
from conan.tools.gnu import PkgConfigDeps
class {{package_name}}Conan(ConanFile):
name = "{{name}}"
version = "{{version}}"
package_type = "application"
# Binary configuration
settings = "os", "compiler", "build_type", "arch"
# Sources are located in the same place as this recipe, copy them to the recipe
exports_sources = "meson.build", "src/*"
{% if requires is defined -%}
def requirements(self):
{% for require in requires -%}
self.requires("{{ require }}")
{% endfor %}
{%- endif %}
def layout(self):
self.folders.build = "build"
def generate(self):
deps = PkgConfigDeps(self)
deps.generate()
tc = MesonToolchain(self)
tc.generate()
def build(self):
meson = Meson(self)
meson.configure()
meson.build()
def package(self):
meson = Meson(self)
meson.install()
"""
test_conanfile_exe_v2 = """import os
from conan import ConanFile
from conan.tools.build import can_run
class {{package_name}}TestConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
def requirements(self):
self.requires(self.tested_reference_str)
def test(self):
if can_run(self):
self.run("{{name}}", env="conanrun")
"""
_meson_build_exe = """\
project('{{name}} ', 'cpp')
{% if requires is defined -%}
cxx = meson.get_compiler('cpp')
{% for require in requires -%}
{{as_name(require)}} = dependency('{{as_name(require)}}', required: true)
{% endfor %}
{%- endif %}
{% if requires is defined -%}
executable('{{name}}', 'src/{{name}}.cpp', 'src/main.cpp', install: true,
dependencies: {{ names(requires) }} )
{% else %}
executable('{{name}}', 'src/{{name}}.cpp', 'src/main.cpp', install: true)
{% endif %}
"""
meson_exe_files = {"conanfile.py": conanfile_exe,
"src/{{name}}.cpp": source_cpp,
"src/{{name}}.h": source_h,
"src/main.cpp": test_main,
"meson.build": _meson_build_exe,
"test_package/conanfile.py": test_conanfile_exe_v2
}
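# Usage note (an assumption about how these templates are consumed, based on
# the Conan 2 CLI): a command such as `conan new meson_exe -d name=hello
# -d version=0.1` renders the Jinja2 placeholders like {{name}} and
# {{version}} into the files listed above.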
|
PypiClean
|
/qubo-nn-0.2.5.tar.gz/qubo-nn-0.2.5/qubo_nn/problems/quadratic_assignment.py
|
import numpy as np
from qubo_nn.problems.problem import Problem
class QuadraticAssignment(Problem):
def __init__(self, cfg, flow_matrix, distance_matrix, P=200):
self.flow_matrix = flow_matrix
self.distance_matrix = distance_matrix
self.P = P
if cfg["problems"]["QA"].get("debug", False):
self.P = 0.
def gen_qubo_matrix(self):
n = len(self.flow_matrix)
Q = np.zeros((n ** 2, n ** 2))
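        # The nested loops below encode the QAP objective: the entry for
        # (facility i at location k, facility j at location m) is
        # flow[i][j] * distance[k][m]; zero-cost pairs are given the penalty P
        # instead, and every diagonal entry is set to -2 * P afterwards.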
for i in range(n):
for j in range(n):
for k in range(n):
for m in range(n):
val = self.flow_matrix[i][j] * self.distance_matrix[k][m]
if val == 0:
Q[i * n + k][j * n + m] = self.P
else:
Q[i * n + k][j * n + m] = val
for i in range(n ** 2):
Q[i][i] = -self.P * 2
return Q
@classmethod
    def gen_problems(cls, cfg, n_problems, size=3, **kwargs):
high = cfg["problems"]["QA"].get("high", 50) # Outdated.
debug = cfg["problems"]["QA"].get("debug", False)
problems = []
for _ in range(n_problems):
if debug:
x = np.arange(1, 50)
else:
x = np.arange(1, 1000)
choice = np.random.choice(x, size=(2, size, size), replace=False)
flow = choice[0]
dist = choice[1]
if debug:
dist[1][0] = 1.
flow[1][0] = 1.
# flow = np.random.randint(low=1, high=high, size=(size, size))
# dist = np.random.randint(low=1, high=high, size=(size, size))
np.fill_diagonal(flow, 0)
np.fill_diagonal(dist, 0)
problems.append((
np.tril(flow) + np.tril(flow, -1).T,
np.tril(dist) + np.tril(dist, -1).T,
))
return [
{"flow_matrix": flow_matrix, "distance_matrix": distance_matrix}
for (flow_matrix, distance_matrix) in problems
]
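# A minimal usage sketch (the cfg dict is a made-up stand-in for the project's
# real configuration, included only to make the example self-contained):
#   cfg = {"problems": {"QA": {}}}
#   spec = QuadraticAssignment.gen_problems(cfg, n_problems=1, size=3)[0]
#   qap = QuadraticAssignment(cfg, **spec)
#   Q = qap.gen_qubo_matrix()  # a (size**2, size**2) QUBO matrix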
|
PypiClean
|
/librec-auto-0.2.13.tar.gz/librec-auto-0.2.13/librec_auto/core/util/BBO.py
|
from librec_auto.core.util import Status
from librec_auto.core.util.errors import *
import optuna
# Borrowed from https://stackoverflow.com/questions/58820574/how-to-sample-parameters-without-duplicates-in-optuna
# Prevents repeated parameter values in the sampling. It seems like this should be the default
# behavior.
class RepeatPruner(optuna.pruners.BasePruner):
def prune(self, study, trial):
# type: (Study, FrozenTrial) -> bool
trials = study.get_trials(deepcopy=False)
completed_trials = [t.params for t in trials if t.state == optuna.trial.TrialState.COMPLETE]
n_trials = len(completed_trials)
if n_trials == 0:
return False
if trial.params in completed_trials:
return True
return False
#module to optimize
class BBO:
'''
    Class for managing black-box hyperparameter optimization (implemented with Optuna).
'''
def __init__(self, Ranges, num_of_vars, command, config, file_path = None):
self.Ranges = Ranges
self.num_of_vars = num_of_vars
self.alphabet = 'abcdefghijklmnopqrstuvwxyz'
self.command = command
self.index = 0
self.current_command = self.command[self.index]
self.config = config
self.file_path = file_path
self.create_params = True
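        # metric_map records whether each supported metric should be maximized
        # ("positive") or minimized ("negative"); title_map maps the short
        # metric names to the LibRec evaluator class names under which the
        # results are reported.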
self.metric_map = {'auc': 'positive', 'ap': 'positive','arhr': 'positive',
'diversity': 'positive', 'hitrate': 'positive','idcg': 'positive',
'ndcg': 'positive', 'precision': 'positive', 'recall': 'positive',
'rr': 'positive', 'featurediversity': 'positive', 'novelty': 'positive',
'entropy': 'positive','icov': 'positive', 'dppf': 'positive', 'dpcf': 'positive',
'giniindex': 'negative', 'mae': 'negative','mpe': 'negative','mse': 'negative','rmse': 'negative',
'csp': 'negative', 'psp': 'negative','miscalib': 'negative','nonpar': 'negative','valunfairness': 'negative',
'absunfairness': 'negative','overestimate': 'negative','underestimate': 'negative','ppr': 'negative'
}
self.title_map = {'auc': 'AUCEvaluator', 'ap': 'AveragePrecisionEvaluator','arhr': 'AverageReciprocalHitRankEvaluator','diversity': 'DiversityEvaluator',
'hitrate': 'HitRateEvaluator','idcg': 'IdealDCGEvaluator','ndcg': 'NormalizedDCGEvaluator',
'precision': 'PrecisionEvaluator', 'recall': 'RecallEvaluator', 'rr': 'ReciprocalRankEvaluator',
'featurediversity': 'DiversityByFeaturesEvaluator', 'novelty': 'NoveltyEvaluator', 'entropy': 'EntropyEvaluator',
'icov': 'ItemCoverageEvaluator', 'dppf': 'DiscountedProportionalPFairnessEvaluator', 'dpcf': 'DiscountedProportionalCFairnessEvaluator',
'giniindex': 'GiniIndexEvaluator', 'mae': 'MAEEvaluator','mpe': 'MPEEvaluator','mse': 'MSEEvaluator','rmse': 'RMSEEvaluator',
'csp': 'CStatisticalParityEvaluator', 'psp': 'PStatisticalParityEvaluator','miscalib': 'MiscalibrationEvaluator','nonpar': 'NonParityUnfairnessEvaluator','valunfairness': 'ValueUnfairnessEvaluator',
'absunfairness': 'AbsoluteUnfairnessEvaluator','overestimate': 'OverestimationUnfairnessEvaluator','underestimate': 'UnderestimationUnfairnessEvaluator','ppr': 'PPercentRuleEvaluator'
}
#creates hyperparameter dictionary in optuna format
def create_space(self, trial):
# self.space = {self.alphabet[i]: hp.hp.uniform(self.alphabet[i], self.Ranges[i][0], self.Ranges[i][1]) for i in range(self.num_of_vars)}
self.space = {self.alphabet[i]: trial.suggest_float(self.alphabet[i],self.Ranges[i][0], self.Ranges[i][1]) for i in range(self.num_of_vars)}
    # Uses the direction associated with a known metric, or the user-chosen direction if the metric is custom
def set_optimization_direction(self, metric):
self.metric = metric
if metric == "higher":
self.direction = "positive"
elif metric == "lower":
self.direction = "negative"
else:
if metric not in self.metric_map:
raise InvalidConfiguration("Optimization",
"You must specify whether your metric should be optimized in the positive or negative direction")
self.direction = self.metric_map[metric]
#Uses status object in order to get data from most recent iteration
def get_data(self):
store_val = ''
i = 0
for sub_paths in self.config._files.get_exp_paths_iterator():
if i != self.exp_no:
i += 1
continue
status = Status(sub_paths)
store_val = status.get_metric_info(status._log, BBO = True)[self.title_map[self.metric]]
break
return float(store_val)
    # All functions required for an experiment iteration are called from here
def run_experiments(self, trial):
if self.create_params is not False:
self.create_space(trial)
# self.create_params = False
params = self.space
if self.exp_no != 0:
self.modify_xml(params)
self.current_command.execute(self.config)
data = self.get_data()
self.exp_no += 1
if self.exp_no != self.total_exp_no:
self.change_current_command()
return data
    # Sends the sampled parameter values to write_exp_configs to create the next experiment's configuration
def modify_xml(self, params):
self.config.write_exp_configs(val = list(params.values()), iteration = self.exp_no)
# changes which command needs to be run
def change_current_command(self):
self.index += 1
self.current_command = self.command[self.index]
    # purge is run at the beginning and must be handled separately from the remaining steps
def run_purge(self, command):
command.execute(self.config)
return command._files._study_path
def run(self,total_exp_no):
# self.store_params = self.space
self.exp_no = 0
self.total_exp_no = total_exp_no
self.config.get_sub_exp_count()
# best = fmin(fn=self.run_experiments, space = self.store_params, algo=tpe.suggest, max_evals=total_exp_no)
        # Create the study with the repeat pruner and the appropriate optimization direction.
        direction = "maximize" if self.direction == "positive" else "minimize"
        study = optuna.create_study(direction=direction, pruner=RepeatPruner())
study.optimize(self.run_experiments, n_trials=total_exp_no)
print("Best Trial:")
trial = study.best_trial
print("Value:", trial.value)
print("Params: ")
for key, value in trial.params.items():
print("{}:{}".format(key,value))
|
PypiClean
|
/RsCMX_Signaling-4.0.120-py3-none-any.whl/RsCMX_Signaling/Implementations/Configure/Signaling/Nradio/Cell/Srs/Resource/Rmapping.py
|
from ........Internal.Core import Core
from ........Internal.CommandsGroup import CommandsGroup
from ........Internal.Types import DataType
from ........Internal.StructBase import StructBase
from ........Internal.ArgStruct import ArgStruct
from ........Internal.ArgSingleList import ArgSingleList
from ........Internal.ArgSingle import ArgSingle
from ........ import enums
# noinspection PyPep8Naming,PyAttributeOutsideInit,SpellCheckingInspection
class RmappingCls:
"""Rmapping commands group definition. 1 total commands, 0 Subgroups, 1 group commands"""
def __init__(self, core: Core, parent):
self._core = core
self._cmd_group = CommandsGroup("rmapping", core, parent)
def set(self, cell_name: str, resource_no: int, start_position: int, no_symbols: enums.NoSymbolsN = None, rep_factor: enums.NoSymbolsN = None) -> None:
"""SCPI: [CONFigure]:SIGNaling:NRADio:CELL:SRS:RESource:RMAPping \n
Snippet: driver.configure.signaling.nradio.cell.srs.resource.rmapping.set(cell_name = '1', resource_no = 1, start_position = 1, no_symbols = enums.NoSymbolsN.N1, rep_factor = enums.NoSymbolsN.N1) \n
No command help available \n
:param cell_name: No help available
:param resource_no: No help available
:param start_position: No help available
:param no_symbols: No help available
:param rep_factor: No help available
"""
param = ArgSingleList().compose_cmd_string(ArgSingle('cell_name', cell_name, DataType.String), ArgSingle('resource_no', resource_no, DataType.Integer), ArgSingle('start_position', start_position, DataType.Integer), ArgSingle('no_symbols', no_symbols, DataType.Enum, enums.NoSymbolsN, is_optional=True), ArgSingle('rep_factor', rep_factor, DataType.Enum, enums.NoSymbolsN, is_optional=True))
self._core.io.write(f'CONFigure:SIGNaling:NRADio:CELL:SRS:RESource:RMAPping {param}'.rstrip())
# noinspection PyTypeChecker
class GetStruct(StructBase):
"""Response structure. Fields: \n
- Start_Position: int: No parameter help available
- No_Symbols: enums.NoSymbolsN: No parameter help available
- Rep_Factor: enums.NoSymbolsN: No parameter help available"""
__meta_args_list = [
ArgStruct.scalar_int('Start_Position'),
ArgStruct.scalar_enum('No_Symbols', enums.NoSymbolsN),
ArgStruct.scalar_enum('Rep_Factor', enums.NoSymbolsN)]
def __init__(self):
StructBase.__init__(self, self)
self.Start_Position: int = None
self.No_Symbols: enums.NoSymbolsN = None
self.Rep_Factor: enums.NoSymbolsN = None
def get(self, cell_name: str, resource_no: int) -> GetStruct:
"""SCPI: [CONFigure]:SIGNaling:NRADio:CELL:SRS:RESource:RMAPping \n
Snippet: value: GetStruct = driver.configure.signaling.nradio.cell.srs.resource.rmapping.get(cell_name = '1', resource_no = 1) \n
No command help available \n
:param cell_name: No help available
:param resource_no: No help available
:return: structure: for return value, see the help for GetStruct structure arguments."""
param = ArgSingleList().compose_cmd_string(ArgSingle('cell_name', cell_name, DataType.String), ArgSingle('resource_no', resource_no, DataType.Integer))
return self._core.io.query_struct(f'CONFigure:SIGNaling:NRADio:CELL:SRS:RESource:RMAPping? {param}'.rstrip(), self.__class__.GetStruct())
|
PypiClean
|
/azure_mgmt_botservice-2.0.0b3-py3-none-any.whl/azure/mgmt/botservice/_azure_bot_service.py
|
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import AzureBotServiceConfiguration
from .operations import BotConnectionOperations, BotsOperations, ChannelsOperations, DirectLineOperations, HostSettingsOperations, OperationResultsOperations, Operations, PrivateEndpointConnectionsOperations, PrivateLinkResourcesOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class AzureBotService:
"""Azure Bot Service is a platform for creating smart conversational agents.
:ivar bots: BotsOperations operations
:vartype bots: azure.mgmt.botservice.operations.BotsOperations
:ivar channels: ChannelsOperations operations
:vartype channels: azure.mgmt.botservice.operations.ChannelsOperations
:ivar direct_line: DirectLineOperations operations
:vartype direct_line: azure.mgmt.botservice.operations.DirectLineOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.botservice.operations.Operations
:ivar bot_connection: BotConnectionOperations operations
:vartype bot_connection: azure.mgmt.botservice.operations.BotConnectionOperations
:ivar host_settings: HostSettingsOperations operations
:vartype host_settings: azure.mgmt.botservice.operations.HostSettingsOperations
:ivar operation_results: OperationResultsOperations operations
:vartype operation_results: azure.mgmt.botservice.operations.OperationResultsOperations
:ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
:vartype private_endpoint_connections:
azure.mgmt.botservice.operations.PrivateEndpointConnectionsOperations
:ivar private_link_resources: PrivateLinkResourcesOperations operations
:vartype private_link_resources:
azure.mgmt.botservice.operations.PrivateLinkResourcesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Azure Subscription ID.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = AzureBotServiceConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.bots = BotsOperations(self._client, self._config, self._serialize, self._deserialize)
self.channels = ChannelsOperations(self._client, self._config, self._serialize, self._deserialize)
self.direct_line = DirectLineOperations(self._client, self._config, self._serialize, self._deserialize)
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.bot_connection = BotConnectionOperations(self._client, self._config, self._serialize, self._deserialize)
self.host_settings = HostSettingsOperations(self._client, self._config, self._serialize, self._deserialize)
self.operation_results = OperationResultsOperations(self._client, self._config, self._serialize, self._deserialize)
self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize)
self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request, # type: HttpRequest
**kwargs: Any
) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> AzureBotService
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
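# A minimal usage sketch (assumes the azure-identity package is available; the
# subscription id is a placeholder):
#   from azure.identity import DefaultAzureCredential
#   client = AzureBotService(credential=DefaultAzureCredential(),
#                            subscription_id="00000000-0000-0000-0000-000000000000")
#   for bot in client.bots.list():
#       print(bot.name)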
|
PypiClean
|
/ruletarusa-0.8.tar.gz/ruletarusa-0.8/README.md
|
# Ruleta rusa
## _A project for academic purposes_
If you want to try this package out, I recommend doing so with the following code:
```
import ruletarusa as rr
def menu():
    """Function that contains the program's menu"""
    print("---------------------- RUSSIAN ROULETTE ------------------------")
    print("-----------------------------------------------------------")
numero_jugadores = int(input("Enter the number of players: "))
juego = rr.Juego(numero_jugadores)
while not juego.finJuego():
    print()
    menu()
    juego.ronda()
print("Game over.")
print("-----------------------------------------------------------")
```
I also leave you the link to my repository with the finished project.
https://github.com/cfsergio/RuletaRusa
|
PypiClean
|
/katalytic_images-0.9.4.tar.gz/katalytic_images-0.9.4/src/katalytic/images/__init__.py
|
import errno
from pathlib import Path
try:
import cv2
except ImportError: # pragma: no cover
msg = [
"Please install one of the following:",
"pip install opencv-python # the most popular",
"pip install opencv-python-headless # the most popular, without GUI functionality (for servers)",
"pip install opencv-contrib-python # extended functionality",
"pip install opencv-contrib-python-headless # extended functionality, without GUI functionality (for servers)",
]
raise ImportError("\n\t".join(msg))
from cv2 import imwrite as __cv2_imwrite, cvtColor as __cv2_cvtColor
import numpy as np
from numpy import array as __np_array, ndarray as __np_ndarray
import PIL.Image
__PIL_Image_open = PIL.Image.open
__PIL_Image_Image = PIL.Image.Image
from katalytic.data.checks import is_iterable, is_number, is_sequence
# noinspection PyProtectedMember
from katalytic._pkg import get_version, mark, KatalyticInterrupt, _UNDEFINED
__version__, __version_info__ = get_version(__name__)
def bhwc(arr):
"""
Returns a tuple representing the shape of the input array in the BHWC format: batch, height, width, and channels. The missing dimensions are filled with 1s.
Parameters:
arr (numpy.ndarray): The input array.
Returns:
tuple: A tuple representing the shape of the input array in the BHWC format.
Raises:
ValueError: If the input array has 5 or more dimensions.
"""
if arr.shape == (0,):
return (0, 0, 0, 0)
elif arr.ndim == 1:
return (1, *arr.shape, 1, 1)
elif arr.ndim == 2:
return (1, *arr.shape, 1)
elif arr.ndim == 3:
return (1, *arr.shape)
elif arr.ndim == 4:
return arr.shape
else:
raise ValueError(f"arr.ndim = {arr.ndim}")
def convert_image(image, before, after):
"""
Converts an image from one color space to another.
Parameters:
image: The input image to be converted. It can be either a NumPy array or a PIL Image object.
before (str): The color space of the input image. It should be a string representing a valid color space, e.g., 'RGB', 'BGR', 'GRAY'.
after (str): The desired color space of the output image. It should be a string representing a valid color space.
Returns:
The converted image as either a NumPy array or a PIL Image object, depending on the type of the input image.
Raises:
        TypeError: If the 'before' or 'after' parameter is not a string.
ValueError: If the conversion code is not found for the specified color space conversion.
"""
if not isinstance(before, str):
raise TypeError(f"type(before) = {type(before)!r}")
elif not isinstance(after, str):
raise TypeError(f"type(after) = {type(after)!r}")
return_PIL = isinstance(image, PIL.Image.Image)
if return_PIL:
image = np.array(image)
else:
image = load_image(image)
conversion_code = f"COLOR_{before}2{after}"
conversion_code = conversion_code.replace("gray", "GRAY")
conversion_code = getattr(cv2, conversion_code, None)
if conversion_code:
img = __cv2_cvtColor(image, conversion_code)
elif before.startswith("binary") or after.startswith("binary"):
raise NotImplementedError
else:
raise ValueError
if return_PIL:
return PIL.Image.fromarray(img)
else:
return img
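# A minimal usage sketch (assumes `img` is a BGR image loaded elsewhere):
#   rgb = convert_image(img, 'BGR', 'RGB')
#   gray = convert_image(img, 'BGR', 'gray')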
def create_line(p1, p2, color, *, thickness=3, **kwargs):
"""
    Create a dict representing a line shape to be used by draw() or draw_inplace().
Parameters:
p1 (tuple or list): The (x, y) coordinates of the first point.
p2 (tuple or list): The (x, y) coordinates of the second point.
color: The color of the line. It can be specified in various formats supported by the underlying drawing library.
thickness (int): The thickness of the line. Defaults to 3.
**kwargs: Additional keyword arguments that can be used to customize the shape.
Returns:
dict: A dictionary representing the line to be drawn
"""
return {
"type": "line",
"p1": tuple(map(int, p1)),
"p2": tuple(map(int, p2)),
"color": color,
"thickness": thickness,
**kwargs,
}
def create_circle(center, radius, color, *, thickness=3, **kwargs):
"""
    Create a dict representing a circle shape to be used by draw() or draw_inplace().
Parameters:
center (tuple or list): The (x, y) coordinates of the center.
radius (int): The radius of the circle.
color: The color of the circle. It can be specified in various formats supported by the underlying drawing library.
thickness (int): The thickness of the circle. Use -1 to fill it up. Defaults to 3.
**kwargs: Additional keyword arguments that can be used to customize the circle object.
Returns:
dict: A dictionary representing the circle to be drawn
"""
return {
"type": "circle",
"center": tuple(map(int, center)),
"radius": int(radius),
"color": color,
"thickness": thickness,
**kwargs,
}
def create_rectangle(p1, p2, color, *, thickness=3, **kwargs):
"""
    Create a dict representing a rectangle shape to be used by draw() or draw_inplace().
Parameters:
p1 (tuple or list): The (x, y) coordinates of the top left corner.
p2 (tuple or list): The (x, y) coordinates of the bottom right corner.
color: The color of the rectangle. It can be specified in various formats supported by the underlying drawing library.
thickness (int): The thickness of the line. Use -1 to fill it up. Defaults to 3.
**kwargs: Additional keyword arguments that can be used to customize the shape.
Returns:
dict: A dictionary representing the rectangle to be drawn
"""
return {
"type": "rectangle",
"p1": tuple(map(int, p1)),
"p2": tuple(map(int, p2)),
"color": color,
"thickness": thickness,
**kwargs,
}
def create_text(
text,
origin,
color,
*,
font=cv2.FONT_HERSHEY_SIMPLEX,
scale=1.25,
thickness=3,
bg=None,
bg_pad=None,
**kwargs,
):
"""Create a text shape.
The text shape represents a text string with its origin point, color, font, scale, and thickness. It can also include a background rectangle behind the text.
Parameters:
text (str): The text string to be displayed.
origin (tuple or list): The (x, y) coordinates of the origin point.
color: The color of the text. It can be specified in various formats supported by the underlying drawing library.
font: The font to be used for the text. Defaults to cv2.FONT_HERSHEY_SIMPLEX.
scale (float): The scale factor for the font size. Defaults to 1.25.
thickness (int): The thickness of the text. Defaults to 3.
bg: The color of the background rectangle behind the text. If None, no background rectangle will be created. It can be specified in various formats supported by the underlying drawing library.
bg_pad: The padding for the background rectangle. It can be specified as a single number, a sequence of two numbers for horizontal and vertical padding, or a sequence of four numbers for left, top, right, and bottom padding. Defaults to None.
**kwargs: Additional keyword arguments that can be used to customize the text shape.
Returns:
dict: A dictionary representing the text shape to be drawn
Raises:
ValueError: If the 'bg' parameter is None, but the 'bg_pad' parameter is set to a value.
ValueError: If the 'bg_pad' parameter has an invalid format.
TypeError: If the 'bg_pad' parameter has an unsupported type.
"""
shape = {
"type": "text",
"text": text,
"origin": tuple(map(int, origin)),
"color": color,
"font": font,
"font_scale": scale,
"thickness": thickness,
**kwargs,
}
if bg is None:
if bg_pad is None:
return shape
else:
            # bg_pad is set to None by default instead of 5 to alert the user
            # when they set <bg_pad> but forget <bg>.
            # Otherwise the mistake would be ignored silently.
raise ValueError("<bg> is None, even though <bg_pad> is set to a value")
if bg_pad is None:
bg_pad = [5] * 4
elif is_number(bg_pad):
bg_pad = [bg_pad] * 4
elif is_sequence(bg_pad):
if len(bg_pad) == 2:
bg_pad = [*bg_pad, *bg_pad]
elif len(bg_pad) != 4:
raise ValueError(
f"<bg_pad> expects None, a number or a sequence like (horizontal, vertical) or (left, top, right, bottom). Got a sequence of length {len(bg_pad)}"
)
else:
raise TypeError(f"type(bg_pad) = {type(bg_pad)!r}")
shape["background"] = {"color": bg, "pad": bg_pad}
return shape
def create_polylines(pts, color, *, thickness=3, is_closed=True, **kwargs):
"""
Create a dict representing a polylines shape to be used by draw() or draw_inplace().
Parameters:
pts (list or ndarray): The points of the polylines as a list of tuples or an ndarray of shape (N, 2), representing the (x, y) coordinates.
color: The color of the polylines. It can be specified in various formats supported by the underlying drawing library.
thickness (int): The thickness of the polylines. Use -1 to fill it up. Defaults to 3.
is_closed (bool): A flag indicating whether the polylines should be closed or not. Defaults to True.
**kwargs: Additional keyword arguments that can be used to customize the polylines object.
Returns:
dict: A dictionary representing the polylines to be drawn
"""
return {
"type": "polylines",
"pts": pts,
"color": color,
"thickness": thickness,
"is_closed": is_closed,
**kwargs,
}
def create_mask(pts, color, **kwargs):
"""
Create a dict representing a mask shape to be used by draw() or draw_inplace().
Parameters:
        pts (list or ndarray): The points of the mask polygon as a list of tuples or an ndarray of shape (N, 2), representing the (x, y) coordinates.
        color: The color of the mask. It can be specified in various formats supported by the underlying drawing library.
        **kwargs: Additional keyword arguments that can be used to customize the mask object.
Returns:
dict: A dictionary representing the mask to be drawn
"""
return {"type": "mask", "pts": pts, "color": color, **kwargs}
def draw(image, data):
"""Draws shapes on a copy of the input image
This function takes an image and an collection specifying the shapes to be drawn on the image.
The shapes can include lines, circles, rectangles, text, masks, and polylines.
The calls the corresponding drawing functions for each shape in the order it appears in the collection.
Parameters:
image: a numpy array representing the image on which the shapes will be drawn.
data:
The shapes to be drawn. It can be either a dictionary representing a single shape
or a list of dictionaries representing multiple shapes.
Raises:
TypeError: If the 'data' parameter is not an iterable of shapes or a dict
KeyError: If the 'type' key is missing in any shape dictionary.
ValueError: If the 'type' key in any shape dictionary has an invalid value.
ValueError: If the 'pts' key in a mask or polylines shape dictionary has an invalid format.
"""
if isinstance(image, __PIL_Image_Image):
new_image = np.array(image)
return_PIL = True
else:
new_image = image.copy()
return_PIL = False
draw_inplace(new_image, data)
if return_PIL:
return PIL.Image.fromarray(new_image)
else:
return new_image
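# A minimal usage sketch (the image and coordinates are made up for
# illustration):
#   img = np.zeros((100, 200, 3), dtype=np.uint8)
#   shapes = [
#       create_rectangle((10, 10), (60, 40), (0, 255, 0)),
#       create_text("hello", (15, 80), (255, 255, 255), scale=0.75,
#                   bg=(40, 40, 40)),
#   ]
#   annotated = draw(img, shapes)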
def draw_inplace(image, data):
"""Draws shapes on the input image by modifying it in-place
This function takes an image and an collection specifying the shapes to be drawn on the image.
The shapes can include lines, circles, rectangles, text, masks, and polylines.
The calls the corresponding drawing functions for each shape in the order it appears in the collection.
Parameters:
image: a numpy array representing the image on which the shapes will be drawn.
data: The shapes to be drawn. It can be either a dictionary representing a single shape or a list of dictionaries representing multiple shapes.
Raises:
TypeError: If the 'data' parameter is not an iterable of shapes or a dict
KeyError: If the 'type' key is missing in any shape dictionary.
ValueError: If the 'type' key in any shape dictionary has an invalid value.
ValueError: If the 'pts' key in a mask or polylines shape dictionary has an invalid format.
"""
if isinstance(data, dict):
data = [data]
elif not is_iterable(data):
raise TypeError(f"type(data) = {type(data)!r}")
for shape in data:
draw_shape = _pick_draw_function(shape["type"])
shape = _rename_kwargs(shape)
if shape["type"] == "text":
# extract background info only after the call to _rename_kwargs()
# Otherwise you might miscalculate the bg size and position
bg = _create_rectangle_for_text_background(
shape.pop("background", None), shape
)
if bg:
draw_inplace(image, bg)
elif shape["type"] in ("mask", "polylines"):
pts = shape["pts"]
if is_iterable(pts):
if not is_iterable(pts[0][0]):
pts = [pts]
if not isinstance(pts, np.ndarray):
pts = np.array(pts, dtype=np.int32)
elif pts.dtype != np.int32:
pts = pts.astype(np.int32)
shape["pts"] = pts
del shape["type"]
draw_shape(image, **shape)
def _create_rectangle_for_text_background(bg, text):
"""Create a rectangle to act as background for the text.
Parameters:
bg: The background information for the text shape. It can be specified as a color value, a dictionary with color and padding information, or None if no background is required.
text: The properties of the text shape. It should be a dictionary containing the text, font face, font scale, thickness, and origin information.
Returns:
dict or None: A dictionary representing the background rectangle for the text shape. If no background is required, None is returned.
Raises:
TypeError: If the 'bg' parameter has an invalid type.
ValueError: If the 'pad' value has an invalid format.
"""
if not bg:
return None
if is_sequence(bg) or is_number(bg):
color = bg
pad = 5 * text["fontScale"]
elif isinstance(bg, dict):
color = bg.pop("color")
pad = bg.pop("pad", 5 * text["fontScale"])
else:
raise TypeError(f"type(background) = {type(bg).__name__!r}")
error_msg = f"Expected <pad> to be an int or a sequence like (horizontal, vertical) or (left, top, right, bottom). Got {pad!r}"
if is_number(pad):
pad = (pad, pad, pad, pad)
elif is_sequence(pad):
if len(pad) == 2:
pad = (*pad, *pad)
elif len(pad) == 4:
pass
else:
raise ValueError(error_msg)
else:
raise ValueError(error_msg)
kwargs = {k: text[k] for k in ["text", "fontFace", "fontScale", "thickness"]}
(w, h), baseline = cv2.getTextSize(**kwargs)
baseline += text["thickness"] * text["fontScale"]
if text.get("bottomLeftOrigin", False):
raise NotImplementedError("Calculate baseline for bottomLeftOrigin=True")
p1 = (text["org"][0] - pad[0], text["org"][1] - h - pad[1])
p2 = (text["org"][0] + w + pad[2], text["org"][1] + baseline + pad[3])
return create_rectangle(p1, p2, color, thickness=-1)
def _rename_kwargs(shape):
"""Rename the keys to match the corresponding OpenCV function parameters."""
conversion = {
"font": "fontFace",
"font_scale": "fontScale",
"line_type": "lineType",
"draw_above_origin": "bottomLeftOrigin",
"p1": "pt1",
"p2": "pt2",
"origin": "org",
"is_closed": "isClosed",
}
return {conversion.get(k, k): v for k, v in shape.items()}
def _pick_draw_function(shape_type):
"""Pick the corresponding OpenCV drawing function based on the shape type."""
fn = {
"arrowed_line": cv2.arrowedLine,
"circle": cv2.circle,
"contours": cv2.drawContours,
"convex_polygon": cv2.fillConvexPoly,
"ellipse": cv2.ellipse,
"ellipse_polygon": cv2.ellipse2Poly,
"line": cv2.line,
"marker": cv2.drawMarker,
"mask": cv2.fillPoly,
"polylines": cv2.polylines,
"rectangle": cv2.rectangle,
"text": cv2.putText,
}
if shape_type not in fn:
raise ValueError(f"Unknown shape: {shape_type!r}")
else:
return fn[shape_type]
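# Illustrative usage sketch (editor's addition, not part of the original API):
# each dict uses the "friendly" keys that _rename_kwargs() maps onto the OpenCV
# parameter names; the remaining keys are forwarded verbatim to the cv2 call
# picked by _pick_draw_function(). The shapes and colors below are arbitrary.
def _demo_draw_shapes():
    canvas = np.zeros((120, 220, 3), dtype=np.uint8)
    shapes = [
        {"type": "rectangle", "p1": (10, 10), "p2": (100, 70), "color": (0, 255, 0), "thickness": 2},
        {"type": "circle", "center": (160, 40), "radius": 25, "color": (255, 0, 0), "thickness": -1},
        {"type": "text", "text": "demo", "origin": (15, 105), "font": cv2.FONT_HERSHEY_SIMPLEX,
         "font_scale": 0.6, "color": (255, 255, 255), "thickness": 1},
    ]
    draw_inplace(canvas, shapes)
    return canvas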
@mark("load::png")
@mark("load::jpg")
@mark("load::jpeg")
def load_image(image, mode=None, *, default=_UNDEFINED):
"""
Load an image and return it as a NumPy array.
Parameters:
image: The image to be loaded. It can be specified as a file path (string or Path object), a PIL Image object, or a NumPy array.
mode (str, optional): The mode to be used when loading the image. It should be a string representing the desired mode, e.g., 'RGB', 'L', 'RGBA'. If None, the default mode of the image will be used.
default (Any):
The default value to return if the specified file path does not exist.
Returns:
numpy.ndarray: The loaded image as a NumPy array.
Raises:
TypeError: If the 'mode' parameter is not None or a string.
TypeError: If the 'image' parameter has an unsupported type.
"""
if not (mode is None or isinstance(mode, str)):
raise TypeError(f"mode expected None or str. Got {type(mode)!r}")
if isinstance(image, (str, Path)):
# noinspection PyProtectedMember
from katalytic.files import (
_warn_if_another_function_should_be_used,
_load_funcs,
)
_warn_if_another_function_should_be_used(str(image), _load_funcs)
if not Path(image).exists() and default is not _UNDEFINED:
return default
else:
return __np_array(__PIL_Image_open(image))
elif isinstance(image, __PIL_Image_Image):
return image.copy()
elif isinstance(image, __np_ndarray):
return image.copy()
else:
raise TypeError(f"type(image) = {type(image)!r}")
def hwc(arr):
"""
Returns a tuple representing the shape of the input array in the HWC format: height, width, and channels. The missing dimensions are filled with 1s.
Parameters:
arr (numpy.ndarray): The input array.
Returns:
tuple: A tuple representing the shape of the input array in the HWC format.
Raises:
ValueError: If the input array has 5 or more dimensions.
"""
return bhwc(arr)[1:]
def hw(arr):
"""
Returns a tuple representing the shape of the input array in the HW format: height and width. The missing dimensions are filled with 1s.
Parameters:
arr (numpy.ndarray): The input array.
Returns:
tuple: A tuple representing the shape of the input array in the HW format.
Raises:
ValueError: If the input array has 5 or more dimensions.
"""
return bhwc(arr)[1:3]
def are_arrays_equal(image_1, image_2, check_type=False):
"""
Check if two images represented as NumPy arrays are equal.
The equality comparison is performed based on the shape and optionally the data type of the arrays.
Parameters:
image_1: The first image to compare. It can be specified as a file path (string or Path object), a PIL Image object, or a NumPy array.
image_2: The second image to compare. It can be specified as a file path (string or Path object), a PIL Image object, or a NumPy array.
check_type (bool, optional): A flag indicating whether to perform an additional check on the data type of the arrays. If True, the data type of the arrays must also match for the arrays to be considered equal. Defaults to False.
Returns:
bool: True if the images are equal, False otherwise.
"""
image_1 = load_image(image_1)
image_2 = load_image(image_2)
if image_1.shape != image_2.shape:
return False
elif check_type and image_1.dtype != image_2.dtype:
return False
else:
# noinspection PyUnresolvedReferences
return (image_1 == image_2).all()
@mark("save::png")
@mark("save::jpg")
@mark("save::jpeg")
def save_image(image, path, *, exists="replace", make_dirs=True, mode="RGB"):
"""
Save an image to the specified file path.
The image can be provided as a PIL Image object, a NumPy array, or a file path.
The function supports specifying the behavior when the target file already exists.
Parameters:
image: The image to be saved. It can be specified as a PIL Image object, a NumPy array, or a file path (string or Path object).
path: The file path to save the image to. It should be a string or Path object.
exists (str, optional):
Specifies the behavior if the destination file already exists. Defaults to 'replace'.
- 'error': Raise an error.
- 'replace': Replace the existing file.
            - 'skip': Skip saving the file.
make_dirs (bool or str, optional):
Specifies whether to create the destination directory if it doesn't exist. Defaults to True.
- True: Create the directory if it doesn't exist.
- False: Raise an error if the destination directory doesn't exist.
mode (str, optional): The mode to be used when saving the image. It should be a string representing the desired mode, e.g., 'RGB', 'BGR'. Defaults to 'RGB'.
Raises:
TypeError: If the 'mode' parameter is not a string.
ValueError: If the 'exists' parameter is not one of 'error', 'skip', 'replace'.
FileExistsError: If the target file already exists and the 'exists' parameter is set to 'error'.
"""
if not isinstance(mode, str):
raise TypeError(f"type(mode) = {type(mode)!r}")
elif exists not in ("error", "skip", "replace"):
raise ValueError(
f"exists must be one of 'error', 'skip', 'replace'. Got {exists!r}"
)
# noinspection PyProtectedMember
from katalytic.files import _warn_if_another_function_should_be_used, _save_funcs
_warn_if_another_function_should_be_used(str(path), _save_funcs)
if Path(path).exists():
if exists == "error":
raise FileExistsError(f"[Errno {errno.EEXIST}] File exists: {str(path)!r}")
elif exists == "replace":
pass # continue executing
elif exists == "skip":
return
try:
dest_dir = Path(path).parent
if make_dirs:
from katalytic.files import make_dir
make_dir(dest_dir, create_parents=True, exists_ok=True)
elif not dest_dir.exists():
raise FileNotFoundError(
f"[Errno {errno.ENOENT}] Directory does not exist: {str(dest_dir)!r}"
)
elif dest_dir.is_file():
raise NotADirectoryError(
f"[Errno {errno.ENOTDIR}] Not a directory: {str(dest_dir)!r}"
)
if isinstance(image, __PIL_Image_Image):
image = __np_array(image)
if isinstance(image, __np_ndarray):
if mode != "BGR":
image = convert_image(image, mode, "BGR")
ext = str(path).rpartition(".")[2]
tmp_path = f"{path}.part.{ext}"
__cv2_imwrite(tmp_path, image)
if save_image.__katalytic_test_atomicity_race_condition__:
save_image.__katalytic_test_atomicity_race_condition__ = False
# I can't use save_image('race condition', path) directly
            # It would overwrite the tmp_path = f'{path}.part.{ext}' created above
            # and then move it to the target `path`. This function would no longer
            # be able to find the tmp_path and would throw an error
# at the end of the function: `Path(tmp_path).rename(path)`
tmp_path_2 = f"{path}.part2.{ext}"
save_image(np.array([[[0, 255, 0]]], dtype=np.uint8), tmp_path_2)
Path(tmp_path_2).rename(path)
# Checking these conditions again to make the function
# as robust as possible against race conditions
if Path(path).exists():
if exists == "error":
raise FileExistsError(
f"[Errno {errno.EEXIST}] File exists: {str(path)!r}"
)
elif exists == "replace":
pass # continue executing
elif exists == "skip":
return
if save_image.__katalytic_test_atomicity_interrupt__:
save_image.__katalytic_test_atomicity_interrupt__ = False
raise KatalyticInterrupt(f"Testing atomicity ...")
Path(tmp_path).rename(path)
elif isinstance(image, (str, Path)):
from katalytic.files import copy_file
copy_file(image, path, exists=exists)
else:
raise TypeError(f"type(image) = {type(image)!r}")
except BaseException as e:
if not isinstance(e, KatalyticInterrupt):
raise
save_image.__katalytic_test_atomicity_interrupt__ = False
save_image.__katalytic_test_atomicity_race_condition__ = False
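# Illustrative usage sketch (editor's addition): a save/load round-trip with the
# functions above. The path is hypothetical; for a lossless format such as PNG
# the reloaded array should compare equal to the original.
def _demo_save_load_roundtrip(path="/tmp/katalytic_demo.png"):
    original = np.random.randint(0, 256, size=(32, 48, 3), dtype=np.uint8)
    save_image(original, path, exists="replace")
    reloaded = load_image(path)
    return are_arrays_equal(original, reloaded, check_type=True)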
/CHJ-1.0.2.tar.gz/CHJ-1.0.2/_CSS/Length.py
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from _CHJ import CHJ
from _TFL import TFL
import _CHJ._CSS
import _CHJ._CSS._TRBL_
import _TFL._Meta.Object
from _TFL._Meta.Once_Property import Once_Property
from _TFL.pyk import pyk
from _TFL.Regexp import Regexp, re
class M_Length (TFL.Meta.Object.__class__) :
"""Meta class for `_Length_`."""
_Unit_Map = {}
def __init__ (cls, name, bases, dct) :
cls.__m_super.__init__ (name, bases, dct)
if name != "_Length_" and cls.unit_name is None :
cls.unit_name = name.lower ()
if cls.unit_name :
assert cls.unit_name not in cls._Unit_Map
cls._Unit_Map [cls.unit_name] = cls
# end def __init__
@Once_Property
def Pat (cls) :
return Regexp \
( r"^"
+ r"(?P<number> [-+]? \d+ (?: \.\d*)?)"
+ r"(?P<unit>"
+ "|".join (re.escape (u) for u in sorted (cls._Unit_Map))
+ r")"
+ r"$"
, re.VERBOSE | re.IGNORECASE
)
# end def Pat
# end class M_Length
@pyk.adapt__bool__
@pyk.adapt__div__
class _Length_ (TFL.Meta.BaM (TFL.Meta.Object, metaclass = M_Length)) :
"""Model a CSS length value.
>>> print (Px (3))
3px
>>> print (Px (3) * 2)
6px
>>> print (Px (3) + Px (2))
5px
>>> print (Px (3) + In (2))
Traceback (most recent call last):
...
TypeError: Cannot add 'Px' and 'In' objects
>>> print (Px (3) % 2)
1px
>>> print (Percent (100), Percent (100) / 2, Percent (100) / 2.5)
100% 50.0% 40.0%
>>> print (Rem (2))
2rem
>>> print (Rem (2.5))
2.5rem
>>> print (Rem (-2))
-2rem
>>> print (abs (Rem (-2)))
2rem
"""
unit_name = None
def __init__ (self, value = 0) :
self.value = value
# end def __init__
def __abs__ (self) :
return self.__class__ (abs (self.value))
# end def __abs__
def __add__ (self, rhs) :
if not isinstance (rhs, self.__class__) :
raise TypeError \
( "Cannot add %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value + rhs.value)
# end def __add__
def __truediv__ (self, rhs) :
if not isinstance (rhs, (int, float)) :
raise TypeError \
( "Cannot divide %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value / rhs)
# end def __truediv__
def __eq__ (self, rhs) :
ru = getattr (rhs, "unit_name", None)
if self.unit_name == ru :
return self.value == rhs.value
else :
if not rhs :
return not self
return False
# end def __eq__
def __float__ (self) :
return float (self.value)
# end def __float__
def __floordiv__ (self, rhs) :
if not isinstance (rhs, (int, float)) :
raise TypeError \
( "Cannot divide %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value // rhs)
# end def __floordiv__
def __int__ (self) :
return int (self.value)
# end def __int__
def __hash__ (self) :
        return hash ((self.unit_name, self.value))
# end def __hash__
def __mod__ (self, rhs) :
if not isinstance (rhs, (int, float)) :
raise TypeError \
( "Cannot take remainer of %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value % rhs)
# end def __mod__
def __mul__ (self, rhs) :
if not isinstance (rhs, (int, float)) :
raise TypeError \
( "Cannot multiply %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value * rhs)
# end def __mul__
__rmul__ = __mul__
def __neg__ (self) :
return self.__class__ (- self.value)
# end def __neg__
def __bool__ (self) :
return bool (self.value)
# end def __bool__
def __pos__ (self) :
return self
# end def __pos__
def __sub__ (self, rhs) :
if not isinstance (rhs, self.__class__) :
raise TypeError \
( "Cannot subtract %r and %r objects"
% (self.__class__.__name__, rhs.__class__.__name__)
)
return self.__class__ (self.value - rhs.value)
# end def __sub__
def __str__ (self) :
if self.value :
return "%s%s" % (self.value, self.unit_name)
else :
return "0"
# end def __str__
# end class _Length_
_length_keywords = set (("auto", "inherit"))
def Length (v) :
"""Convert strings and length objects to the appropriate `_Length_` instances.
>>> print (Length ("1em"))
1em
>>> print (Length ("1ch"))
1ch
>>> print (Length ("1vw"))
1vw
>>> print (Length (Px (5)))
5px
>>> print (Length (0))
0
>>> print (Length (0.0))
0
>>> print (Length ("0"))
0
>>> print (Length (1))
Traceback (most recent call last):
...
ValueError: 1
>>> print (Length ("1"))
Traceback (most recent call last):
...
ValueError: 1
>>> print (Length ("1vx"))
Traceback (most recent call last):
...
ValueError: 1vx
"""
if v in (0, "0") :
result = Px (0)
elif isinstance (v, pyk.string_types) :
pat = _Length_.Pat
v = v.strip ()
if v in _length_keywords :
result = v
elif pat.match (v) :
T = _Length_._Unit_Map [pat.unit.lower ()]
n = pat.number
result = T (float (n) if ("." in n) else int (n, 10))
else :
raise ValueError (v)
elif isinstance (v, _Length_) :
result = v
else :
raise ValueError (v)
return result
# end def Length
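# Editor's illustrative sketch (not part of the original module): `Length`
# resolves units through the registry built by the metaclass `M_Length`;
# every concrete `_Length_` subclass defined below registers its `unit_name`
# there, and `Pat` is generated from that map.
def _demo_unit_registry () :
    pat = _Length_.Pat
    if pat.match ("2.5rem") :
        return _Length_._Unit_Map [pat.unit.lower ()], pat.number # -> (Rem, "2.5")
# end def _demo_unit_registry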
class Ch (_Length_) :
"""Relative CSS length unit: width of the "0" glyph in the element's font."""
# end class Ch
class Cm (_Length_) :
"""Absolute CSS length unit: centimeters."""
# end class Cm
class Em (_Length_) :
"""Relative CSS length unit: font size of the element."""
# end class Em
class Ex (_Length_) :
"""Relative CSS length unit: x-height of the element's font."""
# end class Ex
class In (_Length_) :
"""Absolute CSS length unit: inches."""
# end class In
class Mm (_Length_) :
"""Absolute CSS length unit: millimeters."""
# end class Mm
class Pc (_Length_) :
"""Absolute CSS length unit: picas (1 pc == 12 points)."""
# end class Pc
class Percent (_Length_) :
"""Relative CSS unit: percentages."""
unit_name = "%"
def __add__ (self, rhs) :
if isinstance (rhs, (int, float)) :
return self.__class__ (self.value + rhs)
return self.__super.__add__ (rhs)
# end def __add__
def __sub__ (self, rhs) :
if isinstance (rhs, (int, float)) :
return self.__class__ (self.value - rhs)
return self.__super.__sub__ (rhs)
# end def __sub__
# end class Percent
class Pt (_Length_) :
"""Absolute CSS length unit: points (1pt == 1/72 inch)."""
# end class Pt
class Px (_Length_) :
"""Relative CSS length unit: pixels (1px == 1/96 inch)."""
# end class Px
class Rem (_Length_) :
"""Relative CSS3 length unit: font size of the element relative to the root font size."""
# end class Rem
class Vh (_Length_) :
"""Relative CSS3 length unit: 1/100th of the viewport's height."""
# end class Vh
class Vm (_Length_) :
"""Relative CSS3 length unit: minimum of `Vh` or `Vw`."""
# end class Vm
class Vw (_Length_) :
"""Relative CSS3 length unit: 1/100th of the viewport's width."""
# end class Vw
@pyk.adapt__div__
class TRBL0 (CHJ.CSS._TRBL0_) :
"""Top/right/bottom/left spec, undefined values are 0.
>>> print (TRBL0 (0))
0
>>> print (TRBL0 (Px (1), Px (2), Px (1), Px (2)))
1px 2px
>>> print (TRBL0 (Px (1), Px (2), Px (3), Px (2)))
1px 2px 3px
>>> print (TRBL0 (Px (1), Px (2), Px (3), Px (4)))
1px 2px 3px 4px
>>> print (TRBL0 (Px (1), Em (1)))
1px 1em 0 0
>>> print (TRBL0 (Px (1), 0, Em (1)))
1px 0 1em
>>> print (TRBL0 (Px (1), 0, Px (1)))
1px 0
>>> print (TRBL0 (t = Px (1)))
1px 0 0
>>> print (TRBL0 (r = Px (1)))
0 1px 0 0
>>> print (TRBL0 (b = Px (1)))
0 0 1px
>>> print (TRBL0 (l = Px (1)))
0 0 0 1px
>>> print (TRBL0 (default = Px(2)))
2px
>>> print (TRBL0 (t = Px (1), default = Px(2)))
1px 2px 2px
>>> print (TRBL0 (r = Px (1), default = Px(2)))
2px 1px 2px 2px
>>> print (TRBL0 (b = Px (1), default = Px(2)))
2px 2px 1px
>>> print (TRBL0 (l = Px (1), default = Px(2)))
2px 2px 2px 1px
"""
default = 0
Type = staticmethod (Length)
def __abs__ (self) :
return self.__class__ (* tuple (abs (v) for v in self.values))
# end def __abs__
def __add__ (self, rhs) :
if isinstance (rhs, _Length_) :
rhs = (rhs, ) * 4
elif isinstance (rhs, (tuple, list)) :
rhs = self.__class__ (* rhs)
return self.__class__ \
(* tuple (v + r for v, r in zip (self.values, rhs)))
# end def __add__
def __truediv__ (self, rhs) :
return self.__class__ (* tuple (v / rhs for v in self.values))
# end def __truediv__
def __floordiv__ (self, rhs) :
return self.__class__ (* tuple (v // rhs for v in self.values))
# end def __floordiv__
def __mod__ (self, rhs) :
return self.__class__ (* tuple (v % rhs for v in self.values))
# end def __mod__
def __mul__ (self, rhs) :
return self.__class__ (* tuple (v * rhs for v in self.values))
# end def __mul__
__rmul__ = __mul__
def __neg__ (self) :
return self.__class__ (* tuple (-v for v in self.values))
# end def __neg__
def __pos__ (self) :
return self
# end def __pos__
def __sub__ (self, rhs) :
if isinstance (rhs, _Length_) :
rhs = (rhs, ) * 4
elif isinstance (rhs, (tuple, list)) :
rhs = self.__class__ (* rhs)
return self.__class__ \
(* tuple (v - r for v, r in zip (self.values, rhs)))
# end def __sub__
# end class TRBL0
class TRBL (CHJ.CSS._TRBL_, TRBL0) :
"""Top/right/bottom/left spec, repeated values.
>>> print (TRBL ())
0
>>> print (TRBL (Em (1)))
1em
>>> print (TRBL (Em (1), Px (2)))
1em 2px
>>> print (TRBL (Em (1), Px (2), Ex (3)))
1em 2px 3ex
>>> print (TRBL (Em (1), Px (2), Ex (3)).l)
2px
>>> print (TRBL (Em (1), Px (2), Ex (3), Cm (4)))
1em 2px 3ex 4cm
>>> print (TRBL (Em (1), Px (0), Ex (3), Cm (4)))
1em 0 3ex 4cm
>>> print (-TRBL (Em (1), Px (2)))
-1em -2px
>>> print (TRBL ("1em", "8vw"))
1em 8vw
>>> print (TRBL ("1em", "8vw") + (Em (3), "7vw"))
4em 15vw
>>> print (TRBL ("1em", "8em") + Em (4))
5em 12em
>>> print (TRBL ("1em", "8em") * 2)
2em 16em
>>> print ((TRBL ("1em", "8em") * 2) - Em (1))
1em 15em
>>> print (4 * TRBL ("1em", "8em"))
4em 32em
>>> print (TRBL ("1em", "auto"))
1em auto
"""
# end class TRBL
@pyk.adapt__bool__
class HV (TFL.Meta.Object) :
"""Horizontal/vertical pair of `Length` of `TRBL`.
>>> print (HV (Px (100), Px (50)))
100px / 50px
>>> print (HV (TRBL (Px (5), Px (10)), TRBL (Px (10), Px (5), Px (10))))
5px 10px / 10px 5px
"""
def __init__ (self, h, v) :
self.h = h
self.v = v
# end def __init__
def __bool__ (self) :
return self.h or self.v
# end def __bool__
def __str__ (self) :
return "%s / %s" % (self.h, self.v)
# end def __str__
# end class HV
__all__ = tuple \
( k for (k, v) in pyk.iteritems (globals ())
if getattr (v, "unit_name", None)
) + ("Length", "TRBL0", "TRBL", "HV")
if __name__ != "__main__" :
CHJ.CSS._Export (* __all__)
### __END__ CHJ.CSS.Length
/pulsarbat-0.0.9.tar.gz/pulsarbat-0.0.9/docs/development.rst
:html_theme.sidebar_secondary.remove:
:nosearch:
===========
Development
===========
Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. This page assumes the reader has some familiarity with contributing to open-source Python projects using GitHub.
Pulsarbat could always use more documentation, especially in the form of worked examples.
For bug reports and feature requests, create an issue on GitHub here: https://github.com/theXYZT/pulsarbat/issues
Developer Workflow
------------------
To contribute changes (fixing bugs, adding features), we follow a typical `GitHub workflow <https://docs.github.com/en/get-started/quickstart/github-flow>`_:
* Create a personal fork of the repository.
* Create a branch (preferably with an informative name).
* Make changes, test your contributions and document them!
* Open a pull request.
* Iterate until changes pass various linters and checks.
* Work through code review until your PR is accepted and merged.
Deploying
---------
A reminder for the maintainers on how to deploy a release:
* Make sure all changes are committed.
* Update changelog in ``HISTORY.rst``.
* Update package version in ``pulsarbat/__init__.py`` either manually or
using ``bump2version``.
* Create a tagged commit with tag: ``vX.Y.Z`` and push tags to origin (see the example commands below).
A tagged commit should automatically publish the package to PyPI via
Github Actions.
* Create a release on Github on the tagged commit (this will trigger Zenodo).
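For example, the tagging step might look like this (the version number is illustrative):
.. code-block:: console
    $ git tag -a v1.2.3 -m "Version 1.2.3"
    $ git push origin --tags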
Useful Things
-------------
This section is an informal reference for contributors and maintainers on how
to do some development-related things.
Testing and Coverage
^^^^^^^^^^^^^^^^^^^^
From the root directory of the repository, run:
.. code-block:: console
$ pytest --cov-report=html --cov=pulsarbat
Then open ``htmlcov/index.html`` in your browser to look at test coverage.
Building Docs
^^^^^^^^^^^^^
From ``docs/``, run:
.. code-block:: console
$ make clean
$ make html && python -m http.server --directory _build/html 9000
Then open ``http://localhost:9000/`` in your browser to access the built docs.
/qiskit_qryd_provider-0.4.5.tar.gz/qiskit_qryd_provider-0.4.5/qiskit_qryd_provider/pcp_gate.py
from cmath import exp
from typing import List
from typing import TYPE_CHECKING
from qiskit.circuit import Gate
from qiskit.circuit import Parameter
from qiskit.circuit import QuantumCircuit
from qiskit.circuit.equivalence_library import SessionEquivalenceLibrary
from qiskit.circuit.library import CPhaseGate
from qiskit.quantum_info import Kraus
if TYPE_CHECKING:
import qiskit
class PCPGate(Gate):
r"""Implements the phase-shifted controlled-phase gate (PCP).
This class implements an idealized version of a controlled-phase gate as it can
potentially be realized by the Rydberg platform. Similarly to the :class:`PCZGate`,
the gate is hereby only realized up to single-qubit phase gates. The phase shift
:math:`\theta` depends on the phase :math:`\lambda` of the gate.
Unitary matrix representation:
.. math::
PCP =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & e^{i\theta(\lambda)} & 0 & 0 \\
0 & 0 & e^{i\theta(\lambda)} & 0 \\
0 & 0 & 0 & e^{i(2\theta(\lambda)+\lambda)}
\end{pmatrix}
Note that the :math:`\lambda`-dependence that is implemented in this class is only
accurate for :math:`\lambda \leq \pi`.
.. testcode::
from qiskit_qryd_provider import PCPGate
import numpy as np
assert np.round(PCPGate.get_theta(np.pi), 2) == 2.13
"""
def __init__(self, lam: float, label: str = None) -> None:
"""Create a new PCP gate.
Args:
lam: Phase of the gate.
label: Optional label for the gate.
"""
super().__init__("pcp", 2, [lam], label=label)
def _define(self) -> None:
"""Define the gate."""
qc = QuantumCircuit(2)
lam = self.params[0]
theta = self.get_theta(lam)
qc.u(0, 0, theta, 0)
qc.u(0, 0, theta, 1)
qc.cp(lam, 0, 1)
self.definition = qc
def to_matrix(self) -> List[List[complex]]:
"""Return the unitary matrix of the gate.
Returns:
A two-dimensional array for the gate unitary matrix.
"""
lam = self.params[0]
theta = self.get_theta(lam)
return [
[1, 0, 0, 0],
[0, exp(1j * theta), 0, 0],
[0, 0, exp(1j * theta), 0],
            [0, 0, 0, exp(2j * theta + 1j * lam)],
]
def to_kraus(self) -> "qiskit.circuit.Instruction":
"""Return the Kraus representation of the gate.
Raises:
NotImplementedError: If the Kraus representation is not set.
Returns:
An instruction encapsulating the Kraus representation.
"""
if self._kraus is None:
raise NotImplementedError("The Kraus representation is not implemented.")
return self._kraus
@classmethod
def _init_theta(cls) -> None:
r"""Initialize the :math:`\lambda`-dependent phase shift of the gate.
Note that after the initialization, a
decomposition of the PCP gate is stored to Qiskit's
SessionEquivalenceLibrary.
"""
cls._lam = Parameter("lambda")
cls._theta = (
5.11382
- 0.32933
* (
1.63085 * cls._lam * cls._lam * (2 * cls._lam).exp()
+ cls._lam
+ 0.02899
).log()
)
# Reset equivalence library
default = []
for c in SessionEquivalenceLibrary.get_entry(CPhaseGate(cls._lam)):
if not c.get_instructions("pcp"):
default.append(c)
SessionEquivalenceLibrary.set_entry(CPhaseGate(cls._lam), default)
SessionEquivalenceLibrary.set_entry(PCPGate(cls._lam), [])
# Attach new decomposition to the equivalence library
def_pcp_cz = QuantumCircuit(2)
def_pcp_cz.append(PCPGate(cls._lam), [0, 1])
def_pcp_cz.u(0, 0, -cls._theta, 0)
def_pcp_cz.u(0, 0, -cls._theta, 1)
SessionEquivalenceLibrary.add_equivalence(CPhaseGate(cls._lam), def_pcp_cz)
def_cz_pcp = QuantumCircuit(2)
def_cz_pcp.append(CPhaseGate(cls._lam), [0, 1])
def_cz_pcp.u(0, 0, cls._theta, 0)
def_cz_pcp.u(0, 0, cls._theta, 1)
SessionEquivalenceLibrary.add_equivalence(PCPGate(cls._lam), def_cz_pcp)
@classmethod
def get_theta(cls, lam: float) -> float:
r"""Get the phase shift of the gate for a given phase :math:`\lambda`.
Args:
lam: Phase of the gate.
Returns:
Angle of the phase shift.
"""
return float(cls._theta.assign(cls._lam, lam))
@classmethod
def set_kraus(cls, kraus: List[List[List[complex]]] = None) -> None:
"""Set the Kraus representation of the gate.
Args:
kraus: A three-dimensional array encoding the Kraus representation of the
gate.
"""
if kraus is not None:
cls._kraus = Kraus(kraus).to_instruction()
else:
cls._kraus = None
PCPGate._init_theta()
PCPGate.set_kraus(None)
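# Editor's illustrative sketch (not part of the original module): using the gate
# in a circuit and querying the lambda-dependent phase shift. The angle is
# arbitrary; transpilation relies on the equivalences registered in _init_theta().
def _demo_pcp_gate():
    import numpy as np

    qc = QuantumCircuit(2)
    qc.append(PCPGate(np.pi / 2), [0, 1])
    theta = PCPGate.get_theta(np.pi / 2)
    return qc, theta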
/ivy-testing-release-0.0.0.1.tar.gz/ivy-testing-release-0.0.0.1/ivy/functional/frontends/paddle/tensor/tensor.py
import ivy
import ivy.functional.frontends.paddle as paddle_frontend
from ivy.func_wrapper import (
with_supported_dtypes,
with_unsupported_dtypes,
)
from ivy.functional.frontends.paddle.func_wrapper import _to_ivy_array
class Tensor:
def __init__(self, array, dtype=None, place="cpu", stop_gradient=True):
self._ivy_array = (
ivy.array(array, dtype=dtype, device=place)
if not isinstance(array, ivy.Array)
else array
)
self._dtype = dtype
self._place = place
self._stop_gradient = stop_gradient
def __repr__(self):
return (
str(self._ivy_array.__repr__())
.replace("ivy.array", "ivy.frontends.paddle.Tensor")
.replace("dev", "place")
)
# Properties #
# ---------- #
@property
def ivy_array(self):
return self._ivy_array
@property
def place(self):
return self.ivy_array.device
@property
def dtype(self):
return self._ivy_array.dtype
@property
def shape(self):
return self._ivy_array.shape
@property
def ndim(self):
return self.dim()
# Setters #
# --------#
@ivy_array.setter
def ivy_array(self, array):
self._ivy_array = (
ivy.array(array) if not isinstance(array, ivy.Array) else array
)
# Special Methods #
# -------------------#
def __getitem__(self, item):
ivy_args = ivy.nested_map([self, item], _to_ivy_array)
ret = ivy.get_item(*ivy_args)
return paddle_frontend.Tensor(ret)
def __setitem__(self, item, value):
raise ivy.utils.exceptions.IvyException(
"ivy.functional.frontends.paddle.Tensor object doesn't support assignment"
)
def __iter__(self):
if self.ndim == 0:
raise TypeError("iteration over a 0-d tensor not supported")
for i in range(self.shape[0]):
yield self[i]
# Instance Methods #
# ---------------- #
def reshape(self, *args, shape=None):
if args and shape:
raise TypeError("reshape() got multiple values for argument 'shape'")
if shape is not None:
return paddle_frontend.reshape(self._ivy_array, shape)
if args:
if isinstance(args[0], (tuple, list)):
shape = args[0]
return paddle_frontend.reshape(self._ivy_array, shape)
else:
return paddle_frontend.reshape(self._ivy_array, args)
return paddle_frontend.reshape(self._ivy_array)
def dim(self):
return self.ivy_array.ndim
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def abs(self):
return paddle_frontend.abs(self)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def acosh(self, name=None):
return paddle_frontend.Tensor(ivy.acosh(self._ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def ceil(self):
return paddle_frontend.ceil(self)
@with_unsupported_dtypes({"2.5.1 and below": ("complex", "int8")}, "paddle")
def numel(self):
return paddle_frontend.numel(self)
@with_unsupported_dtypes({"2.5.1 and below": ("float16",)}, "paddle")
def asinh(self, name=None):
return paddle_frontend.Tensor(ivy.asinh(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def asin(self, name=None):
return paddle_frontend.Tensor(ivy.asin(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def cosh(self, name=None):
return paddle_frontend.Tensor(ivy.cosh(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def log(self, name=None):
return paddle_frontend.Tensor(ivy.log(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def sin(self, name=None):
return paddle_frontend.Tensor(ivy.sin(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def sinh(self, name=None):
return paddle_frontend.Tensor(ivy.sinh(self._ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def argmax(self, axis=None, keepdim=False, dtype=None, name=None):
return paddle_frontend.Tensor(
ivy.argmax(self._ivy_array, axis=axis, keepdims=keepdim, dtype=dtype)
)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def sqrt(self, name=None):
return paddle_frontend.Tensor(ivy.sqrt(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def cos(self, name=None):
return paddle_frontend.Tensor(ivy.cos(self._ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def exp(self, name=None):
return paddle_frontend.Tensor(ivy.exp(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def erf(self, name=None):
return paddle_frontend.Tensor(ivy.erf(self._ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def subtract(self, y, name=None):
return paddle_frontend.Tensor(ivy.subtract(self._ivy_array, _to_ivy_array(y)))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def log10(self, name=None):
return paddle_frontend.Tensor(ivy.log10(self._ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def argsort(self, axis=-1, descending=False, name=None):
return paddle_frontend.Tensor(
ivy.argsort(self._ivy_array, axis=axis, descending=descending)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def floor(self, name=None):
return paddle_frontend.Tensor(ivy.floor(self._ivy_array))
@with_supported_dtypes(
{"2.5.1 and below": ("float32", "float64", "int32", "int64")}, "paddle"
)
def clip(self, min=None, max=None, name=None):
ivy.utils.assertions.check_all_or_any_fn(
min,
max,
fn=ivy.exists,
type="any",
limit=[1, 2],
message="at most one of min or max can be None",
)
if min is None:
ret = ivy.minimum(self._ivy_array, max)
elif max is None:
ret = ivy.maximum(self._ivy_array, min)
else:
ret = ivy.clip(self._ivy_array, min, max)
return paddle_frontend.Tensor(ret)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def floor_(self):
return paddle_frontend.Tensor(ivy.floor(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def tanh(self, name=None):
return paddle_frontend.Tensor(ivy.tanh(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
    def add_(self, y, name=None):
        return paddle_frontend.Tensor(ivy.add(self._ivy_array, _to_ivy_array(y)))
@with_supported_dtypes(
{"2.5.1 and below": ("float16", "float32", "float64", "int32", "int64")},
"paddle",
)
def isinf(self, name=None):
return paddle_frontend.Tensor(ivy.isinf(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def square(self, name=None):
return paddle_frontend.Tensor(ivy.square(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def cholesky(self, upper=False, name=None):
return paddle_frontend.Tensor(ivy.cholesky(self._ivy_array, upper=upper))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def multiply(self, y, name=None):
return paddle_frontend.multiply(self, y)
@with_supported_dtypes(
{"2.5.1 and below": ("float16", "float32", "float64", "int32", "int64")},
"paddle",
)
def isfinite(self, name=None):
return paddle_frontend.Tensor(ivy.isfinite(self._ivy_array))
@with_supported_dtypes({"2.4.2 and below": ("float16", "bfloat16")}, "paddle")
def all(self, axis=None, keepdim=False, dtype=None, name=None):
return paddle_frontend.Tensor(
ivy.all(self.ivy_array, axis=axis, keepdims=keepdim, dtype=dtype)
)
@with_supported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def allclose(self, other, rtol=1e-05, atol=1e-08, equal_nan=False, name=None):
return paddle_frontend.Tensor(
ivy.allclose(
self._ivy_array, other, rtol=rtol, atol=atol, equal_nan=equal_nan
)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def sort(self, axis=-1, descending=False, name=None):
return paddle_frontend.Tensor(
ivy.sort(self._ivy_array, axis=axis, descending=descending)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def log1p(self, name=None):
return ivy.log1p(self._ivy_array)
@with_supported_dtypes(
{
"2.4.2 and below": (
"bool",
"uint8",
"int8",
"int16",
"int32",
"int64",
)
},
"paddle",
)
def bitwise_and(self, y, out=None, name=None):
return paddle_frontend.bitwise_and(self, y)
@with_supported_dtypes(
{
"2.5.1 and below": (
"bool",
"int8",
"int16",
"int32",
"int64",
"float32",
"float64",
)
},
"paddle",
)
def logical_or(self, y, out=None, name=None):
return paddle_frontend.logical_or(self, y, out=out)
@with_supported_dtypes(
{"2.5.1 and below": ("bool", "uint8", "int8", "int16", "int32", "int64")},
"paddle",
)
def bitwise_xor(self, y, out=None, name=None):
return paddle_frontend.bitwise_xor(self, y)
@with_supported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def any(self, axis=None, keepdim=False, name=None):
return paddle_frontend.Tensor(
ivy.any(self._ivy_array, axis=axis, keepdims=keepdim)
)
@with_unsupported_dtypes({"2.5.1 and below": "bfloat16"}, "paddle")
def astype(self, dtype):
return paddle_frontend.Tensor(ivy.astype(self._ivy_array, dtype))
@with_supported_dtypes(
{"2.5.1 and below": ("bool", "uint8", "int8", "int16", "int32", "int64")},
"paddle",
)
def bitwise_not(self, out=None, name=None):
return paddle_frontend.Tensor(ivy.bitwise_invert(self._ivy_array, out=out))
@with_supported_dtypes(
{
"2.5.1 and below": (
"bool",
"int8",
"int16",
"int32",
"int64",
)
},
"paddle",
)
def bitwise_or(self, y, out=None, name=None):
return paddle_frontend.bitwise_or(self, y, out=out)
@with_supported_dtypes(
{
"2.5.1 and below": (
"bool",
"int8",
"int16",
"int32",
"int64",
"float32",
"float64",
)
},
"paddle",
)
def logical_xor(self, y, out=None, name=None):
return paddle_frontend.logical_xor(self, y, out=out)
@with_supported_dtypes(
{"2.5.1 and below": ("float16", "float32", "float64", "int32", "int64")},
"paddle",
)
def isnan(self, name=None):
return paddle_frontend.isnan(self)
@with_unsupported_dtypes(
{
"2.5.1 and below": (
"bool",
"uint8",
"int8",
"int16",
"complex64",
"complex128",
)
},
"paddle",
)
def greater_than(self, y, name=None):
return paddle_frontend.greater_than(self, y)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def rsqrt(self, name=None):
return paddle_frontend.Tensor(ivy.reciprocal(ivy.sqrt(self._ivy_array)))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def reciprocal(self, name=None):
return paddle_frontend.reciprocal(self)
@with_supported_dtypes(
{
"2.5.1 and below": (
"bool",
"int8",
"int16",
"int32",
"int64",
"float32",
"float64",
)
},
"paddle",
)
def logical_and(self, y, out=None, name=None):
return paddle_frontend.logical_and(self, y, out=out)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def divide(self, y, name=None):
return paddle_frontend.divide(self, y)
@with_unsupported_dtypes(
{
"2.5.1 and below": (
"bool",
"uint8",
"int8",
"int16",
"complex64",
"complex128",
)
},
"paddle",
)
def less_than(self, y, name=None):
return paddle_frontend.less_than(self, y)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def cumprod(self, dim=None, dtype=None, name=None):
return paddle_frontend.Tensor(
ivy.cumprod(self._ivy_array, axis=dim, dtype=dtype)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def cumsum(self, axis=None, dtype=None, name=None):
return paddle_frontend.Tensor(
ivy.cumsum(self._ivy_array, axis=axis, dtype=dtype)
)
@with_supported_dtypes(
{"2.5.1 and below": ("complex64", "complex128", "float32", "float64")},
"paddle",
)
def angle(self, name=None):
return paddle_frontend.Tensor(ivy.angle(self._ivy_array))
@with_unsupported_dtypes(
{
"2.5.1 and below": (
"uint8",
"int8",
"int16",
"complex64",
"complex128",
)
},
"paddle",
)
def equal(self, y, name=None):
return paddle_frontend.equal(self, y)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def rad2deg(self, name=None):
return paddle_frontend.Tensor(ivy.rad2deg(self._ivy_array))
@with_unsupported_dtypes(
{
"2.5.1 and below": (
"uint8",
"int8",
"int16",
"float16",
"complex64",
"complex128",
)
},
"paddle",
)
def equal_all(self, y, name=None):
return paddle_frontend.Tensor(
ivy.array_equal(self._ivy_array, _to_ivy_array(y))
)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def maximum(self, other, name=None):
return ivy.maximum(self._ivy_array, other)
@with_unsupported_dtypes({"2.5.1 and below": "bfloat16"}, "paddle")
def fmax(self, y, name=None):
return paddle_frontend.Tensor(ivy.fmax(self._ivy_array, _to_ivy_array(y)))
@with_unsupported_dtypes({"2.5.1 and below": "bfloat16"}, "paddle")
def fmin(self, y, name=None):
return paddle_frontend.Tensor(ivy.fmin(self._ivy_array, _to_ivy_array(y)))
@with_supported_dtypes(
{"2.5.1 and below": ("float32", "float64", "int32", "int64")}, "paddle"
)
def minimum(self, y, name=None):
return paddle_frontend.Tensor(ivy.minimum(self._ivy_array, _to_ivy_array(y)))
@with_supported_dtypes(
{"2.5.1 and below": ("float32", "float64", "int32", "int64")}, "paddle"
)
def max(self, axis=None, keepdim=False, name=None):
return paddle_frontend.Tensor(
ivy.max(self._ivy_array, axis=axis, keepdims=keepdim)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def deg2rad(self, name=None):
return paddle_frontend.Tensor(ivy.deg2rad(self._ivy_array))
@with_supported_dtypes(
{"2.5.1 and below": ("float32", "float64", "int32", "int64", "bool")}, "paddle"
)
def rot90(self, k=1, axes=(0, 1), name=None):
return paddle_frontend.Tensor(ivy.rot90(self._ivy_array, k=k, axes=axes))
@with_supported_dtypes(
{"2.5.1 and below": ("complex64", "complex128")},
"paddle",
)
def imag(self, name=None):
return paddle_frontend.imag(self)
def is_tensor(self):
return paddle_frontend.is_tensor(self._ivy_array)
@with_supported_dtypes(
{
"2.5.1 and below": (
"float32",
"float64",
)
},
"paddle",
)
def isclose(self, y, rtol=1e-05, atol=1e-08, equal_nan=False, name=None):
return paddle_frontend.isclose(
self, y, rtol=rtol, atol=atol, equal_nan=equal_nan
)
@with_supported_dtypes({"2.5.1 and below": ("int32", "int64")}, "paddle")
def floor_divide(self, y, name=None):
return paddle_frontend.Tensor(
ivy.floor_divide(self._ivy_array, _to_ivy_array(y))
)
# cond
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def cond(self, p=None, name=None):
return paddle_frontend.cond(self, p=p, name=name)
@with_unsupported_dtypes({"2.4.2 and below": ("int16", "float16")}, "paddle")
def conj(self, name=None):
return paddle_frontend.Tensor(ivy.conj(self._ivy_array))
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def log2(self, name=None):
return paddle_frontend.Tensor(ivy.log2(self._ivy_array))
@with_unsupported_dtypes(
{"2.4.2 and below": ("float32", "float64", "int32", "int64")}, "paddle"
)
def neg(self, name=None):
return paddle_frontend.neg(self)
@with_supported_dtypes(
{
"2.5.1 and below": (
"bool",
"int8",
"int16",
"int32",
"int64",
"float32",
"float64",
)
},
"paddle",
)
def logical_not(self, out=None, name=None):
return paddle_frontend.Tensor(ivy.logical_not(self.ivy_array))
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def sign(self, name=None):
return ivy.sign(self._ivy_array)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def var(self, axis=None, unbiased=True, keepdim=False, name=None):
return paddle_frontend.Tensor(
ivy.var(
self._ivy_array, axis=axis, correction=int(unbiased), keepdims=keepdim
)
)
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def sgn(self, name=None):
return paddle_frontend.Tensor(ivy.sign(self._ivy_array, np_variant=True))
def tolist(self):
return paddle_frontend.Tensor(ivy.to_list(self._ivy_array))
@with_supported_dtypes(
{"2.5.1 and below": ("float32", "float64", "int32", "int64")},
"paddle",
)
def min(self, axis=None, keepdim=False, name=None):
return ivy.min(self._ivy_array, axis=axis, keepdims=keepdim)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def atan(self, name=None):
return ivy.atan(self._ivy_array)
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def atanh(self, name=None):
return ivy.atanh(self._ivy_array)
@with_unsupported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
def std(self, axis=None, unbiased=True, keepdim=False, name=None):
return paddle_frontend.Tensor(
ivy.std(self._ivy_array, axis=axis, keepdims=keepdim)
)
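# Editor's illustrative sketch (not part of the original module): constructing a
# frontend Tensor and chaining a few of the methods defined above. Values are
# arbitrary.
def _demo_frontend_tensor():
    t = Tensor([[1.0, 4.0], [9.0, 16.0]])
    roots = t.sqrt()  # element-wise square root, still a frontend Tensor
    col_max = roots.max(axis=0)  # reduce over rows
    return roots, col_max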
/modeltranslation-lokalise-0.2.0.tar.gz/modeltranslation-lokalise-0.2.0/modeltranslation_lokalise/signals.py
from functools import partial
from django.db.models.signals import pre_save, post_save, post_delete
from modeltranslation.translator import translator
from modeltranslation.utils import get_translation_fields
from .lokalise_client import create_or_update_translations, delete_translations
def register_translation(model_class, trans_opts_class):
"""
This function registers model_class as a translatable model into
django-modeltranslation using trans_opts_class as
modelstranslation.TranslationOptions class.
Also, it connects pre_save, post_save and post_delete signals to the
model in order to attend this events and proper update translations on
lokalise.
:param model_class: The class to be registered as translatable.
:param trans_opts_class: modelstranslation.TranslationOptions class
containing translatable fields.
"""
translator.register(model_class, trans_opts_class)
pre_save.connect(
partial(note_down_translatable_fields,
trans_opts_class=trans_opts_class),
sender=model_class,
weak=False,
)
post_save.connect(partial(notify_changes_lokalise,
trans_opts_class=trans_opts_class),
sender=model_class,
weak=False)
post_delete.connect(partial(remove_lokalise_keys,
trans_opts_class=trans_opts_class),
sender=model_class,
weak=False)
def get_lokalise_fields(trans_opts_class):
non_lokalise_fields = getattr(trans_opts_class, 'non_lokalise_fields',
tuple())
lokalise_fields = tuple(t for t in trans_opts_class.fields if
t not in non_lokalise_fields)
return lokalise_fields
def note_down_translatable_fields(sender, instance, **kwargs):
"""
Note down the translatable fields that have been modified on the
updated_trans_fields field of the instance being saved.
:param sender: the instance's model class.
:param instance: object being saved.
:param kwargs: keyword arguments, including trans_opts_class key that
contains the modeltranslation.TranslationOptions which contains the
translatable fields of the model.
"""
instance.updated_trans_fields = []
trans_opts_class = kwargs.get('trans_opts_class')
lokalise_fields = get_lokalise_fields(trans_opts_class)
try:
db_instance = sender.objects.get(pk=instance.pk)
except sender.DoesNotExist:
instance.updated_trans_fields = lokalise_fields
return
for parent_field in lokalise_fields:
trans_fields = get_translation_fields(parent_field)
for field in trans_fields:
if getattr(db_instance, field) != getattr(instance, field):
instance.updated_trans_fields.append(parent_field)
break
def notify_changes_lokalise(**kwargs):
"""
Notify modeltranslation_lokalise about changes on translatable models:
(e.g. create keys for new translatable fields, update values for
existent keys, etc.)
"""
instance = kwargs['instance']
if len(instance.updated_trans_fields) == 0:
# Instance has no translatable fields modified
return
lokalise_fields = get_lokalise_fields(kwargs.get('trans_opts_class'))
create_or_update_translations(list(lokalise_fields), instance)
def remove_lokalise_keys(**kwargs):
"""
Remove all keys on lokalise for a given instance.
:param sender: the instance's model class.
:param instance: object being saved.
"""
delete_translations(kwargs['instance'])
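# Editor's illustrative sketch (not part of the original module). The model and
# field names are hypothetical; in a real project this registration would live
# in a translation.py module or an AppConfig.ready() hook.
def _example_register_article(article_model):
    from modeltranslation.translator import TranslationOptions

    class ArticleTranslationOptions(TranslationOptions):
        fields = ("title", "body")
        non_lokalise_fields = ("body",)  # translated locally, never pushed to Lokalise

    register_translation(article_model, ArticleTranslationOptions)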
/open-nre-0.1.1.tar.gz/open-nre-0.1.1/opennre/module/pool/max_pool.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class MaxPool(nn.Module):
def __init__(self, kernel_size, segment_num=None):
"""
Args:
input_size: dimention of input embedding
kernel_size: kernel_size for CNN
padding: padding for CNN
hidden_size: hidden size
"""
super().__init__()
self.segment_num = segment_num
        if self.segment_num is not None:
self.mask_embedding = nn.Embedding(segment_num + 1, segment_num)
self.mask_embedding.weight.data.copy_(torch.FloatTensor(np.concatenate([np.zeros((1, segment_num)), np.identity(segment_num)], axis=0)))
self.mask_embedding.weight.requires_grad = False
self._minus = -100
self.pool = nn.MaxPool1d(kernel_size)
def forward(self, x, mask=None):
"""
Args:
input features: (B, L, I_EMBED)
Return:
output features: (B, H_EMBED)
"""
# Check size of tensors
if mask is None or self.segment_num is None or self.segment_num == 1:
x = x.transpose(1, 2) # (B, L, I_EMBED) -> (B, I_EMBED, L)
x = self.pool(x).squeeze(-1) # (B, I_EMBED, 1) -> (B, I_EMBED)
return x
else:
B, L, I_EMBED = x.size()[:3]
# mask = 1 - self.mask_embedding(mask).transpose(1, 2).unsqueeze(2) # (B, L) -> (B, L, S) -> (B, S, L) -> (B, S, 1, L)
# x = x.transpose(1, 2).unsqueeze(1) # (B, L, I_EMBED) -> (B, I_EMBED, L) -> (B, 1, I_EMBED, L)
# x = (x + self._minus * mask).contiguous().view([-1, I_EMBED, L]) # (B, S, I_EMBED, L) -> (B * S, I_EMBED, L)
# x = self.pool(x).squeeze(-1) # (B * S, I_EMBED, 1) -> (B * S, I_EMBED)
# x = x.view([B, -1]) # (B, S * I_EMBED)
# return x
mask = 1 - self.mask_embedding(mask).transpose(1, 2)
x = x.transpose(1, 2)
pool1 = self.pool(x + self._minus * mask[:, 0:1, :])
pool2 = self.pool(x + self._minus * mask[:, 1:2, :])
pool3 = self.pool(x + self._minus * mask[:, 2:3, :])
x = torch.cat([pool1, pool2, pool3], 1)
# x = x.squeeze(-1)
return x
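# Editor's illustrative sketch (not part of the original module): pooling a batch
# of token features, plain and piecewise (PCNN-style, 3 segments). Shapes in the
# comments follow the code above.
def _demo_max_pool():
    B, L, H = 2, 40, 16
    x = torch.randn(B, L, H)
    plain = MaxPool(kernel_size=L)(x)  # (B, H)
    mask = torch.randint(1, 4, (B, L))  # segment ids in {1, 2, 3}; index 0 is masked out of every segment
    piecewise = MaxPool(kernel_size=L, segment_num=3)(x, mask)  # (B, 3 * H, 1)
    return plain.shape, piecewise.shape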
/dagster_cloud-1.4.11.tar.gz/dagster_cloud-1.4.11/dagster_cloud/workspace/kubernetes/utils.py
import copy
import re
import time
from typing import Optional
import kubernetes
from dagster._utils.merger import merge_dicts
from dagster_k8s.client import DagsterKubernetesClient
from dagster_k8s.models import k8s_model_from_dict
from kubernetes import client
from ..user_code_launcher.utils import (
deterministic_label_for_location,
get_human_readable_label,
unique_resource_name,
)
MANAGED_RESOURCES_LABEL = {"managed_by": "K8sUserCodeLauncher"}
SERVICE_PORT = 4000
def _sanitize_k8s_resource_name(name):
filtered_name = re.sub("[^a-z0-9-]", "", name.lower())
# ensure it doesn't start with a non-alpha character
while filtered_name and re.match("[^a-z].*", filtered_name):
filtered_name = filtered_name[1:]
filtered_name = filtered_name.strip("-")
# always return something that starts with a letter in the unlikely event that everything is
# filtered out (doesn't have to be unique)
return filtered_name or "k8s"
def unique_k8s_resource_name(deployment_name, location_name):
"""https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names.
K8s resource names are restricted, so we must sanitize the location name to not include disallowed characters.
"""
return unique_resource_name(
deployment_name, location_name, length_limit=63, sanitize_fn=_sanitize_k8s_resource_name
)
def get_k8s_human_readable_label(name):
"""https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#syntax-and-character-set.
K8s label values are restricted, so we must sanitize the location name to not include disallowed characters.
These are purely to help humans debug, so they don't need to be unique.
"""
return get_human_readable_label(
name,
length_limit=63,
sanitize_fn=lambda name: (
re.sub("[^a-zA-Z0-9-_.]", "", name).strip("-").strip("_").strip(".")
),
)
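# Editor's illustrative sketch (not part of the original module): what the two
# sanitizers above do to an arbitrary location name. The expected values in the
# comments were worked out from the regexes, not from a live cluster.
def _demo_sanitized_names():
    raw = "My_Location (EU-1)!"
    resource = _sanitize_k8s_resource_name(raw)  # -> "mylocationeu-1"
    label = get_k8s_human_readable_label(raw)  # sanitize_fn keeps "My_LocationEU-1"; the helper may truncate further
    return resource, label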
def construct_code_location_service(deployment_name, location_name, service_name):
return client.V1Service(
metadata=client.V1ObjectMeta(
name=service_name,
labels={
**MANAGED_RESOURCES_LABEL,
"location_hash": deterministic_label_for_location(deployment_name, location_name),
"location_name": get_k8s_human_readable_label(location_name),
"deployment_name": get_k8s_human_readable_label(deployment_name),
},
),
spec=client.V1ServiceSpec(
selector={"user-deployment": service_name},
ports=[client.V1ServicePort(name="grpc", protocol="TCP", port=SERVICE_PORT)],
),
)
def construct_code_location_deployment(
instance,
deployment_name,
location_name,
k8s_deployment_name,
metadata,
container_context,
args,
):
pull_policy = container_context.image_pull_policy
env_config_maps = container_context.env_config_maps
env_secrets = container_context.env_secrets
service_account_name = container_context.service_account_name
image_pull_secrets = container_context.image_pull_secrets
volume_mounts = container_context.volume_mounts
volumes = container_context.volumes
labels = container_context.labels
resources = container_context.resources
scheduler_name = container_context.scheduler_name
security_context = container_context.security_context
env = merge_dicts(
metadata.get_grpc_server_env(
SERVICE_PORT, location_name, instance.ref_for_deployment(deployment_name)
),
container_context.get_environment_dict(),
)
user_defined_config = container_context.get_server_user_defined_k8s_config()
container_config = copy.deepcopy(user_defined_config.container_config)
container_config["args"] = args
if pull_policy:
container_config["image_pull_policy"] = pull_policy
user_defined_env_vars = container_config.pop("env", [])
user_defined_env_from = container_config.pop("env_from", [])
user_defined_volume_mounts = container_config.pop("volume_mounts", [])
user_defined_resources = container_config.pop("resources", {})
user_defined_security_context = container_config.pop("security_context", None)
container_name = container_config.get("name", "dagster")
container_config = {
**container_config,
"name": container_name,
"image": metadata.image,
"env": [
{"name": key, "value": value} for key, value in env.items()
] + user_defined_env_vars,
"env_from": (
[{"config_map_ref": {"name": config_map}} for config_map in env_config_maps]
+ [{"secret_ref": {"name": secret_name}} for secret_name in env_secrets]
+ user_defined_env_from
),
"volume_mounts": volume_mounts + user_defined_volume_mounts,
"resources": user_defined_resources or resources,
"security_context": user_defined_security_context or security_context,
}
pod_spec_config = copy.deepcopy(user_defined_config.pod_spec_config)
user_defined_image_pull_secrets = pod_spec_config.pop("image_pull_secrets", [])
user_defined_service_account_name = pod_spec_config.pop("service_account_name", None)
user_defined_containers = pod_spec_config.pop("containers", [])
user_defined_volumes = pod_spec_config.pop("volumes", [])
user_defined_scheduler_name = pod_spec_config.pop("scheduler_name", None)
pod_spec_config = {
**pod_spec_config,
"image_pull_secrets": [
{"name": x["name"]} for x in image_pull_secrets
] + user_defined_image_pull_secrets,
"service_account_name": user_defined_service_account_name or service_account_name,
"containers": [container_config] + user_defined_containers,
"volumes": volumes + user_defined_volumes,
"scheduler_name": user_defined_scheduler_name or scheduler_name,
}
pod_template_spec_metadata = copy.deepcopy(user_defined_config.pod_template_spec_metadata)
user_defined_pod_template_labels = pod_template_spec_metadata.pop("labels", {})
deployment_dict = {
"metadata": {
"name": k8s_deployment_name,
"labels": {
**MANAGED_RESOURCES_LABEL,
"location_hash": deterministic_label_for_location(deployment_name, location_name),
"location_name": get_k8s_human_readable_label(location_name),
"deployment_name": get_k8s_human_readable_label(deployment_name),
"agent_id": instance.instance_uuid,
},
},
"spec": { # DeploymentSpec
"selector": {"match_labels": {"user-deployment": k8s_deployment_name}},
"template": { # PodTemplateSpec
"metadata": {
**pod_template_spec_metadata,
"labels": {
"user-deployment": k8s_deployment_name,
**labels,
**user_defined_pod_template_labels,
},
},
"spec": pod_spec_config,
},
},
}
return k8s_model_from_dict(
kubernetes.client.V1Deployment,
deployment_dict,
)
def get_container_waiting_reason(pod) -> Optional[str]:
if (not pod.status.container_statuses) or len(pod.status.container_statuses) == 0:
return None
container_waiting_state = pod.status.container_statuses[0].state.waiting
if not container_waiting_state:
return None
return container_waiting_state.reason
def get_deployment_failure_debug_info(
k8s_deployment_name, namespace, core_api_client, pod_list, logger
):
if not pod_list:
return (
"For more information about the failure, run `kubectl describe deployment"
f" {k8s_deployment_name}` in your cluster."
)
pod = pod_list[0]
pod_name = pod.metadata.name
kubectl_prompt = (
f"For more information about the failure, run `kubectl describe pod {pod_name}`"
f" or `kubectl describe deployment {k8s_deployment_name}` in your cluster."
)
pod_debug_info = ""
try:
api_client = DagsterKubernetesClient.production_client(core_api_override=core_api_client)
pod_debug_info = api_client.get_pod_debug_info(
pod_name, namespace, container_name="dagster"
)
except Exception:
logger.exception(
"Error trying to get debug information for failed k8s pod {pod_name}".format(
pod_name=pod_name
)
)
return f"{pod_debug_info}\n\n{kubectl_prompt}" if pod_debug_info else kubectl_prompt
def wait_for_deployment_complete(
k8s_deployment_name,
namespace,
logger,
location_name,
metadata,
timeout,
image_pull_grace_period,
core_api,
):
"""Translated from
https://github.com/kubernetes/kubectl/blob/ac49920c0ccb0dd0899d5300fc43713ee2dfcdc9/pkg/polymorphichelpers/rollout_status.go#L75-L91.
"""
api = client.AppsV1Api(client.ApiClient())
start = time.time()
pod_list = []
while True:
time.sleep(2)
time_elapsed = time.time() - start
if time_elapsed >= timeout:
timeout_message: str = f"Timed out waiting for deployment {k8s_deployment_name}."
debug_info = get_deployment_failure_debug_info(
k8s_deployment_name, namespace, core_api, pod_list, logger
)
if debug_info:
timeout_message = timeout_message + "\n\n" + debug_info
raise Exception(timeout_message)
deployment = api.read_namespaced_deployment(k8s_deployment_name, namespace)
status = deployment.status
spec = deployment.spec
logger.debug(
f"[updated_replicas:{status.updated_replicas},replicas:{status.replicas},available_replicas:{status.available_replicas},observed_generation:{status.observed_generation}]"
" waiting..."
)
logger.debug(f"Status: {status}, spec: {spec}")
if (
status.updated_replicas == spec.replicas # new replicas have been updated
and status.replicas == status.updated_replicas # no old replicas pending termination
and status.available_replicas == status.updated_replicas # updated replicas available
and status.observed_generation >= deployment.metadata.generation # new spec observed
):
return True
pod_list = core_api.list_namespaced_pod(
namespace, label_selector=f"user-deployment={k8s_deployment_name}"
).items
if time_elapsed >= image_pull_grace_period:
for pod in pod_list:
waiting_reason = get_container_waiting_reason(pod)
if (
waiting_reason == "ImagePullBackOff"
or waiting_reason == "ErrImageNeverPull"
or waiting_reason == "CreateContainerConfigError"
):
error_message = f"Error creating deployment for {k8s_deployment_name}."
debug_info = get_deployment_failure_debug_info(
k8s_deployment_name, namespace, core_api, pod_list, logger
)
if debug_info:
error_message = error_message + "\n" + debug_info
raise Exception(error_message)
/Pypicor-1.0.4-py3-none-any.whl/build/lib/build/lib/build/lib/build/lib/build/lib/pypicor/baq.py
import pandas as pd
import requests
from requests.auth import HTTPBasicAuth
from pypicor import credentials as creds
def GetBAQ(baq_name : str, parameters : dict = {}, filters : dict = {}, select : list = []):
    # Run a BAQ on Epicor and return its results
    # str: baq_name = Name of the BAQ to run
    # dict: parameters = Dictionary of parameters to pass to the BAQ
    # dict: filters = Dictionary mapping column names to values for the OData $filter clause
    # list: select = List of column names for the OData $select clause
    # Returns: Pandas DataFrame with the results of the BAQ
    # Example:
    # df = GetBAQ('PartList', parameters={'PartNum': '1234', 'Company': '000'})
url = f"https://centralusdtapp01.epicorsaas.com/SaaS515/api/v2/odata/{creds.EPICOR_COMPANY_ID}/BaqSvc/{baq_name}/Data"
auth = HTTPBasicAuth(creds.EPICOR_USERNAME, creds.EPICOR_USER_PASSWORD)
headers = {
"x-api-key": creds.EPICOR_API_KEY,
"Content-Type": "application/json",
}
hasFilter = len(filters) > 0
hasSelect = len(select) > 0
if hasFilter and hasSelect:
url += '?'
url = _addFilterToUrl(url, filters)
url += '&'
url = _addSelectToUrl(url, select)
elif hasFilter:
url += '?'
url = _addFilterToUrl(url, filters)
elif hasSelect:
url += '?'
url = _addSelectToUrl(url, select)
response = requests.get(url, headers=headers, auth=auth, params=parameters)
response.raise_for_status()  # surface HTTP errors instead of failing later on .json()
data = response.json()
return pd.DataFrame(data['value'])
def _addFilterToUrl(url : str, filters : dict):
url += '$filter='
num_filters = len(filters)
for i, (key, value) in enumerate(filters.items()):
if isinstance(value, bool):
url += f"{key} eq {str(value).lower()}"
elif isinstance(value, str):
url += f"{key} eq '{value}'"
elif isinstance(value, int):
url += f"{key} eq {value}"
if i < num_filters - 1:
url += ' and '
return url
def _addSelectToUrl(url : str, select : list):
url += '$select='
num_select = len(select)
for i, item in enumerate(select):
url += f'{item}'
if i < num_select - 1:
url += ','
return url
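# Minimal sketch (not part of the original module): shows how the helper
# functions above compose an OData query string. The base URL, BAQ name, and
# field names below are illustrative only; no request is sent.
if __name__ == "__main__":
    example_url = "https://server.example/api/v2/odata/000/BaqSvc/PartList/Data?"
    example_url = _addFilterToUrl(example_url, {"Part_Company": "000", "Part_Active": True})
    example_url += "&"
    example_url = _addSelectToUrl(example_url, ["Part_PartNum", "Part_PartDescription"])
    print(example_url)
    # -> ...?$filter=Part_Company eq '000' and Part_Active eq true&$select=Part_PartNum,Part_PartDescription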
|
PypiClean
|
/borgweb-0.3.0.tar.gz/borgweb-0.3.0/docs/internals.rst
|
.. include:: global.rst.inc
.. _internals:
Internals
=========
This page documents the internal workings of |project_name|.
What we use
-----------
* Flask and Werkzeug - Python web micro-framework and http toolbox
* Bootstrap and jQuery - CSS framework, Javascript library
* |project_name_backup| - for doing the backups
Develop JS
~~~~~~~~~~
#. Have NodeJS/io.js and NPM installed.
#. ``git clone https://github.com/borgbackup/borgweb.git``
#. ``cd borgweb/js``
#. ``npm install``
#. ``gulp watch``
#. Edit JS files within ``js/``; files will automatically be bundled into ``borgweb/static/bundle.js``
|
PypiClean
|
/karrio.dpdhl-2023.5.1-py3-none-any.whl/karrio/providers/dpdhl/tracking.py
|
import dpdhl_lib.tracking_response as tracking
import dpdhl_lib.tracking_request as dpdhl
import typing
import karrio.lib as lib
import karrio.core.models as models
import karrio.providers.dpdhl.error as error
import karrio.providers.dpdhl.utils as provider_utils
import karrio.providers.dpdhl.units as provider_units
def parse_tracking_response(
_responses: lib.Deserializable[typing.List[lib.Element]],
settings: provider_utils.Settings,
) -> typing.Tuple[typing.List[models.TrackingDetails], typing.List[models.Message]]:
responses = _responses.deserialize()
response_messages = [
result
for result in responses
if result.get("code") != "0" or result.get("body") is not None
]
response_details = [
result[0]
for result in responses
if result.get("code") == "0" and next(iter(result), None) is not None
]
trackers = [_extract_details(rate, settings) for rate in response_details]
messages: typing.List[models.Message] = sum(
[error.parse_error_response(_, settings) for _ in response_messages], start=[]
)
return trackers, messages
def _extract_details(
data: lib.Element,
settings: provider_utils.Settings,
) -> models.TrackingDetails:
details = lib.to_object(tracking.dataType, data)
events: typing.List[tracking.dataType2] = (
[d for d in details.data.data] if details.data is not None else []
)
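# DPDHL "ice" event codes drive both the delivered flag and the mapped
# tracking status below; unrecognised codes fall back to "in_transit".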
delivered = details.ice == "DLVRD"
status = next(
(
status.name
for status in list(provider_units.TrackingStatus)
if details.ice in status.value
),
provider_units.TrackingStatus.in_transit.name,
)
return models.TrackingDetails(
carrier_id=settings.carrier_id,
carrier_name=settings.carrier_name,
tracking_number=details.piece_identifier,
events=[
models.TrackingEvent(
code=event.ice,
description=event.event_status,
date=lib.fdate(event.event_timestamp, "%d.%m.%Y %H:%M"),
time=lib.fdate(event.event_timestamp, "%d.%m.%Y %H:%M"),
location=lib.join(
event.event_location,
event.event_country,
separator=", ",
join=True,
),
)
for event in events
],
status=status,
delivered=delivered,
estimated_delivery=lib.fdate(details.delivery_date),
info=models.TrackingInfo(
carrier_tracking_link=settings.tracking_url.format(
details.piece_identifier
),
customer_name=details.pan_recipient_name,
shipment_destination_country=details.dest_country,
shipment_destination_postal_code=details.pan_recipient_postalcode,
shipment_origin_country=details.origin_country,
shipment_service=details.product_name,
),
)
def tracking_request(
payload: models.TrackingRequest,
settings: provider_utils.Settings,
) -> lib.Serializable:
request = [
dpdhl.data(
appname=settings.zt_id,
password=settings.zt_password,
request="d-get-piece-detail",
language_code=settings.language_code,
piece_code=tracking_number,
)
for tracking_number in payload.tracking_numbers
]
return lib.Serializable(
request,
lambda requests: [
f'<?xml version="1.0" encoding="UTF-8" standalone="no"?>\n{lib.to_xml(req)}'
for req in requests
],
)
|
PypiClean
|
/scikit-tensor-0.1.tar.gz/scikit-tensor-0.1/sktensor/cp.py
|
import logging
import time
import numpy as np
from numpy import array, dot, ones, sqrt
from scipy.linalg import pinv
from numpy.random import rand
from .core import nvecs, norm
from .ktensor import ktensor
_log = logging.getLogger('CP')
_DEF_MAXITER = 500
_DEF_INIT = 'nvecs'
_DEF_CONV = 1e-5
_DEF_FIT_METHOD = 'full'
_DEF_TYPE = np.float64  # np.float was removed in NumPy >= 1.24
__all__ = [
'als',
'opt',
'wopt'
]
def als(X, rank, **kwargs):
"""
Alternating least-squares algorithm to compute the CP decomposition.
Parameters
----------
X : tensor_mixin
The tensor to be decomposed.
rank : int
Tensor rank of the decomposition.
init : {'random', 'nvecs'}, optional
The initialization method to use.
- random : Factor matrices are initialized randomly.
- nvecs : Factor matrices are initialized via HOSVD.
(default 'nvecs')
max_iter : int, optional
Maximum number of iterations of the ALS algorithm.
(default 500)
fit_method : {'full', None}
The method to compute the fit of the factorization
- 'full' : Compute the least-squares fit between the dense
reconstruction of the factorization and X.
- None : Do not compute the fit of the factorization, but iterate
until ``max_iter`` (Useful for large-scale tensors).
(default 'full')
conv : float
Convergence tolerance on difference of fit between iterations
(default 1e-5)
Returns
-------
P : ktensor
Rank ``rank`` factorization of X. ``P.U[i]`` corresponds to the factor
matrix for the i-th mode. ``P.lambda[i]`` corresponds to the weight
of the i-th rank-one component.
fit : float
Fit of the factorization compared to ``X``
itr : int
Number of iterations that were needed until convergence
exectimes : ndarray of floats
Time needed for each single iteration
Examples
--------
Create random dense tensor
>>> from sktensor import dtensor
>>> U = [np.random.rand(i,3) for i in (20, 10, 14)]
>>> T = dtensor(ktensor(U).toarray())
Compute rank-3 CP decomposition of ``T`` with ALS
>>> P, fit, itr, _ = als(T, 3)
Result is a decomposed tensor stored as a Kruskal operator
>>> type(P)
<class 'sktensor.ktensor.ktensor'>
Factorization should be close to original data
>>> np.allclose(T, P.totensor())
True
References
----------
.. [1] Kolda, T. G. & Bader, B. W.
Tensor Decompositions and Applications.
SIAM Rev. 51, 455–500 (2009).
.. [2] Harshman, R. A.
Foundations of the PARAFAC procedure: models and conditions for an 'explanatory' multimodal factor analysis.
UCLA Working Papers in Phonetics 16, (1970).
.. [3] Carroll, J. D., Chang, J. J.
Analysis of individual differences in multidimensional scaling via an N-way generalization of 'Eckart-Young' decomposition.
Psychometrika 35, 283–319 (1970).
"""
# init options
ainit = kwargs.pop('init', _DEF_INIT)
maxiter = kwargs.pop('max_iter', _DEF_MAXITER)
fit_method = kwargs.pop('fit_method', _DEF_FIT_METHOD)
conv = kwargs.pop('conv', _DEF_CONV)
dtype = kwargs.pop('dtype', _DEF_TYPE)
if not len(kwargs) == 0:
raise ValueError('Unknown keywords (%s)' % (kwargs.keys()))
N = X.ndim
normX = norm(X)
U = _init(ainit, X, N, rank, dtype)
fit = 0
exectimes = []
for itr in range(maxiter):
tic = time.perf_counter()  # time.clock() was removed in Python 3.8
fitold = fit
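# Mode-n CP-ALS update: project the unfolded tensor onto the Khatri-Rao
# product of the other factor matrices (uttkrp), then solve against the
# Hadamard product of their Gram matrices via a pseudoinverse.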
for n in range(N):
Unew = X.uttkrp(U, n)
Y = ones((rank, rank), dtype=dtype)
for i in list(range(n)) + list(range(n + 1, N)):
Y = Y * dot(U[i].T, U[i])
Unew = Unew.dot(pinv(Y))
# Normalize
if itr == 0:
lmbda = sqrt((Unew ** 2).sum(axis=0))
else:
lmbda = Unew.max(axis=0)
lmbda[lmbda < 1] = 1
U[n] = Unew / lmbda
P = ktensor(U, lmbda)
if fit_method == 'full':
normresidual = normX ** 2 + P.norm() ** 2 - 2 * P.innerprod(X)
fit = 1 - (normresidual / normX ** 2)
else:
fit = itr
fitchange = abs(fitold - fit)
exectimes.append(time.perf_counter() - tic)
_log.debug(
'[%3d] fit: %.5f | delta: %7.1e | secs: %.5f' %
(itr, fit, fitchange, exectimes[-1])
)
if itr > 0 and fitchange < conv:
break
return P, fit, itr, array(exectimes)
def opt(X, rank, **kwargs):
ainit = kwargs.pop('init', _DEF_INIT)
maxiter = kwargs.pop('maxIter', _DEF_MAXITER)
conv = kwargs.pop('conv', _DEF_CONV)
dtype = kwargs.pop('dtype', _DEF_TYPE)
if not len(kwargs) == 0:
raise ValueError('Unknown keywords (%s)' % (kwargs.keys()))
N = X.ndim
U = _init(ainit, X, N, rank, dtype)
def wopt(X, rank, **kwargs):
raise NotImplementedError()
def _init(init, X, N, rank, dtype):
"""
Initialization for CP models
"""
Uinit = [None for _ in range(N)]
if isinstance(init, list):
Uinit = init
elif init == 'random':
for n in range(1, N):
Uinit[n] = array(rand(X.shape[n], rank), dtype=dtype)
elif init == 'nvecs':
for n in range(1, N):
Uinit[n] = array(nvecs(X, n, rank), dtype=dtype)
else:
raise ValueError('Unknown option (init=%s)' % str(init))
return Uinit
# vim: set et:
|
PypiClean
|
/django_skote-0.0.9-py3-none-any.whl/django_skote/static/django_skote/js/pages/echarts.init.js
|
function getChartColorsArray(e){if(null!==document.getElementById(e)){var t=document.getElementById(e).getAttribute("data-colors");if(t)return(t=JSON.parse(t)).map(function(e){var t=e.replace(" ","");return-1===t.indexOf(",")?getComputedStyle(document.documentElement).getPropertyValue(t)||t:2==(e=e.split(",")).length?"rgba("+getComputedStyle(document.documentElement).getPropertyValue(e[0])+","+e[1]+")":t});console.warn("data-colors Attribute not found on:",e)}}var data,dom,myChart,app,lineChartColors=getChartColorsArray("line-chart"),mixLineChartColors=(lineChartColors&&(dom=document.getElementById("line-chart"),myChart=echarts.init(dom),app={},option=null,(option={grid:{zlevel:0,x:50,x2:50,y:30,y2:30,borderWidth:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"rgba(0,0,0,0)"},xAxis:{type:"category",data:["Mon","Tue","Wed","Thu","Fri","Sat","Sun"],axisLine:{lineStyle:{color:"#8791af"}}},yAxis:{type:"value",axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}}},series:[{data:[820,932,901,934,1290,1330,1320],type:"line"}],color:lineChartColors})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("mix-line-bar")),doughnutChartColors=(mixLineChartColors&&(dom=document.getElementById("mix-line-bar"),myChart=echarts.init(dom),option=null,(option={grid:{zlevel:0,x:80,x2:50,y:30,y2:30,borderWidth:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"rgba(0,0,0,0)"},tooltip:{trigger:"axis",axisPointer:{type:"cross",crossStyle:{color:"#999"}}},toolbox:{orient:"center",left:0,top:20,feature:{dataView:{readOnly:!(app={title:"Data view"}),title:"Data View"},magicType:{type:["line","bar"],title:{line:"For line chart",bar:"For bar chart"}},restore:{title:"restore"},saveAsImage:{title:"Download Image"}}},color:mixLineChartColors,legend:{data:["Evaporation","Precipitation","Average temperature"],textStyle:{color:"#8791af"}},xAxis:[{type:"category",data:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug"],axisPointer:{type:"shadow"},axisLine:{lineStyle:{color:"#8791af"}}}],yAxis:[{type:"value",name:"Water volume",min:0,max:250,interval:50,axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}},axisLabel:{formatter:"{value} ml"}},{type:"value",name:"Temperature",min:0,max:25,interval:5,axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}},axisLabel:{formatter:"{value} °C"}}],series:[{name:"Evaporation",type:"bar",data:[2,4.9,7,23.2,25.6,76.7,135.6,162.2]},{name:"Precipitation",type:"bar",data:[2.6,5.9,9,26.4,28.7,70.7,175.6,182.2]},{name:"Average Temperature",type:"line",yAxisIndex:1,data:[2,2.2,3.3,4.5,6.3,10.2,20.3,23.4]}]})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("doughnut-chart")),pieChartColors=(doughnutChartColors&&(dom=document.getElementById("doughnut-chart"),myChart=echarts.init(dom),(option={tooltip:{trigger:"item",formatter:"{a} <br/>{b}: {c} ({d}%)"},legend:{orient:"vertical",x:"left",data:["Laptop","Tablet","Mobile","Others","Desktop"],textStyle:{color:"#8791af"}},color:doughnutChartColors,series:[{name:"Total sales",type:"pie",radius:["50%","70%"],avoidLabelOverlap:!(app={}),label:{normal:{show:!1,position:"center"},emphasis:{show:!(option=null),textStyle:{fontSize:"30",fontWeight:"bold"}}},labelLine:{normal:{show:!1}},data:[{value:335,name:"Laptop"},{value:310,name:"Tablet"},{value:234,name:"Mobile"},{value:135,name:"Others"},{value:1548,name:"Desktop"}]}]})&&"object"==typeof 
option&&myChart.setOption(option,!0)),getChartColorsArray("pie-chart")),scatterChartColors=(pieChartColors&&(dom=document.getElementById("pie-chart"),myChart=echarts.init(dom),app={},option=null,(option={tooltip:{trigger:"item",formatter:"{a} <br/>{b} : {c} ({d}%)"},legend:{orient:"vertical",left:"left",data:["Laptop","Tablet","Mobile","Others","Desktop"],textStyle:{color:"#8791af"}},color:pieChartColors,series:[{name:"Total sales",type:"pie",radius:"55%",center:["50%","60%"],data:[{value:335,name:"Laptop"},{value:310,name:"Tablet"},{value:234,name:"Mobile"},{value:135,name:"Others"},{value:1548,name:"Desktop"}],itemStyle:{emphasis:{shadowBlur:10,shadowOffsetX:0,shadowColor:"rgba(0, 0, 0, 0.5)"}}}]})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("scatter-chart")),bubbleChartColors=(scatterChartColors&&(dom=document.getElementById("scatter-chart"),myChart=echarts.init(dom),app={},option=null,(option={grid:{zlevel:0,x:50,x2:50,y:30,y2:30,borderWidth:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"rgba(0,0,0,0)"},xAxis:{axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}}},yAxis:{axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}}},series:[{symbolSize:10,data:[[10,8.04],[8,6.95],[13,7.58],[9,8.81],[11,8.33],[14,9.96],[6,7.24],[4,4.26],[12,10.84],[7,4.82],[5,5.68]],type:"scatter"}],color:scatterChartColors})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("bubble-chart")),candleStickChartColors=(bubbleChartColors&&(dom=document.getElementById("bubble-chart"),myChart=echarts.init(dom),app={},(option={grid:{zlevel:0,x:50,x2:50,y:30,y2:30,borderWidth:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"rgba(0,0,0,0)"},legend:{right:10,data:["2018","2019"]},xAxis:{axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{type:"dashed",color:"rgba(166, 176, 207, 0.1)"}}},yAxis:{axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{type:"dashed",color:"rgba(166, 176, 207, 0.1)"}},scale:!(option=null)},series:[{name:"2018",data:(data=[[[28604,77,17096869,"Australia",1990],[31163,77.4,27662440,"Canada",1990],[1516,68,1154605773,"China",1990],[13670,74.7,10582082,"Cuba",1990],[28599,75,4986705,"Finland",1990],[29476,77.1,56943299,"France",1990],[31476,75.4,78958237,"Germany",1990],[28666,78.1,254830,"Iceland",1990],[1777,57.7,870601776,"India",1990],[29550,79.1,122249285,"Japan",1990],[2076,67.9,20194354,"North Korea",1990],[12087,72,42972254,"South Korea",1990],[24021,75.4,3397534,"New Zealand",1990],[43296,76.8,4240375,"Norway",1990],[10088,70.8,38195258,"Poland",1990],[19349,69.6,147568552,"Russia",1990],[10670,67.3,53994605,"Turkey",1990],[26424,75.7,57110117,"United Kingdom",1990],[37062,75.4,252847810,"United States",1990]],[[44056,81.8,23968973,"Australia",2015],[43294,81.7,35939927,"Canada",2015],[13334,76.9,1376048943,"China",2015],[21291,78.5,11389562,"Cuba",2015],[38923,80.8,5503457,"Finland",2015],[37599,81.9,64395345,"France",2015],[44053,81.1,80688545,"Germany",2015],[42182,82.8,329425,"Iceland",2015],[5903,66.8,1311050527,"India",2015],[36162,83.5,126573481,"Japan",2015],[1390,71.4,25155317,"North Korea",2015],[34644,80.7,50293439,"South Korea",2015],[34186,80.6,4528526,"New Zealand",2015],[64304,81.6,5210967,"Norway",2015],[24787,77.3,38611794,"Poland",2015],[23038,73.13,143456918,"Russia",2015],[19360,76.5,78665830,"Turkey",2015],[38225,81.4,64715810,"United Kingdom",2015],[53354,79.1,321773631,"United 
States",2015]]])[0],type:"scatter",symbolSize:function(e){return Math.sqrt(e[2])/500},label:{emphasis:{show:!0,formatter:function(e){return e.data[3]},position:"top"}},itemStyle:{normal:{shadowBlur:10,shadowColor:"rgba("+bubbleChartColors[0]+", 0.5)",shadowOffsetY:5,color:new echarts.graphic.RadialGradient(.4,.3,1,[{offset:0,color:"rgba("+bubbleChartColors[0]+", 0.1)"},{offset:1,color:"rgb("+bubbleChartColors[0]+")"}])}}},{name:"2019",data:data[1],type:"scatter",symbolSize:function(e){return Math.sqrt(e[2])/500},label:{emphasis:{show:!0,formatter:function(e){return e.data[3]},position:"top"}},itemStyle:{normal:{shadowBlur:10,shadowColor:"rgba("+bubbleChartColors[1]+", 0.5)",shadowOffsetY:5,color:new echarts.graphic.RadialGradient(.4,.3,1,[{offset:0,color:"rgba("+bubbleChartColors[1]+", 0.1)"},{offset:1,color:"rgb("+bubbleChartColors[1]+")"}])}}}]})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("candlestick-chart")),gaugeChartColors=(candleStickChartColors&&(dom=document.getElementById("candlestick-chart"),myChart=echarts.init(dom),app={},option=null,(option={grid:{zlevel:0,x:50,x2:50,y:30,y2:30,borderWidth:0,backgroundColor:"rgba(0,0,0,0)",borderColor:"rgba(0,0,0,0)"},xAxis:{data:["2017-10-24","2017-10-25","2017-10-26","2017-10-27"],axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}}},yAxis:{axisLine:{lineStyle:{color:"#8791af"}},splitLine:{lineStyle:{color:"rgba(166, 176, 207, 0.1)"}}},series:[{type:"k",data:[[20,30,10,35],[40,35,30,55],[33,38,33,40],[40,40,32,42]],itemStyle:{normal:{color:candleStickChartColors[0],color0:candleStickChartColors[1],borderColor:candleStickChartColors[0],borderColor0:candleStickChartColors[1]}}}]})&&"object"==typeof option&&myChart.setOption(option,!0)),getChartColorsArray("gauge-chart"));gaugeChartColors&&(dom=document.getElementById("gauge-chart"),myChart=echarts.init(dom),app={},option=null,option={tooltip:{formatter:"{a} <br/>{b} : {c}%"},toolbox:{feature:{restore:{title:"Refresh"},saveAsImage:{title:"Download Image"}}},series:[{name:"Business indicator",type:"gauge",detail:{formatter:"{value}%"},axisLine:{lineStyle:{color:[[.2,gaugeChartColors[0]],[.8,gaugeChartColors[1]],[1,gaugeChartColors[2]]],width:20}},data:[{value:50,name:"Completion rate"}]}]},setInterval(function(){option.series[0].data[0].value=+(100*Math.random()).toFixed(2),myChart.setOption(option,!0)},2e3),option&&"object"==typeof option&&myChart.setOption(option,!0));
|
PypiClean
|
/djbs-0.1.4.tar.gz/djbs-0.1.4/django/contrib/flatpages/migrations/0001_initial.py
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("sites", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="FlatPage",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"url",
models.CharField(max_length=100, verbose_name="URL", db_index=True),
),
("title", models.CharField(max_length=200, verbose_name="title")),
("content", models.TextField(verbose_name="content", blank=True)),
(
"enable_comments",
models.BooleanField(default=False, verbose_name="enable comments"),
),
(
"template_name",
models.CharField(
help_text=(
"Example: “flatpages/contact_page.html”. If this isn’t "
"provided, the system will use “flatpages/default.html”."
),
max_length=70,
verbose_name="template name",
blank=True,
),
),
(
"registration_required",
models.BooleanField(
default=False,
help_text=(
"If this is checked, only logged-in users will be able to "
"view the page."
),
verbose_name="registration required",
),
),
(
"sites",
models.ManyToManyField(to="sites.Site", verbose_name="sites"),
),
],
options={
"ordering": ["url"],
"db_table": "django_flatpage",
"verbose_name": "flat page",
"verbose_name_plural": "flat pages",
},
bases=(models.Model,),
),
]
|
PypiClean
|
/ase_core-3.23.0b1.post2-py3-none-any.whl/ase/calculators/kim/kimmodel.py
|
import numpy as np
from ase.calculators.calculator import Calculator
from ase.calculators.calculator import compare_atoms
from . import kimpy_wrappers
from . import neighborlist
class KIMModelData:
"""Initializes and subsequently stores the KIM API Portable Model
object, KIM API ComputeArguments object, and the neighbor list
object used by instances of KIMModelCalculator. Also stores the
arrays which are registered in the KIM API and which are used to
communicate with the model.
"""
def __init__(self, model_name, ase_neigh, neigh_skin_ratio, debug=False):
self.model_name = model_name
self.ase_neigh = ase_neigh
self.debug = debug
# Initialize KIM API Portable Model object and ComputeArguments object
self.kim_model, self.compute_args = self._init_kim()
self.species_map = self._create_species_map()
# Ask model to provide information relevant for neighbor list
# construction
(
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
) = self.get_model_neighbor_list_parameters()
# Initialize neighbor list object
self.neigh = self._init_neigh(
neigh_skin_ratio,
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
)
def _init_kim(self):
"""Create the KIM API Portable Model object and KIM API ComputeArguments
object
"""
if self.kim_initialized:
return
kim_model = kimpy_wrappers.PortableModel(self.model_name, self.debug)
# KIM API model object is what actually creates/destroys the
# ComputeArguments object, so we must pass it as a parameter
compute_args = kim_model.compute_arguments_create()
return kim_model, compute_args
def _init_neigh(
self,
neigh_skin_ratio,
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
):
"""Initialize neighbor list, either an ASE-native neighborlist
or one created using the neighlist module in kimpy
"""
neigh_list_object_type = (
neighborlist.ASENeighborList
if self.ase_neigh
else neighborlist.KimpyNeighborList
)
return neigh_list_object_type(
self.compute_args,
neigh_skin_ratio,
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
self.debug,
)
def get_model_neighbor_list_parameters(self):
model_influence_dist = self.kim_model.get_influence_distance()
(
model_cutoffs,
padding_not_require_neigh,
) = self.kim_model.get_neighbor_list_cutoffs_and_hints()
return model_influence_dist, model_cutoffs, padding_not_require_neigh
def update_compute_args_pointers(self, energy, forces):
self.compute_args.update(
self.num_particles,
self.species_code,
self._particle_contributing,
self.coords,
energy,
forces,
)
def _create_species_map(self):
"""Get all the supported species of the KIM model and the
corresponding integer codes used by the model
Returns
-------
species_map : dict
key : str
chemical symbols (e.g. "Ar")
value : int
species integer code (e.g. 1)
"""
supported_species, codes = self._get_model_supported_species_and_codes()
species_map = dict()
for i, spec in enumerate(supported_species):
species_map[spec] = codes[i]
if self.debug:
print(
"Species {} is supported and its code is: {}".format(
spec, codes[i])
)
return species_map
@property
def padding_image_of(self):
return self.neigh.padding_image_of
@property
def num_particles(self):
return self.neigh.num_particles
@property
def coords(self):
return self.neigh.coords
@property
def _particle_contributing(self):
return self.neigh.particle_contributing
@property
def species_code(self):
return self.neigh.species_code
@property
def kim_initialized(self):
return hasattr(self, "kim_model")
@property
def _neigh_initialized(self):
return hasattr(self, "neigh")
@property
def _get_model_supported_species_and_codes(self):
return self.kim_model.get_model_supported_species_and_codes
class KIMModelCalculator(Calculator):
"""Calculator that works with KIM Portable Models (PMs).
Calculator that carries out direct communication between ASE and a
KIM Portable Model (PM) through the kimpy library (which provides a
set of python bindings to the KIM API).
Parameters
----------
model_name : str
The unique identifier assigned to the interatomic model (for
details, see https://openkim.org/doc/schema/kim-ids)
ase_neigh : bool, optional
False (default): Use kimpy's neighbor list library
True: Use ASE's internal neighbor list mechanism (usually slower
than the kimpy neighlist library)
neigh_skin_ratio : float, optional
Used to determine the neighbor list cutoff distance, r_neigh,
through the relation r_neigh = (1 + neigh_skin_ratio) * rcut,
where rcut is the model's influence distance. (Default: 0.2)
release_GIL : bool, optional
Whether to release python GIL. Releasing the GIL allows a KIM
model to run with multiple concurrent threads. (Default: False)
debug : bool, optional
If True, detailed information is printed to stdout. (Default:
False)
"""
implemented_properties = ["energy", "free_energy", "forces", "stress"]
ignored_changes = {"initial_charges", "initial_magmoms"}
def __init__(
self,
model_name,
ase_neigh=False,
neigh_skin_ratio=0.2,
release_GIL=False,
debug=False,
*args,
**kwargs
):
super().__init__(*args, **kwargs)
self.model_name = model_name
self.release_GIL = release_GIL
self.debug = debug
if neigh_skin_ratio < 0:
raise ValueError('Argument "neigh_skin_ratio" must be non-negative')
self.neigh_skin_ratio = neigh_skin_ratio
# Model output
self.energy = None
self.forces = None
# Create KIMModelData object. This will take care of creating
# and storing the KIM API Portable Model object, KIM API
# ComputeArguments object, and the neighbor list object that
# our calculator needs
self._kimmodeldata = KIMModelData(
self.model_name, ase_neigh, self.neigh_skin_ratio, self.debug
)
self._parameters_changed = False
def __enter__(self):
return self
def __exit__(self, exc_type, value, traceback):
pass
def __repr__(self):
return "KIMModelCalculator(model_name={})".format(self.model_name)
def calculate(
self,
atoms=None,
properties=["energy", "forces", "stress"],
system_changes=["positions", "numbers", "cell", "pbc"],
):
"""
Inherited method from the ase Calculator class that is called by
get_property()
Parameters
----------
atoms : Atoms
Atoms object whose properties are desired
properties : list of str
List of what needs to be calculated. Can be any combination
of 'energy', 'forces' and 'stress'.
system_changes : list of str
List of what has changed since the last calculation. Can be
any combination of: 'positions', 'numbers', 'cell', and 'pbc'.
"""
super().calculate(atoms, properties, system_changes)
if self._parameters_changed:
self._parameters_changed = False
if system_changes:
# Ask model to update all of its parameters and the parameters
# related to the neighbor list(s). This update is necessary to do
here since the user will generally have made changes to the model
# parameters since the last time an update was performed and we
# need to ensure that any properties calculated here are made using
# the up-to-date model and neighbor list parameters.
self._model_refresh_and_update_neighbor_list_parameters()
if self._need_neigh_update(atoms, system_changes):
self._update_neigh(atoms, self._species_map)
self.energy = np.array([0.0], dtype=kimpy_wrappers.c_double)
self.forces = np.zeros(
[self._num_particles[0], 3], dtype=kimpy_wrappers.c_double
)
self._update_compute_args_pointers(self.energy, self.forces)
else:
self._update_kim_coords(atoms)
self._kim_model.compute(self._compute_args, self.release_GIL)
energy = self.energy[0]
forces = self._assemble_padding_forces()
try:
volume = atoms.get_volume()
stress = self._compute_virial_stress(
self.forces, self._coords, volume)
except ValueError: # Volume cannot be computed
stress = None
# Quantities passed back to ASE
self.results["energy"] = energy
self.results["free_energy"] = energy
self.results["forces"] = forces
self.results["stress"] = stress
def check_state(self, atoms, tol=1e-15):
# Check for change in atomic configuration (positions or pbc)
system_changes = compare_atoms(
self.atoms, atoms, excluded_properties=self.ignored_changes
)
# Check if model parameters were changed
if self._parameters_changed:
system_changes.append("calculator")
return system_changes
def _assemble_padding_forces(self):
"""
Assemble forces on padding atoms back to contributing atoms.
Uses the following instance attributes (the method itself takes no arguments):
self.forces : 2D array of doubles
Forces on both contributing and padding atoms
self._num_contributing_particles : int
Number of contributing atoms
self._padding_image_of : 1D array of int
For each padding atom, the index of the contributing atom it is an image of
Returns
-------
Total forces on contributing atoms.
"""
total_forces = np.array(self.forces[:self._num_contributing_particles])
if self._padding_image_of.size != 0:
pad_forces = self.forces[self._num_contributing_particles:]
for f, org_index in zip(pad_forces, self._padding_image_of):
total_forces[org_index] += f
return total_forces
@staticmethod
def _compute_virial_stress(forces, coords, volume):
"""Compute the virial stress in Voigt notation.
Parameters
----------
forces : 2D array
Partial forces on all atoms (padding included)
coords : 2D array
Coordinates of all atoms (padding included)
volume : float
Volume of cell
Returns
-------
stress : 1D array
stress in Voigt order (xx, yy, zz, yz, xz, xy)
"""
stress = np.zeros(6)
stress[0] = -np.dot(forces[:, 0], coords[:, 0]) / volume
stress[1] = -np.dot(forces[:, 1], coords[:, 1]) / volume
stress[2] = -np.dot(forces[:, 2], coords[:, 2]) / volume
stress[3] = -np.dot(forces[:, 1], coords[:, 2]) / volume
stress[4] = -np.dot(forces[:, 0], coords[:, 2]) / volume
stress[5] = -np.dot(forces[:, 0], coords[:, 1]) / volume
return stress
@property
def _update_compute_args_pointers(self):
return self._kimmodeldata.update_compute_args_pointers
@property
def _kim_model(self):
return self._kimmodeldata.kim_model
@property
def _compute_args(self):
return self._kimmodeldata.compute_args
@property
def _num_particles(self):
return self._kimmodeldata.num_particles
@property
def _coords(self):
return self._kimmodeldata.coords
@property
def _padding_image_of(self):
return self._kimmodeldata.padding_image_of
@property
def _species_map(self):
return self._kimmodeldata.species_map
@property
def _neigh(self):
# WARNING: This property is underscored for a reason! The
# neighborlist(s) itself (themselves) may not be up to date with
# respect to changes that have been made to the model's parameters, or
# even since the positions in the Atoms object may have changed.
# Neighbor lists are only potentially updated inside the ``calculate``
# method.
return self._kimmodeldata.neigh
@property
def _num_contributing_particles(self):
return self._neigh.num_contributing_particles
@property
def _update_kim_coords(self):
return self._neigh.update_kim_coords
@property
def _need_neigh_update(self):
return self._neigh.need_neigh_update
@property
def _update_neigh(self):
return self._neigh.update
@property
def parameters_metadata(self):
return self._kim_model.parameters_metadata
@property
def parameter_names(self):
return self._kim_model.parameter_names
@property
def get_parameters(self):
# Ask model to update all of its parameters and the parameters related
# to the neighbor list(s). This update is necessary to do here since
the user will generally have made changes to the model parameters since
# the last time an update was performed and we need to ensure the
# parameters returned by this method are fully up to date.
self._model_refresh_and_update_neighbor_list_parameters()
return self._kim_model.get_parameters
def set_parameters(self, **kwargs):
parameters = self._kim_model.set_parameters(**kwargs)
self._parameters_changed = True
return parameters
def _model_refresh_and_update_neighbor_list_parameters(self):
"""
Call the model's refresh routine and update the neighbor list object
for any necessary changes arising from changes to the model parameters,
e.g. a change in one of its cutoffs. After a model's parameters have
been changed, this method *must* be called before calling the model's
compute routine.
"""
self._kim_model.clear_then_refresh()
# Update neighbor list parameters
(
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
) = self._kimmodeldata.get_model_neighbor_list_parameters()
self._neigh.set_neigh_parameters(
self.neigh_skin_ratio,
model_influence_dist,
model_cutoffs,
padding_not_require_neigh,
)
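# Minimal usage sketch (not part of the original module). Requires kimpy and a
# KIM Portable Model installed through the KIM API; the model identifier below
# is an illustrative example, not a guaranteed-available model.
if __name__ == "__main__":
    from ase.build import bulk

    atoms = bulk("Ar", "fcc", a=5.26, cubic=True)
    atoms.calc = KIMModelCalculator("ex_model_Ar_P_Morse_07C")  # example model id
    print("energy (eV):", atoms.get_potential_energy())
    print("forces shape:", atoms.get_forces().shape)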
|
PypiClean
|
/auto_augmentation-0.3.tar.gz/auto_augmentation-0.3/src/autoaug/child_networks/bad_lenet.py
|
import torch.nn as nn
# class Bad_LeNet(nn.Module):
# # 1. I reduced the channel sizes of the convolutional layers
# # 2. I reduced the number of fully connected layers from 3 to 2
# #
# # no. of weights: 25*2 + 25*2*4 + 16*4*10 = 250+640 = 890
# def __init__(self):
# super().__init__()
# self.conv1 = nn.Conv2d(1, 2, 5)
# self.relu1 = nn.ReLU()
# self.pool1 = nn.MaxPool2d(2)
# self.conv2 = nn.Conv2d(2, 4, 5)
# self.relu2 = nn.ReLU()
# self.pool2 = nn.MaxPool2d(2)
# self.fc1 = nn.Linear(16*4, 10)
# self.relu3 = nn.ReLU()
# def forward(self, x):
# y = self.conv1(x)
# y = self.relu1(y)
# y = self.pool1(y)
# y = self.conv2(y)
# y = self.relu2(y)
# y = self.pool2(y)
# y = y.view(y.shape[0], -1)
# y = self.fc1(y)
# y = self.relu3(y)
# return y
class Bad_LeNet(nn.Module):
# 1. I reduced the channel sizes of the convolutional layers
# 2. I reduced the number of fully connected layers from 3 to 2
#
# no. of weights: 25*2 + 25*2*3 + 4*3*10 = 50+150+120 = 320
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(1, 2, 5)
self.relu1 = nn.ReLU()
self.pool1 = nn.MaxPool2d(2)
self.conv2 = nn.Conv2d(2, 3, 5)
self.relu2 = nn.ReLU()
self.pool2 = nn.MaxPool2d(4)
self.fc1 = nn.Linear(4*3, 10)
self.relu3 = nn.ReLU()
# self.fc2 = nn.Linear(20, 14)
# self.relu4 = nn.ReLU()
# self.fc3 = nn.Linear(14, 10)
# self.relu5 = nn.ReLU()
def forward(self, x):
y = self.conv1(x)
y = self.relu1(y)
y = self.pool1(y)
y = self.conv2(y)
y = self.relu2(y)
y = self.pool2(y)
y = y.view(y.shape[0], -1)
y = self.fc1(y)
y = self.relu3(y)
# y = self.fc2(y)
# y = self.relu4(y)
# y = self.fc3(y)
# y = self.relu5(y)
return y
def bad_lenet():
model = Bad_LeNet()
return model
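# Minimal sanity-check sketch (not part of the original module): verifies the
# flattened feature size assumed by fc1 (3 channels * 2 * 2 = 12) on a dummy
# MNIST-sized batch.
if __name__ == "__main__":
    import torch

    model = bad_lenet()
    dummy = torch.randn(8, 1, 28, 28)  # batch of 8 single-channel 28x28 images
    out = model(dummy)
    print(out.shape)  # expected: torch.Size([8, 10])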
|
PypiClean
|
/django-bootstrap-customizer-0.2.0.tar.gz/django-bootstrap-customizer-0.2.0/bootstrap_customizer/static/bootstrap/dist/js/bootstrap.js
|
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('jquery'), require('popper.js')) :
typeof define === 'function' && define.amd ? define(['exports', 'jquery', 'popper.js'], factory) :
(global = global || self, factory(global.bootstrap = {}, global.jQuery, global.Popper));
}(this, function (exports, $, Popper) { 'use strict';
$ = $ && $.hasOwnProperty('default') ? $['default'] : $;
Popper = Popper && Popper.hasOwnProperty('default') ? Popper['default'] : Popper;
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function _objectSpread(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
var ownKeys = Object.keys(source);
if (typeof Object.getOwnPropertySymbols === 'function') {
ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) {
return Object.getOwnPropertyDescriptor(source, sym).enumerable;
}));
}
ownKeys.forEach(function (key) {
_defineProperty(target, key, source[key]);
});
}
return target;
}
function _inheritsLoose(subClass, superClass) {
subClass.prototype = Object.create(superClass.prototype);
subClass.prototype.constructor = subClass;
subClass.__proto__ = superClass;
}
/**
* --------------------------------------------------------------------------
* Bootstrap (v4.3.1): util.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* --------------------------------------------------------------------------
*/
/**
* ------------------------------------------------------------------------
* Private TransitionEnd Helpers
* ------------------------------------------------------------------------
*/
var TRANSITION_END = 'transitionend';
var MAX_UID = 1000000;
var MILLISECONDS_MULTIPLIER = 1000; // Shoutout AngusCroll (https://goo.gl/pxwQGp)
function toType(obj) {
return {}.toString.call(obj).match(/\s([a-z]+)/i)[1].toLowerCase();
}
function getSpecialTransitionEndEvent() {
return {
bindType: TRANSITION_END,
delegateType: TRANSITION_END,
handle: function handle(event) {
if ($(event.target).is(this)) {
return event.handleObj.handler.apply(this, arguments); // eslint-disable-line prefer-rest-params
}
return undefined; // eslint-disable-line no-undefined
}
};
}
function transitionEndEmulator(duration) {
var _this = this;
var called = false;
$(this).one(Util.TRANSITION_END, function () {
called = true;
});
setTimeout(function () {
if (!called) {
Util.triggerTransitionEnd(_this);
}
}, duration);
return this;
}
function setTransitionEndSupport() {
$.fn.emulateTransitionEnd = transitionEndEmulator;
$.event.special[Util.TRANSITION_END] = getSpecialTransitionEndEvent();
}
/**
* --------------------------------------------------------------------------
* Public Util Api
* --------------------------------------------------------------------------
*/
var Util = {
TRANSITION_END: 'bsTransitionEnd',
getUID: function getUID(prefix) {
do {
// eslint-disable-next-line no-bitwise
prefix += ~~(Math.random() * MAX_UID); // "~~" acts like a faster Math.floor() here
} while (document.getElementById(prefix));
return prefix;
},
getSelectorFromElement: function getSelectorFromElement(element) {
var selector = element.getAttribute('data-target');
if (!selector || selector === '#') {
var hrefAttr = element.getAttribute('href');
selector = hrefAttr && hrefAttr !== '#' ? hrefAttr.trim() : '';
}
try {
return document.querySelector(selector) ? selector : null;
} catch (err) {
return null;
}
},
getTransitionDurationFromElement: function getTransitionDurationFromElement(element) {
if (!element) {
return 0;
} // Get transition-duration of the element
var transitionDuration = $(element).css('transition-duration');
var transitionDelay = $(element).css('transition-delay');
var floatTransitionDuration = parseFloat(transitionDuration);
var floatTransitionDelay = parseFloat(transitionDelay); // Return 0 if element or transition duration is not found
if (!floatTransitionDuration && !floatTransitionDelay) {
return 0;
} // If multiple durations are defined, take the first
transitionDuration = transitionDuration.split(',')[0];
transitionDelay = transitionDelay.split(',')[0];
return (parseFloat(transitionDuration) + parseFloat(transitionDelay)) * MILLISECONDS_MULTIPLIER;
},
reflow: function reflow(element) {
return element.offsetHeight;
},
triggerTransitionEnd: function triggerTransitionEnd(element) {
$(element).trigger(TRANSITION_END);
},
// TODO: Remove in v5
supportsTransitionEnd: function supportsTransitionEnd() {
return Boolean(TRANSITION_END);
},
isElement: function isElement(obj) {
return (obj[0] || obj).nodeType;
},
typeCheckConfig: function typeCheckConfig(componentName, config, configTypes) {
for (var property in configTypes) {
if (Object.prototype.hasOwnProperty.call(configTypes, property)) {
var expectedTypes = configTypes[property];
var value = config[property];
var valueType = value && Util.isElement(value) ? 'element' : toType(value);
if (!new RegExp(expectedTypes).test(valueType)) {
throw new Error(componentName.toUpperCase() + ": " + ("Option \"" + property + "\" provided type \"" + valueType + "\" ") + ("but expected type \"" + expectedTypes + "\"."));
}
}
}
},
findShadowRoot: function findShadowRoot(element) {
if (!document.documentElement.attachShadow) {
return null;
} // Can find the shadow root otherwise it'll return the document
if (typeof element.getRootNode === 'function') {
var root = element.getRootNode();
return root instanceof ShadowRoot ? root : null;
}
if (element instanceof ShadowRoot) {
return element;
} // when we don't find a shadow root
if (!element.parentNode) {
return null;
}
return Util.findShadowRoot(element.parentNode);
}
};
setTransitionEndSupport();
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME = 'alert';
var VERSION = '4.3.1';
var DATA_KEY = 'bs.alert';
var EVENT_KEY = "." + DATA_KEY;
var DATA_API_KEY = '.data-api';
var JQUERY_NO_CONFLICT = $.fn[NAME];
var Selector = {
DISMISS: '[data-dismiss="alert"]'
};
var Event = {
CLOSE: "close" + EVENT_KEY,
CLOSED: "closed" + EVENT_KEY,
CLICK_DATA_API: "click" + EVENT_KEY + DATA_API_KEY
};
var ClassName = {
ALERT: 'alert',
FADE: 'fade',
SHOW: 'show'
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Alert =
/*#__PURE__*/
function () {
function Alert(element) {
this._element = element;
} // Getters
var _proto = Alert.prototype;
// Public
_proto.close = function close(element) {
var rootElement = this._element;
if (element) {
rootElement = this._getRootElement(element);
}
var customEvent = this._triggerCloseEvent(rootElement);
if (customEvent.isDefaultPrevented()) {
return;
}
this._removeElement(rootElement);
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY);
this._element = null;
} // Private
;
_proto._getRootElement = function _getRootElement(element) {
var selector = Util.getSelectorFromElement(element);
var parent = false;
if (selector) {
parent = document.querySelector(selector);
}
if (!parent) {
parent = $(element).closest("." + ClassName.ALERT)[0];
}
return parent;
};
_proto._triggerCloseEvent = function _triggerCloseEvent(element) {
var closeEvent = $.Event(Event.CLOSE);
$(element).trigger(closeEvent);
return closeEvent;
};
_proto._removeElement = function _removeElement(element) {
var _this = this;
$(element).removeClass(ClassName.SHOW);
if (!$(element).hasClass(ClassName.FADE)) {
this._destroyElement(element);
return;
}
var transitionDuration = Util.getTransitionDurationFromElement(element);
$(element).one(Util.TRANSITION_END, function (event) {
return _this._destroyElement(element, event);
}).emulateTransitionEnd(transitionDuration);
};
_proto._destroyElement = function _destroyElement(element) {
$(element).detach().trigger(Event.CLOSED).remove();
} // Static
;
Alert._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var $element = $(this);
var data = $element.data(DATA_KEY);
if (!data) {
data = new Alert(this);
$element.data(DATA_KEY, data);
}
if (config === 'close') {
data[config](this);
}
});
};
Alert._handleDismiss = function _handleDismiss(alertInstance) {
return function (event) {
if (event) {
event.preventDefault();
}
alertInstance.close(this);
};
};
_createClass(Alert, null, [{
key: "VERSION",
get: function get() {
return VERSION;
}
}]);
return Alert;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event.CLICK_DATA_API, Selector.DISMISS, Alert._handleDismiss(new Alert()));
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME] = Alert._jQueryInterface;
$.fn[NAME].Constructor = Alert;
$.fn[NAME].noConflict = function () {
$.fn[NAME] = JQUERY_NO_CONFLICT;
return Alert._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$1 = 'button';
var VERSION$1 = '4.3.1';
var DATA_KEY$1 = 'bs.button';
var EVENT_KEY$1 = "." + DATA_KEY$1;
var DATA_API_KEY$1 = '.data-api';
var JQUERY_NO_CONFLICT$1 = $.fn[NAME$1];
var ClassName$1 = {
ACTIVE: 'active',
BUTTON: 'btn',
FOCUS: 'focus'
};
var Selector$1 = {
DATA_TOGGLE_CARROT: '[data-toggle^="button"]',
DATA_TOGGLE: '[data-toggle="buttons"]',
INPUT: 'input:not([type="hidden"])',
ACTIVE: '.active',
BUTTON: '.btn'
};
var Event$1 = {
CLICK_DATA_API: "click" + EVENT_KEY$1 + DATA_API_KEY$1,
FOCUS_BLUR_DATA_API: "focus" + EVENT_KEY$1 + DATA_API_KEY$1 + " " + ("blur" + EVENT_KEY$1 + DATA_API_KEY$1)
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Button =
/*#__PURE__*/
function () {
function Button(element) {
this._element = element;
} // Getters
var _proto = Button.prototype;
// Public
_proto.toggle = function toggle() {
var triggerChangeEvent = true;
var addAriaPressed = true;
var rootElement = $(this._element).closest(Selector$1.DATA_TOGGLE)[0];
if (rootElement) {
var input = this._element.querySelector(Selector$1.INPUT);
if (input) {
if (input.type === 'radio') {
if (input.checked && this._element.classList.contains(ClassName$1.ACTIVE)) {
triggerChangeEvent = false;
} else {
var activeElement = rootElement.querySelector(Selector$1.ACTIVE);
if (activeElement) {
$(activeElement).removeClass(ClassName$1.ACTIVE);
}
}
}
if (triggerChangeEvent) {
if (input.hasAttribute('disabled') || rootElement.hasAttribute('disabled') || input.classList.contains('disabled') || rootElement.classList.contains('disabled')) {
return;
}
input.checked = !this._element.classList.contains(ClassName$1.ACTIVE);
$(input).trigger('change');
}
input.focus();
addAriaPressed = false;
}
}
if (addAriaPressed) {
this._element.setAttribute('aria-pressed', !this._element.classList.contains(ClassName$1.ACTIVE));
}
if (triggerChangeEvent) {
$(this._element).toggleClass(ClassName$1.ACTIVE);
}
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY$1);
this._element = null;
} // Static
;
Button._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$1);
if (!data) {
data = new Button(this);
$(this).data(DATA_KEY$1, data);
}
if (config === 'toggle') {
data[config]();
}
});
};
_createClass(Button, null, [{
key: "VERSION",
get: function get() {
return VERSION$1;
}
}]);
return Button;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$1.CLICK_DATA_API, Selector$1.DATA_TOGGLE_CARROT, function (event) {
event.preventDefault();
var button = event.target;
if (!$(button).hasClass(ClassName$1.BUTTON)) {
button = $(button).closest(Selector$1.BUTTON);
}
Button._jQueryInterface.call($(button), 'toggle');
}).on(Event$1.FOCUS_BLUR_DATA_API, Selector$1.DATA_TOGGLE_CARROT, function (event) {
var button = $(event.target).closest(Selector$1.BUTTON)[0];
$(button).toggleClass(ClassName$1.FOCUS, /^focus(in)?$/.test(event.type));
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$1] = Button._jQueryInterface;
$.fn[NAME$1].Constructor = Button;
$.fn[NAME$1].noConflict = function () {
$.fn[NAME$1] = JQUERY_NO_CONFLICT$1;
return Button._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$2 = 'carousel';
var VERSION$2 = '4.3.1';
var DATA_KEY$2 = 'bs.carousel';
var EVENT_KEY$2 = "." + DATA_KEY$2;
var DATA_API_KEY$2 = '.data-api';
var JQUERY_NO_CONFLICT$2 = $.fn[NAME$2];
var ARROW_LEFT_KEYCODE = 37; // KeyboardEvent.which value for left arrow key
var ARROW_RIGHT_KEYCODE = 39; // KeyboardEvent.which value for right arrow key
var TOUCHEVENT_COMPAT_WAIT = 500; // Time for mouse compat events to fire after touch
var SWIPE_THRESHOLD = 40;
var Default = {
interval: 5000,
keyboard: true,
slide: false,
pause: 'hover',
wrap: true,
touch: true
};
var DefaultType = {
interval: '(number|boolean)',
keyboard: 'boolean',
slide: '(boolean|string)',
pause: '(string|boolean)',
wrap: 'boolean',
touch: 'boolean'
};
var Direction = {
NEXT: 'next',
PREV: 'prev',
LEFT: 'left',
RIGHT: 'right'
};
var Event$2 = {
SLIDE: "slide" + EVENT_KEY$2,
SLID: "slid" + EVENT_KEY$2,
KEYDOWN: "keydown" + EVENT_KEY$2,
MOUSEENTER: "mouseenter" + EVENT_KEY$2,
MOUSELEAVE: "mouseleave" + EVENT_KEY$2,
TOUCHSTART: "touchstart" + EVENT_KEY$2,
TOUCHMOVE: "touchmove" + EVENT_KEY$2,
TOUCHEND: "touchend" + EVENT_KEY$2,
POINTERDOWN: "pointerdown" + EVENT_KEY$2,
POINTERUP: "pointerup" + EVENT_KEY$2,
DRAG_START: "dragstart" + EVENT_KEY$2,
LOAD_DATA_API: "load" + EVENT_KEY$2 + DATA_API_KEY$2,
CLICK_DATA_API: "click" + EVENT_KEY$2 + DATA_API_KEY$2
};
var ClassName$2 = {
CAROUSEL: 'carousel',
ACTIVE: 'active',
SLIDE: 'slide',
RIGHT: 'carousel-item-right',
LEFT: 'carousel-item-left',
NEXT: 'carousel-item-next',
PREV: 'carousel-item-prev',
ITEM: 'carousel-item',
POINTER_EVENT: 'pointer-event'
};
var Selector$2 = {
ACTIVE: '.active',
ACTIVE_ITEM: '.active.carousel-item',
ITEM: '.carousel-item',
ITEM_IMG: '.carousel-item img',
NEXT_PREV: '.carousel-item-next, .carousel-item-prev',
INDICATORS: '.carousel-indicators',
DATA_SLIDE: '[data-slide], [data-slide-to]',
DATA_RIDE: '[data-ride="carousel"]'
};
var PointerType = {
TOUCH: 'touch',
PEN: 'pen'
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Carousel =
/*#__PURE__*/
function () {
function Carousel(element, config) {
this._items = null;
this._interval = null;
this._activeElement = null;
this._isPaused = false;
this._isSliding = false;
this.touchTimeout = null;
this.touchStartX = 0;
this.touchDeltaX = 0;
this._config = this._getConfig(config);
this._element = element;
this._indicatorsElement = this._element.querySelector(Selector$2.INDICATORS);
this._touchSupported = 'ontouchstart' in document.documentElement || navigator.maxTouchPoints > 0;
this._pointerEvent = Boolean(window.PointerEvent || window.MSPointerEvent);
this._addEventListeners();
} // Getters
var _proto = Carousel.prototype;
// Public
_proto.next = function next() {
if (!this._isSliding) {
this._slide(Direction.NEXT);
}
};
_proto.nextWhenVisible = function nextWhenVisible() {
// Don't call next when the page isn't visible
// or the carousel or its parent isn't visible
if (!document.hidden && $(this._element).is(':visible') && $(this._element).css('visibility') !== 'hidden') {
this.next();
}
};
_proto.prev = function prev() {
if (!this._isSliding) {
this._slide(Direction.PREV);
}
};
_proto.pause = function pause(event) {
if (!event) {
this._isPaused = true;
}
if (this._element.querySelector(Selector$2.NEXT_PREV)) {
Util.triggerTransitionEnd(this._element);
this.cycle(true);
}
clearInterval(this._interval);
this._interval = null;
};
_proto.cycle = function cycle(event) {
if (!event) {
this._isPaused = false;
}
if (this._interval) {
clearInterval(this._interval);
this._interval = null;
}
if (this._config.interval && !this._isPaused) {
this._interval = setInterval((document.visibilityState ? this.nextWhenVisible : this.next).bind(this), this._config.interval);
}
};
_proto.to = function to(index) {
var _this = this;
this._activeElement = this._element.querySelector(Selector$2.ACTIVE_ITEM);
var activeIndex = this._getItemIndex(this._activeElement);
if (index > this._items.length - 1 || index < 0) {
return;
}
if (this._isSliding) {
$(this._element).one(Event$2.SLID, function () {
return _this.to(index);
});
return;
}
if (activeIndex === index) {
this.pause();
this.cycle();
return;
}
var direction = index > activeIndex ? Direction.NEXT : Direction.PREV;
this._slide(direction, this._items[index]);
};
_proto.dispose = function dispose() {
$(this._element).off(EVENT_KEY$2);
$.removeData(this._element, DATA_KEY$2);
this._items = null;
this._config = null;
this._element = null;
this._interval = null;
this._isPaused = null;
this._isSliding = null;
this._activeElement = null;
this._indicatorsElement = null;
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, Default, config);
Util.typeCheckConfig(NAME$2, config, DefaultType);
return config;
};
_proto._handleSwipe = function _handleSwipe() {
var absDeltax = Math.abs(this.touchDeltaX);
if (absDeltax <= SWIPE_THRESHOLD) {
return;
}
var direction = absDeltax / this.touchDeltaX; // swipe left
if (direction > 0) {
this.prev();
} // swipe right
if (direction < 0) {
this.next();
}
};
_proto._addEventListeners = function _addEventListeners() {
var _this2 = this;
if (this._config.keyboard) {
$(this._element).on(Event$2.KEYDOWN, function (event) {
return _this2._keydown(event);
});
}
if (this._config.pause === 'hover') {
$(this._element).on(Event$2.MOUSEENTER, function (event) {
return _this2.pause(event);
}).on(Event$2.MOUSELEAVE, function (event) {
return _this2.cycle(event);
});
}
if (this._config.touch) {
this._addTouchEventListeners();
}
};
_proto._addTouchEventListeners = function _addTouchEventListeners() {
var _this3 = this;
if (!this._touchSupported) {
return;
}
var start = function start(event) {
if (_this3._pointerEvent && PointerType[event.originalEvent.pointerType.toUpperCase()]) {
_this3.touchStartX = event.originalEvent.clientX;
} else if (!_this3._pointerEvent) {
_this3.touchStartX = event.originalEvent.touches[0].clientX;
}
};
var move = function move(event) {
// ensure swiping with one touch and not pinching
if (event.originalEvent.touches && event.originalEvent.touches.length > 1) {
_this3.touchDeltaX = 0;
} else {
_this3.touchDeltaX = event.originalEvent.touches[0].clientX - _this3.touchStartX;
}
};
var end = function end(event) {
if (_this3._pointerEvent && PointerType[event.originalEvent.pointerType.toUpperCase()]) {
_this3.touchDeltaX = event.originalEvent.clientX - _this3.touchStartX;
}
_this3._handleSwipe();
if (_this3._config.pause === 'hover') {
// If it's a touch-enabled device, mouseenter/leave are fired as
// part of the mouse compatibility events on first tap - the carousel
// would stop cycling until user tapped out of it;
// here, we listen for touchend, explicitly pause the carousel
// (as if it's the second time we tap on it, mouseenter compat event
// is NOT fired) and after a timeout (to allow for mouse compatibility
// events to fire) we explicitly restart cycling
_this3.pause();
if (_this3.touchTimeout) {
clearTimeout(_this3.touchTimeout);
}
_this3.touchTimeout = setTimeout(function (event) {
return _this3.cycle(event);
}, TOUCHEVENT_COMPAT_WAIT + _this3._config.interval);
}
};
$(this._element.querySelectorAll(Selector$2.ITEM_IMG)).on(Event$2.DRAG_START, function (e) {
return e.preventDefault();
});
if (this._pointerEvent) {
$(this._element).on(Event$2.POINTERDOWN, function (event) {
return start(event);
});
$(this._element).on(Event$2.POINTERUP, function (event) {
return end(event);
});
this._element.classList.add(ClassName$2.POINTER_EVENT);
} else {
$(this._element).on(Event$2.TOUCHSTART, function (event) {
return start(event);
});
$(this._element).on(Event$2.TOUCHMOVE, function (event) {
return move(event);
});
$(this._element).on(Event$2.TOUCHEND, function (event) {
return end(event);
});
}
};
_proto._keydown = function _keydown(event) {
if (/input|textarea/i.test(event.target.tagName)) {
return;
}
switch (event.which) {
case ARROW_LEFT_KEYCODE:
event.preventDefault();
this.prev();
break;
case ARROW_RIGHT_KEYCODE:
event.preventDefault();
this.next();
break;
default:
}
};
_proto._getItemIndex = function _getItemIndex(element) {
this._items = element && element.parentNode ? [].slice.call(element.parentNode.querySelectorAll(Selector$2.ITEM)) : [];
return this._items.indexOf(element);
};
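// Resolve which item becomes active for a given direction. When `wrap` is
// disabled and the active item is already at the boundary, the active element
// itself is returned so no slide occurs.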
_proto._getItemByDirection = function _getItemByDirection(direction, activeElement) {
var isNextDirection = direction === Direction.NEXT;
var isPrevDirection = direction === Direction.PREV;
var activeIndex = this._getItemIndex(activeElement);
var lastItemIndex = this._items.length - 1;
var isGoingToWrap = isPrevDirection && activeIndex === 0 || isNextDirection && activeIndex === lastItemIndex;
if (isGoingToWrap && !this._config.wrap) {
return activeElement;
}
var delta = direction === Direction.PREV ? -1 : 1;
var itemIndex = (activeIndex + delta) % this._items.length;
return itemIndex === -1 ? this._items[this._items.length - 1] : this._items[itemIndex];
};
_proto._triggerSlideEvent = function _triggerSlideEvent(relatedTarget, eventDirectionName) {
var targetIndex = this._getItemIndex(relatedTarget);
var fromIndex = this._getItemIndex(this._element.querySelector(Selector$2.ACTIVE_ITEM));
var slideEvent = $.Event(Event$2.SLIDE, {
relatedTarget: relatedTarget,
direction: eventDirectionName,
from: fromIndex,
to: targetIndex
});
$(this._element).trigger(slideEvent);
return slideEvent;
};
_proto._setActiveIndicatorElement = function _setActiveIndicatorElement(element) {
if (this._indicatorsElement) {
var indicators = [].slice.call(this._indicatorsElement.querySelectorAll(Selector$2.ACTIVE));
$(indicators).removeClass(ClassName$2.ACTIVE);
var nextIndicator = this._indicatorsElement.children[this._getItemIndex(element)];
if (nextIndicator) {
$(nextIndicator).addClass(ClassName$2.ACTIVE);
}
}
};
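// Core slide routine: fires the cancelable `slide` event, applies the order
// and directional CSS classes, waits for the CSS transition (when the `slide`
// class is present) and finally fires the `slid` event.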
_proto._slide = function _slide(direction, element) {
var _this4 = this;
var activeElement = this._element.querySelector(Selector$2.ACTIVE_ITEM);
var activeElementIndex = this._getItemIndex(activeElement);
var nextElement = element || activeElement && this._getItemByDirection(direction, activeElement);
var nextElementIndex = this._getItemIndex(nextElement);
var isCycling = Boolean(this._interval);
var directionalClassName;
var orderClassName;
var eventDirectionName;
if (direction === Direction.NEXT) {
directionalClassName = ClassName$2.LEFT;
orderClassName = ClassName$2.NEXT;
eventDirectionName = Direction.LEFT;
} else {
directionalClassName = ClassName$2.RIGHT;
orderClassName = ClassName$2.PREV;
eventDirectionName = Direction.RIGHT;
}
if (nextElement && $(nextElement).hasClass(ClassName$2.ACTIVE)) {
this._isSliding = false;
return;
}
var slideEvent = this._triggerSlideEvent(nextElement, eventDirectionName);
if (slideEvent.isDefaultPrevented()) {
return;
}
if (!activeElement || !nextElement) {
// Some weirdness is happening, so we bail
return;
}
this._isSliding = true;
if (isCycling) {
this.pause();
}
this._setActiveIndicatorElement(nextElement);
var slidEvent = $.Event(Event$2.SLID, {
relatedTarget: nextElement,
direction: eventDirectionName,
from: activeElementIndex,
to: nextElementIndex
});
if ($(this._element).hasClass(ClassName$2.SLIDE)) {
$(nextElement).addClass(orderClassName);
Util.reflow(nextElement);
$(activeElement).addClass(directionalClassName);
$(nextElement).addClass(directionalClassName);
var nextElementInterval = parseInt(nextElement.getAttribute('data-interval'), 10);
if (nextElementInterval) {
this._config.defaultInterval = this._config.defaultInterval || this._config.interval;
this._config.interval = nextElementInterval;
} else {
this._config.interval = this._config.defaultInterval || this._config.interval;
}
var transitionDuration = Util.getTransitionDurationFromElement(activeElement);
$(activeElement).one(Util.TRANSITION_END, function () {
$(nextElement).removeClass(directionalClassName + " " + orderClassName).addClass(ClassName$2.ACTIVE);
$(activeElement).removeClass(ClassName$2.ACTIVE + " " + orderClassName + " " + directionalClassName);
_this4._isSliding = false;
setTimeout(function () {
return $(_this4._element).trigger(slidEvent);
}, 0);
}).emulateTransitionEnd(transitionDuration);
} else {
$(activeElement).removeClass(ClassName$2.ACTIVE);
$(nextElement).addClass(ClassName$2.ACTIVE);
this._isSliding = false;
$(this._element).trigger(slidEvent);
}
if (isCycling) {
this.cycle();
}
} // Static
;
Carousel._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$2);
var _config = _objectSpread({}, Default, $(this).data());
if (typeof config === 'object') {
_config = _objectSpread({}, _config, config);
}
var action = typeof config === 'string' ? config : _config.slide;
if (!data) {
data = new Carousel(this, _config);
$(this).data(DATA_KEY$2, data);
}
if (typeof config === 'number') {
data.to(config);
} else if (typeof action === 'string') {
if (typeof data[action] === 'undefined') {
throw new TypeError("No method named \"" + action + "\"");
}
data[action]();
} else if (_config.interval && _config.ride) {
data.pause();
data.cycle();
}
});
};
Carousel._dataApiClickHandler = function _dataApiClickHandler(event) {
var selector = Util.getSelectorFromElement(this);
if (!selector) {
return;
}
var target = $(selector)[0];
if (!target || !$(target).hasClass(ClassName$2.CAROUSEL)) {
return;
}
var config = _objectSpread({}, $(target).data(), $(this).data());
var slideIndex = this.getAttribute('data-slide-to');
if (slideIndex) {
config.interval = false;
}
Carousel._jQueryInterface.call($(target), config);
if (slideIndex) {
$(target).data(DATA_KEY$2).to(slideIndex);
}
event.preventDefault();
};
_createClass(Carousel, null, [{
key: "VERSION",
get: function get() {
return VERSION$2;
}
}, {
key: "Default",
get: function get() {
return Default;
}
}]);
return Carousel;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$2.CLICK_DATA_API, Selector$2.DATA_SLIDE, Carousel._dataApiClickHandler);
$(window).on(Event$2.LOAD_DATA_API, function () {
var carousels = [].slice.call(document.querySelectorAll(Selector$2.DATA_RIDE));
for (var i = 0, len = carousels.length; i < len; i++) {
var $carousel = $(carousels[i]);
Carousel._jQueryInterface.call($carousel, $carousel.data());
}
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$2] = Carousel._jQueryInterface;
$.fn[NAME$2].Constructor = Carousel;
$.fn[NAME$2].noConflict = function () {
$.fn[NAME$2] = JQUERY_NO_CONFLICT$2;
return Carousel._jQueryInterface;
};
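// Example of driving the plugin registered above from script (the element id
// is illustrative; any element using the carousel markup works):
//   $('#myCarousel').carousel({ interval: 2000 }); // initialize with options
//   $('#myCarousel').carousel('next');             // invoke a public method
//   $('#myCarousel').carousel(2);                  // jump straight to slide index 2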
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$3 = 'collapse';
var VERSION$3 = '4.3.1';
var DATA_KEY$3 = 'bs.collapse';
var EVENT_KEY$3 = "." + DATA_KEY$3;
var DATA_API_KEY$3 = '.data-api';
var JQUERY_NO_CONFLICT$3 = $.fn[NAME$3];
var Default$1 = {
toggle: true,
parent: ''
};
var DefaultType$1 = {
toggle: 'boolean',
parent: '(string|element)'
};
var Event$3 = {
SHOW: "show" + EVENT_KEY$3,
SHOWN: "shown" + EVENT_KEY$3,
HIDE: "hide" + EVENT_KEY$3,
HIDDEN: "hidden" + EVENT_KEY$3,
CLICK_DATA_API: "click" + EVENT_KEY$3 + DATA_API_KEY$3
};
var ClassName$3 = {
SHOW: 'show',
COLLAPSE: 'collapse',
COLLAPSING: 'collapsing',
COLLAPSED: 'collapsed'
};
var Dimension = {
WIDTH: 'width',
HEIGHT: 'height'
};
var Selector$3 = {
ACTIVES: '.show, .collapsing',
DATA_TOGGLE: '[data-toggle="collapse"]'
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
var Collapse =
/*#__PURE__*/
function () {
function Collapse(element, config) {
this._isTransitioning = false;
this._element = element;
this._config = this._getConfig(config);
this._triggerArray = [].slice.call(document.querySelectorAll("[data-toggle=\"collapse\"][href=\"#" + element.id + "\"]," + ("[data-toggle=\"collapse\"][data-target=\"#" + element.id + "\"]")));
var toggleList = [].slice.call(document.querySelectorAll(Selector$3.DATA_TOGGLE));
for (var i = 0, len = toggleList.length; i < len; i++) {
var elem = toggleList[i];
var selector = Util.getSelectorFromElement(elem);
var filterElement = [].slice.call(document.querySelectorAll(selector)).filter(function (foundElem) {
return foundElem === element;
});
if (selector !== null && filterElement.length > 0) {
this._selector = selector;
this._triggerArray.push(elem);
}
}
this._parent = this._config.parent ? this._getParent() : null;
if (!this._config.parent) {
this._addAriaAndCollapsedClass(this._element, this._triggerArray);
}
if (this._config.toggle) {
this.toggle();
}
} // Getters
var _proto = Collapse.prototype;
// Public
_proto.toggle = function toggle() {
if ($(this._element).hasClass(ClassName$3.SHOW)) {
this.hide();
} else {
this.show();
}
};
_proto.show = function show() {
var _this = this;
if (this._isTransitioning || $(this._element).hasClass(ClassName$3.SHOW)) {
return;
}
var actives;
var activesData;
if (this._parent) {
actives = [].slice.call(this._parent.querySelectorAll(Selector$3.ACTIVES)).filter(function (elem) {
if (typeof _this._config.parent === 'string') {
return elem.getAttribute('data-parent') === _this._config.parent;
}
return elem.classList.contains(ClassName$3.COLLAPSE);
});
if (actives.length === 0) {
actives = null;
}
}
if (actives) {
activesData = $(actives).not(this._selector).data(DATA_KEY$3);
if (activesData && activesData._isTransitioning) {
return;
}
}
var startEvent = $.Event(Event$3.SHOW);
$(this._element).trigger(startEvent);
if (startEvent.isDefaultPrevented()) {
return;
}
if (actives) {
Collapse._jQueryInterface.call($(actives).not(this._selector), 'hide');
if (!activesData) {
$(actives).data(DATA_KEY$3, null);
}
}
var dimension = this._getDimension();
$(this._element).removeClass(ClassName$3.COLLAPSE).addClass(ClassName$3.COLLAPSING);
this._element.style[dimension] = 0;
if (this._triggerArray.length) {
$(this._triggerArray).removeClass(ClassName$3.COLLAPSED).attr('aria-expanded', true);
}
this.setTransitioning(true);
var complete = function complete() {
$(_this._element).removeClass(ClassName$3.COLLAPSING).addClass(ClassName$3.COLLAPSE).addClass(ClassName$3.SHOW);
_this._element.style[dimension] = '';
_this.setTransitioning(false);
$(_this._element).trigger(Event$3.SHOWN);
};
var capitalizedDimension = dimension[0].toUpperCase() + dimension.slice(1);
var scrollSize = "scroll" + capitalizedDimension;
var transitionDuration = Util.getTransitionDurationFromElement(this._element);
$(this._element).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
this._element.style[dimension] = this._element[scrollSize] + "px";
};
_proto.hide = function hide() {
var _this2 = this;
if (this._isTransitioning || !$(this._element).hasClass(ClassName$3.SHOW)) {
return;
}
var startEvent = $.Event(Event$3.HIDE);
$(this._element).trigger(startEvent);
if (startEvent.isDefaultPrevented()) {
return;
}
var dimension = this._getDimension();
this._element.style[dimension] = this._element.getBoundingClientRect()[dimension] + "px";
Util.reflow(this._element);
$(this._element).addClass(ClassName$3.COLLAPSING).removeClass(ClassName$3.COLLAPSE).removeClass(ClassName$3.SHOW);
var triggerArrayLength = this._triggerArray.length;
if (triggerArrayLength > 0) {
for (var i = 0; i < triggerArrayLength; i++) {
var trigger = this._triggerArray[i];
var selector = Util.getSelectorFromElement(trigger);
if (selector !== null) {
var $elem = $([].slice.call(document.querySelectorAll(selector)));
if (!$elem.hasClass(ClassName$3.SHOW)) {
$(trigger).addClass(ClassName$3.COLLAPSED).attr('aria-expanded', false);
}
}
}
}
this.setTransitioning(true);
var complete = function complete() {
_this2.setTransitioning(false);
$(_this2._element).removeClass(ClassName$3.COLLAPSING).addClass(ClassName$3.COLLAPSE).trigger(Event$3.HIDDEN);
};
this._element.style[dimension] = '';
var transitionDuration = Util.getTransitionDurationFromElement(this._element);
$(this._element).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
};
_proto.setTransitioning = function setTransitioning(isTransitioning) {
this._isTransitioning = isTransitioning;
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY$3);
this._config = null;
this._parent = null;
this._element = null;
this._triggerArray = null;
this._isTransitioning = null;
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, Default$1, config);
config.toggle = Boolean(config.toggle); // Coerce string values
Util.typeCheckConfig(NAME$3, config, DefaultType$1);
return config;
};
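// A collapse animates its height by default; adding the `width` class to the
// element switches the animated dimension to width.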
_proto._getDimension = function _getDimension() {
var hasWidth = $(this._element).hasClass(Dimension.WIDTH);
return hasWidth ? Dimension.WIDTH : Dimension.HEIGHT;
};
_proto._getParent = function _getParent() {
var _this3 = this;
var parent;
if (Util.isElement(this._config.parent)) {
parent = this._config.parent; // It's a DOM element or a jQuery object
if (typeof this._config.parent.jquery !== 'undefined') {
parent = this._config.parent[0];
}
} else {
parent = document.querySelector(this._config.parent);
}
var selector = "[data-toggle=\"collapse\"][data-parent=\"" + this._config.parent + "\"]";
var children = [].slice.call(parent.querySelectorAll(selector));
$(children).each(function (i, element) {
_this3._addAriaAndCollapsedClass(Collapse._getTargetFromElement(element), [element]);
});
return parent;
};
_proto._addAriaAndCollapsedClass = function _addAriaAndCollapsedClass(element, triggerArray) {
var isOpen = $(element).hasClass(ClassName$3.SHOW);
if (triggerArray.length) {
$(triggerArray).toggleClass(ClassName$3.COLLAPSED, !isOpen).attr('aria-expanded', isOpen);
}
} // Static
;
Collapse._getTargetFromElement = function _getTargetFromElement(element) {
var selector = Util.getSelectorFromElement(element);
return selector ? document.querySelector(selector) : null;
};
Collapse._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var $this = $(this);
var data = $this.data(DATA_KEY$3);
var _config = _objectSpread({}, Default$1, $this.data(), typeof config === 'object' && config ? config : {});
if (!data && _config.toggle && /show|hide/.test(config)) {
_config.toggle = false;
}
if (!data) {
data = new Collapse(this, _config);
$this.data(DATA_KEY$3, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(Collapse, null, [{
key: "VERSION",
get: function get() {
return VERSION$3;
}
}, {
key: "Default",
get: function get() {
return Default$1;
}
}]);
return Collapse;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$3.CLICK_DATA_API, Selector$3.DATA_TOGGLE, function (event) {
// preventDefault only for <a> elements (which change the URL) not inside the collapsible element
if (event.currentTarget.tagName === 'A') {
event.preventDefault();
}
var $trigger = $(this);
var selector = Util.getSelectorFromElement(this);
var selectors = [].slice.call(document.querySelectorAll(selector));
$(selectors).each(function () {
var $target = $(this);
var data = $target.data(DATA_KEY$3);
var config = data ? 'toggle' : $trigger.data();
Collapse._jQueryInterface.call($target, config);
});
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$3] = Collapse._jQueryInterface;
$.fn[NAME$3].Constructor = Collapse;
$.fn[NAME$3].noConflict = function () {
$.fn[NAME$3] = JQUERY_NO_CONFLICT$3;
return Collapse._jQueryInterface;
};
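// Example of programmatic use (illustrative id):
//   $('#details').collapse({ toggle: false }); // initialize without toggling
//   $('#details').collapse('show');            // then expand it on demand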
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$4 = 'dropdown';
var VERSION$4 = '4.3.1';
var DATA_KEY$4 = 'bs.dropdown';
var EVENT_KEY$4 = "." + DATA_KEY$4;
var DATA_API_KEY$4 = '.data-api';
var JQUERY_NO_CONFLICT$4 = $.fn[NAME$4];
var ESCAPE_KEYCODE = 27; // KeyboardEvent.which value for Escape (Esc) key
var SPACE_KEYCODE = 32; // KeyboardEvent.which value for space key
var TAB_KEYCODE = 9; // KeyboardEvent.which value for tab key
var ARROW_UP_KEYCODE = 38; // KeyboardEvent.which value for up arrow key
var ARROW_DOWN_KEYCODE = 40; // KeyboardEvent.which value for down arrow key
var RIGHT_MOUSE_BUTTON_WHICH = 3; // MouseEvent.which value for the right button (assuming a right-handed mouse)
var REGEXP_KEYDOWN = new RegExp(ARROW_UP_KEYCODE + "|" + ARROW_DOWN_KEYCODE + "|" + ESCAPE_KEYCODE);
var Event$4 = {
HIDE: "hide" + EVENT_KEY$4,
HIDDEN: "hidden" + EVENT_KEY$4,
SHOW: "show" + EVENT_KEY$4,
SHOWN: "shown" + EVENT_KEY$4,
CLICK: "click" + EVENT_KEY$4,
CLICK_DATA_API: "click" + EVENT_KEY$4 + DATA_API_KEY$4,
KEYDOWN_DATA_API: "keydown" + EVENT_KEY$4 + DATA_API_KEY$4,
KEYUP_DATA_API: "keyup" + EVENT_KEY$4 + DATA_API_KEY$4
};
var ClassName$4 = {
DISABLED: 'disabled',
SHOW: 'show',
DROPUP: 'dropup',
DROPRIGHT: 'dropright',
DROPLEFT: 'dropleft',
MENURIGHT: 'dropdown-menu-right',
MENULEFT: 'dropdown-menu-left',
POSITION_STATIC: 'position-static'
};
var Selector$4 = {
DATA_TOGGLE: '[data-toggle="dropdown"]',
FORM_CHILD: '.dropdown form',
MENU: '.dropdown-menu',
NAVBAR_NAV: '.navbar-nav',
VISIBLE_ITEMS: '.dropdown-menu .dropdown-item:not(.disabled):not(:disabled)'
};
var AttachmentMap = {
TOP: 'top-start',
TOPEND: 'top-end',
BOTTOM: 'bottom-start',
BOTTOMEND: 'bottom-end',
RIGHT: 'right-start',
RIGHTEND: 'right-end',
LEFT: 'left-start',
LEFTEND: 'left-end'
};
var Default$2 = {
offset: 0,
flip: true,
boundary: 'scrollParent',
reference: 'toggle',
display: 'dynamic'
};
var DefaultType$2 = {
offset: '(number|string|function)',
flip: 'boolean',
boundary: '(string|element)',
reference: '(string|element)',
display: 'string'
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
var Dropdown =
/*#__PURE__*/
function () {
function Dropdown(element, config) {
this._element = element;
this._popper = null;
this._config = this._getConfig(config);
this._menu = this._getMenuElement();
this._inNavbar = this._detectNavbar();
this._addEventListeners();
} // Getters
var _proto = Dropdown.prototype;
// Public
_proto.toggle = function toggle() {
if (this._element.disabled || $(this._element).hasClass(ClassName$4.DISABLED)) {
return;
}
var parent = Dropdown._getParentFromElement(this._element);
var isActive = $(this._menu).hasClass(ClassName$4.SHOW);
Dropdown._clearMenus();
if (isActive) {
return;
}
var relatedTarget = {
relatedTarget: this._element
};
var showEvent = $.Event(Event$4.SHOW, relatedTarget);
$(parent).trigger(showEvent);
if (showEvent.isDefaultPrevented()) {
return;
} // Totally disable Popper.js for Dropdowns in Navbars
if (!this._inNavbar) {
/**
* Check for Popper dependency
* Popper - https://popper.js.org
*/
if (typeof Popper === 'undefined') {
throw new TypeError('Bootstrap\'s dropdowns require Popper.js (https://popper.js.org/)');
}
var referenceElement = this._element;
if (this._config.reference === 'parent') {
referenceElement = parent;
} else if (Util.isElement(this._config.reference)) {
referenceElement = this._config.reference; // Check if it's a jQuery element
if (typeof this._config.reference.jquery !== 'undefined') {
referenceElement = this._config.reference[0];
}
} // If boundary is not `scrollParent`, then set position to `static`
// to allow the menu to "escape" the scroll parent's boundaries
// https://github.com/twbs/bootstrap/issues/24251
if (this._config.boundary !== 'scrollParent') {
$(parent).addClass(ClassName$4.POSITION_STATIC);
}
this._popper = new Popper(referenceElement, this._menu, this._getPopperConfig());
} // If this is a touch-enabled device we add extra
// empty mouseover listeners to the body's immediate children;
// only needed because of broken event delegation on iOS
// https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html
if ('ontouchstart' in document.documentElement && $(parent).closest(Selector$4.NAVBAR_NAV).length === 0) {
$(document.body).children().on('mouseover', null, $.noop);
}
this._element.focus();
this._element.setAttribute('aria-expanded', true);
$(this._menu).toggleClass(ClassName$4.SHOW);
$(parent).toggleClass(ClassName$4.SHOW).trigger($.Event(Event$4.SHOWN, relatedTarget));
};
_proto.show = function show() {
if (this._element.disabled || $(this._element).hasClass(ClassName$4.DISABLED) || $(this._menu).hasClass(ClassName$4.SHOW)) {
return;
}
var relatedTarget = {
relatedTarget: this._element
};
var showEvent = $.Event(Event$4.SHOW, relatedTarget);
var parent = Dropdown._getParentFromElement(this._element);
$(parent).trigger(showEvent);
if (showEvent.isDefaultPrevented()) {
return;
}
$(this._menu).toggleClass(ClassName$4.SHOW);
$(parent).toggleClass(ClassName$4.SHOW).trigger($.Event(Event$4.SHOWN, relatedTarget));
};
_proto.hide = function hide() {
if (this._element.disabled || $(this._element).hasClass(ClassName$4.DISABLED) || !$(this._menu).hasClass(ClassName$4.SHOW)) {
return;
}
var relatedTarget = {
relatedTarget: this._element
};
var hideEvent = $.Event(Event$4.HIDE, relatedTarget);
var parent = Dropdown._getParentFromElement(this._element);
$(parent).trigger(hideEvent);
if (hideEvent.isDefaultPrevented()) {
return;
}
$(this._menu).toggleClass(ClassName$4.SHOW);
$(parent).toggleClass(ClassName$4.SHOW).trigger($.Event(Event$4.HIDDEN, relatedTarget));
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY$4);
$(this._element).off(EVENT_KEY$4);
this._element = null;
this._menu = null;
if (this._popper !== null) {
this._popper.destroy();
this._popper = null;
}
};
_proto.update = function update() {
this._inNavbar = this._detectNavbar();
if (this._popper !== null) {
this._popper.scheduleUpdate();
}
} // Private
;
_proto._addEventListeners = function _addEventListeners() {
var _this = this;
$(this._element).on(Event$4.CLICK, function (event) {
event.preventDefault();
event.stopPropagation();
_this.toggle();
});
};
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, this.constructor.Default, $(this._element).data(), config);
Util.typeCheckConfig(NAME$4, config, this.constructor.DefaultType);
return config;
};
_proto._getMenuElement = function _getMenuElement() {
if (!this._menu) {
var parent = Dropdown._getParentFromElement(this._element);
if (parent) {
this._menu = parent.querySelector(Selector$4.MENU);
}
}
return this._menu;
};
_proto._getPlacement = function _getPlacement() {
var $parentDropdown = $(this._element.parentNode);
var placement = AttachmentMap.BOTTOM; // Handle dropup
if ($parentDropdown.hasClass(ClassName$4.DROPUP)) {
placement = AttachmentMap.TOP;
if ($(this._menu).hasClass(ClassName$4.MENURIGHT)) {
placement = AttachmentMap.TOPEND;
}
} else if ($parentDropdown.hasClass(ClassName$4.DROPRIGHT)) {
placement = AttachmentMap.RIGHT;
} else if ($parentDropdown.hasClass(ClassName$4.DROPLEFT)) {
placement = AttachmentMap.LEFT;
} else if ($(this._menu).hasClass(ClassName$4.MENURIGHT)) {
placement = AttachmentMap.BOTTOMEND;
}
return placement;
};
_proto._detectNavbar = function _detectNavbar() {
return $(this._element).closest('.navbar').length > 0;
};
_proto._getOffset = function _getOffset() {
var _this2 = this;
var offset = {};
if (typeof this._config.offset === 'function') {
offset.fn = function (data) {
data.offsets = _objectSpread({}, data.offsets, _this2._config.offset(data.offsets, _this2._element) || {});
return data;
};
} else {
offset.offset = this._config.offset;
}
return offset;
};
_proto._getPopperConfig = function _getPopperConfig() {
var popperConfig = {
placement: this._getPlacement(),
modifiers: {
offset: this._getOffset(),
flip: {
enabled: this._config.flip
},
preventOverflow: {
boundariesElement: this._config.boundary
}
} // Disable Popper.js if we have a static display
};
if (this._config.display === 'static') {
popperConfig.modifiers.applyStyle = {
enabled: false
};
}
return popperConfig;
} // Static
;
Dropdown._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$4);
var _config = typeof config === 'object' ? config : null;
if (!data) {
data = new Dropdown(this, _config);
$(this).data(DATA_KEY$4, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
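// Close every currently open dropdown on the page. Used by the document-level
// click/keyup handlers; fires hide/hidden events per dropdown and skips any
// whose hide event is default-prevented.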
Dropdown._clearMenus = function _clearMenus(event) {
if (event && (event.which === RIGHT_MOUSE_BUTTON_WHICH || event.type === 'keyup' && event.which !== TAB_KEYCODE)) {
return;
}
var toggles = [].slice.call(document.querySelectorAll(Selector$4.DATA_TOGGLE));
for (var i = 0, len = toggles.length; i < len; i++) {
var parent = Dropdown._getParentFromElement(toggles[i]);
var context = $(toggles[i]).data(DATA_KEY$4);
var relatedTarget = {
relatedTarget: toggles[i]
};
if (event && event.type === 'click') {
relatedTarget.clickEvent = event;
}
if (!context) {
continue;
}
var dropdownMenu = context._menu;
if (!$(parent).hasClass(ClassName$4.SHOW)) {
continue;
}
if (event && (event.type === 'click' && /input|textarea/i.test(event.target.tagName) || event.type === 'keyup' && event.which === TAB_KEYCODE) && $.contains(parent, event.target)) {
continue;
}
var hideEvent = $.Event(Event$4.HIDE, relatedTarget);
$(parent).trigger(hideEvent);
if (hideEvent.isDefaultPrevented()) {
continue;
} // If this is a touch-enabled device we remove the extra
// empty mouseover listeners we added for iOS support
if ('ontouchstart' in document.documentElement) {
$(document.body).children().off('mouseover', null, $.noop);
}
toggles[i].setAttribute('aria-expanded', 'false');
$(dropdownMenu).removeClass(ClassName$4.SHOW);
$(parent).removeClass(ClassName$4.SHOW).trigger($.Event(Event$4.HIDDEN, relatedTarget));
}
};
Dropdown._getParentFromElement = function _getParentFromElement(element) {
var parent;
var selector = Util.getSelectorFromElement(element);
if (selector) {
parent = document.querySelector(selector);
}
return parent || element.parentNode;
} // eslint-disable-next-line complexity
;
Dropdown._dataApiKeydownHandler = function _dataApiKeydownHandler(event) {
// Bail out on keys that are not dropdown commands:
//  - outside an input/textarea: any key not matched by REGEXP_KEYDOWN (up, down, escape)
//  - inside an input/textarea: the space key, or any key other than escape
//    when it is not an up/down arrow or when the event originates inside the menu
if (/input|textarea/i.test(event.target.tagName) ? event.which === SPACE_KEYCODE || event.which !== ESCAPE_KEYCODE && (event.which !== ARROW_DOWN_KEYCODE && event.which !== ARROW_UP_KEYCODE || $(event.target).closest(Selector$4.MENU).length) : !REGEXP_KEYDOWN.test(event.which)) {
return;
}
event.preventDefault();
event.stopPropagation();
if (this.disabled || $(this).hasClass(ClassName$4.DISABLED)) {
return;
}
var parent = Dropdown._getParentFromElement(this);
var isActive = $(parent).hasClass(ClassName$4.SHOW);
if (!isActive || isActive && (event.which === ESCAPE_KEYCODE || event.which === SPACE_KEYCODE)) {
if (event.which === ESCAPE_KEYCODE) {
var toggle = parent.querySelector(Selector$4.DATA_TOGGLE);
$(toggle).trigger('focus');
}
$(this).trigger('click');
return;
}
var items = [].slice.call(parent.querySelectorAll(Selector$4.VISIBLE_ITEMS));
if (items.length === 0) {
return;
}
var index = items.indexOf(event.target);
if (event.which === ARROW_UP_KEYCODE && index > 0) {
// Up
index--;
}
if (event.which === ARROW_DOWN_KEYCODE && index < items.length - 1) {
// Down
index++;
}
if (index < 0) {
index = 0;
}
items[index].focus();
};
_createClass(Dropdown, null, [{
key: "VERSION",
get: function get() {
return VERSION$4;
}
}, {
key: "Default",
get: function get() {
return Default$2;
}
}, {
key: "DefaultType",
get: function get() {
return DefaultType$2;
}
}]);
return Dropdown;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$4.KEYDOWN_DATA_API, Selector$4.DATA_TOGGLE, Dropdown._dataApiKeydownHandler).on(Event$4.KEYDOWN_DATA_API, Selector$4.MENU, Dropdown._dataApiKeydownHandler).on(Event$4.CLICK_DATA_API + " " + Event$4.KEYUP_DATA_API, Dropdown._clearMenus).on(Event$4.CLICK_DATA_API, Selector$4.DATA_TOGGLE, function (event) {
event.preventDefault();
event.stopPropagation();
Dropdown._jQueryInterface.call($(this), 'toggle');
}).on(Event$4.CLICK_DATA_API, Selector$4.FORM_CHILD, function (e) {
e.stopPropagation();
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$4] = Dropdown._jQueryInterface;
$.fn[NAME$4].Constructor = Dropdown;
$.fn[NAME$4].noConflict = function () {
$.fn[NAME$4] = JQUERY_NO_CONFLICT$4;
return Dropdown._jQueryInterface;
};
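// Example of programmatic use (illustrative selector):
//   $('.dropdown-toggle').dropdown();         // initialize
//   $('.dropdown-toggle').dropdown('toggle'); // open/close from script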
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$5 = 'modal';
var VERSION$5 = '4.3.1';
var DATA_KEY$5 = 'bs.modal';
var EVENT_KEY$5 = "." + DATA_KEY$5;
var DATA_API_KEY$5 = '.data-api';
var JQUERY_NO_CONFLICT$5 = $.fn[NAME$5];
var ESCAPE_KEYCODE$1 = 27; // KeyboardEvent.which value for Escape (Esc) key
var Default$3 = {
backdrop: true,
keyboard: true,
focus: true,
show: true
};
var DefaultType$3 = {
backdrop: '(boolean|string)',
keyboard: 'boolean',
focus: 'boolean',
show: 'boolean'
};
var Event$5 = {
HIDE: "hide" + EVENT_KEY$5,
HIDDEN: "hidden" + EVENT_KEY$5,
SHOW: "show" + EVENT_KEY$5,
SHOWN: "shown" + EVENT_KEY$5,
FOCUSIN: "focusin" + EVENT_KEY$5,
RESIZE: "resize" + EVENT_KEY$5,
CLICK_DISMISS: "click.dismiss" + EVENT_KEY$5,
KEYDOWN_DISMISS: "keydown.dismiss" + EVENT_KEY$5,
MOUSEUP_DISMISS: "mouseup.dismiss" + EVENT_KEY$5,
MOUSEDOWN_DISMISS: "mousedown.dismiss" + EVENT_KEY$5,
CLICK_DATA_API: "click" + EVENT_KEY$5 + DATA_API_KEY$5
};
var ClassName$5 = {
SCROLLABLE: 'modal-dialog-scrollable',
SCROLLBAR_MEASURER: 'modal-scrollbar-measure',
BACKDROP: 'modal-backdrop',
OPEN: 'modal-open',
FADE: 'fade',
SHOW: 'show'
};
var Selector$5 = {
DIALOG: '.modal-dialog',
MODAL_BODY: '.modal-body',
DATA_TOGGLE: '[data-toggle="modal"]',
DATA_DISMISS: '[data-dismiss="modal"]',
FIXED_CONTENT: '.fixed-top, .fixed-bottom, .is-fixed, .sticky-top',
STICKY_CONTENT: '.sticky-top'
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
var Modal =
/*#__PURE__*/
function () {
function Modal(element, config) {
this._config = this._getConfig(config);
this._element = element;
this._dialog = element.querySelector(Selector$5.DIALOG);
this._backdrop = null;
this._isShown = false;
this._isBodyOverflowing = false;
this._ignoreBackdropClick = false;
this._isTransitioning = false;
this._scrollbarWidth = 0;
} // Getters
var _proto = Modal.prototype;
// Public
_proto.toggle = function toggle(relatedTarget) {
return this._isShown ? this.hide() : this.show(relatedTarget);
};
_proto.show = function show(relatedTarget) {
var _this = this;
if (this._isShown || this._isTransitioning) {
return;
}
if ($(this._element).hasClass(ClassName$5.FADE)) {
this._isTransitioning = true;
}
var showEvent = $.Event(Event$5.SHOW, {
relatedTarget: relatedTarget
});
$(this._element).trigger(showEvent);
if (this._isShown || showEvent.isDefaultPrevented()) {
return;
}
this._isShown = true;
this._checkScrollbar();
this._setScrollbar();
this._adjustDialog();
this._setEscapeEvent();
this._setResizeEvent();
$(this._element).on(Event$5.CLICK_DISMISS, Selector$5.DATA_DISMISS, function (event) {
return _this.hide(event);
});
$(this._dialog).on(Event$5.MOUSEDOWN_DISMISS, function () {
$(_this._element).one(Event$5.MOUSEUP_DISMISS, function (event) {
if ($(event.target).is(_this._element)) {
_this._ignoreBackdropClick = true;
}
});
});
this._showBackdrop(function () {
return _this._showElement(relatedTarget);
});
};
_proto.hide = function hide(event) {
var _this2 = this;
if (event) {
event.preventDefault();
}
if (!this._isShown || this._isTransitioning) {
return;
}
var hideEvent = $.Event(Event$5.HIDE);
$(this._element).trigger(hideEvent);
if (!this._isShown || hideEvent.isDefaultPrevented()) {
return;
}
this._isShown = false;
var transition = $(this._element).hasClass(ClassName$5.FADE);
if (transition) {
this._isTransitioning = true;
}
this._setEscapeEvent();
this._setResizeEvent();
$(document).off(Event$5.FOCUSIN);
$(this._element).removeClass(ClassName$5.SHOW);
$(this._element).off(Event$5.CLICK_DISMISS);
$(this._dialog).off(Event$5.MOUSEDOWN_DISMISS);
if (transition) {
var transitionDuration = Util.getTransitionDurationFromElement(this._element);
$(this._element).one(Util.TRANSITION_END, function (event) {
return _this2._hideModal(event);
}).emulateTransitionEnd(transitionDuration);
} else {
this._hideModal();
}
};
_proto.dispose = function dispose() {
[window, this._element, this._dialog].forEach(function (htmlElement) {
return $(htmlElement).off(EVENT_KEY$5);
});
/**
* `document` has 2 events: `Event.FOCUSIN` and `Event.CLICK_DATA_API`.
* Do not add `document` to the array of elements above; doing so would also
* remove the `Event.CLICK_DATA_API` handler, which must remain.
*/
$(document).off(Event$5.FOCUSIN);
$.removeData(this._element, DATA_KEY$5);
this._config = null;
this._element = null;
this._dialog = null;
this._backdrop = null;
this._isShown = null;
this._isBodyOverflowing = null;
this._ignoreBackdropClick = null;
this._isTransitioning = null;
this._scrollbarWidth = null;
};
_proto.handleUpdate = function handleUpdate() {
this._adjustDialog();
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, Default$3, config);
Util.typeCheckConfig(NAME$5, config, DefaultType$3);
return config;
};
_proto._showElement = function _showElement(relatedTarget) {
var _this3 = this;
var transition = $(this._element).hasClass(ClassName$5.FADE);
if (!this._element.parentNode || this._element.parentNode.nodeType !== Node.ELEMENT_NODE) {
// Don't move modal's DOM position
document.body.appendChild(this._element);
}
this._element.style.display = 'block';
this._element.removeAttribute('aria-hidden');
this._element.setAttribute('aria-modal', true);
if ($(this._dialog).hasClass(ClassName$5.SCROLLABLE)) {
this._dialog.querySelector(Selector$5.MODAL_BODY).scrollTop = 0;
} else {
this._element.scrollTop = 0;
}
if (transition) {
Util.reflow(this._element);
}
$(this._element).addClass(ClassName$5.SHOW);
if (this._config.focus) {
this._enforceFocus();
}
var shownEvent = $.Event(Event$5.SHOWN, {
relatedTarget: relatedTarget
});
var transitionComplete = function transitionComplete() {
if (_this3._config.focus) {
_this3._element.focus();
}
_this3._isTransitioning = false;
$(_this3._element).trigger(shownEvent);
};
if (transition) {
var transitionDuration = Util.getTransitionDurationFromElement(this._dialog);
$(this._dialog).one(Util.TRANSITION_END, transitionComplete).emulateTransitionEnd(transitionDuration);
} else {
transitionComplete();
}
};
_proto._enforceFocus = function _enforceFocus() {
var _this4 = this;
$(document).off(Event$5.FOCUSIN) // Guard against infinite focus loop
.on(Event$5.FOCUSIN, function (event) {
if (document !== event.target && _this4._element !== event.target && $(_this4._element).has(event.target).length === 0) {
_this4._element.focus();
}
});
};
_proto._setEscapeEvent = function _setEscapeEvent() {
var _this5 = this;
if (this._isShown && this._config.keyboard) {
$(this._element).on(Event$5.KEYDOWN_DISMISS, function (event) {
if (event.which === ESCAPE_KEYCODE$1) {
event.preventDefault();
_this5.hide();
}
});
} else if (!this._isShown) {
$(this._element).off(Event$5.KEYDOWN_DISMISS);
}
};
_proto._setResizeEvent = function _setResizeEvent() {
var _this6 = this;
if (this._isShown) {
$(window).on(Event$5.RESIZE, function (event) {
return _this6.handleUpdate(event);
});
} else {
$(window).off(Event$5.RESIZE);
}
};
_proto._hideModal = function _hideModal() {
var _this7 = this;
this._element.style.display = 'none';
this._element.setAttribute('aria-hidden', true);
this._element.removeAttribute('aria-modal');
this._isTransitioning = false;
this._showBackdrop(function () {
$(document.body).removeClass(ClassName$5.OPEN);
_this7._resetAdjustments();
_this7._resetScrollbar();
$(_this7._element).trigger(Event$5.HIDDEN);
});
};
_proto._removeBackdrop = function _removeBackdrop() {
if (this._backdrop) {
$(this._backdrop).remove();
this._backdrop = null;
}
};
_proto._showBackdrop = function _showBackdrop(callback) {
var _this8 = this;
var animate = $(this._element).hasClass(ClassName$5.FADE) ? ClassName$5.FADE : '';
if (this._isShown && this._config.backdrop) {
this._backdrop = document.createElement('div');
this._backdrop.className = ClassName$5.BACKDROP;
if (animate) {
this._backdrop.classList.add(animate);
}
$(this._backdrop).appendTo(document.body);
$(this._element).on(Event$5.CLICK_DISMISS, function (event) {
if (_this8._ignoreBackdropClick) {
_this8._ignoreBackdropClick = false;
return;
}
if (event.target !== event.currentTarget) {
return;
}
if (_this8._config.backdrop === 'static') {
_this8._element.focus();
} else {
_this8.hide();
}
});
if (animate) {
Util.reflow(this._backdrop);
}
$(this._backdrop).addClass(ClassName$5.SHOW);
if (!callback) {
return;
}
if (!animate) {
callback();
return;
}
var backdropTransitionDuration = Util.getTransitionDurationFromElement(this._backdrop);
$(this._backdrop).one(Util.TRANSITION_END, callback).emulateTransitionEnd(backdropTransitionDuration);
} else if (!this._isShown && this._backdrop) {
$(this._backdrop).removeClass(ClassName$5.SHOW);
var callbackRemove = function callbackRemove() {
_this8._removeBackdrop();
if (callback) {
callback();
}
};
if ($(this._element).hasClass(ClassName$5.FADE)) {
var _backdropTransitionDuration = Util.getTransitionDurationFromElement(this._backdrop);
$(this._backdrop).one(Util.TRANSITION_END, callbackRemove).emulateTransitionEnd(_backdropTransitionDuration);
} else {
callbackRemove();
}
} else if (callback) {
callback();
}
} // ----------------------------------------------------------------------
// the following methods are used to handle overflowing modals
// todo (fat): these should probably be refactored out of modal.js
// ----------------------------------------------------------------------
;
_proto._adjustDialog = function _adjustDialog() {
var isModalOverflowing = this._element.scrollHeight > document.documentElement.clientHeight;
if (!this._isBodyOverflowing && isModalOverflowing) {
this._element.style.paddingLeft = this._scrollbarWidth + "px";
}
if (this._isBodyOverflowing && !isModalOverflowing) {
this._element.style.paddingRight = this._scrollbarWidth + "px";
}
};
_proto._resetAdjustments = function _resetAdjustments() {
this._element.style.paddingLeft = '';
this._element.style.paddingRight = '';
};
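// Scrollbar handling: measure whether the body overflows and how wide the
// scrollbar is, then compensate fixed/sticky content and the body with extra
// padding/margin while the modal is open to avoid a layout shift.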
_proto._checkScrollbar = function _checkScrollbar() {
var rect = document.body.getBoundingClientRect();
this._isBodyOverflowing = rect.left + rect.right < window.innerWidth;
this._scrollbarWidth = this._getScrollbarWidth();
};
_proto._setScrollbar = function _setScrollbar() {
var _this9 = this;
if (this._isBodyOverflowing) {
// Note: DOMNode.style.paddingRight returns the actual value or '' if not set
// while $(DOMNode).css('padding-right') returns the calculated value or 0 if not set
var fixedContent = [].slice.call(document.querySelectorAll(Selector$5.FIXED_CONTENT));
var stickyContent = [].slice.call(document.querySelectorAll(Selector$5.STICKY_CONTENT)); // Adjust fixed content padding
$(fixedContent).each(function (index, element) {
var actualPadding = element.style.paddingRight;
var calculatedPadding = $(element).css('padding-right');
$(element).data('padding-right', actualPadding).css('padding-right', parseFloat(calculatedPadding) + _this9._scrollbarWidth + "px");
}); // Adjust sticky content margin
$(stickyContent).each(function (index, element) {
var actualMargin = element.style.marginRight;
var calculatedMargin = $(element).css('margin-right');
$(element).data('margin-right', actualMargin).css('margin-right', parseFloat(calculatedMargin) - _this9._scrollbarWidth + "px");
}); // Adjust body padding
var actualPadding = document.body.style.paddingRight;
var calculatedPadding = $(document.body).css('padding-right');
$(document.body).data('padding-right', actualPadding).css('padding-right', parseFloat(calculatedPadding) + this._scrollbarWidth + "px");
}
$(document.body).addClass(ClassName$5.OPEN);
};
_proto._resetScrollbar = function _resetScrollbar() {
// Restore fixed content padding
var fixedContent = [].slice.call(document.querySelectorAll(Selector$5.FIXED_CONTENT));
$(fixedContent).each(function (index, element) {
var padding = $(element).data('padding-right');
$(element).removeData('padding-right');
element.style.paddingRight = padding ? padding : '';
}); // Restore sticky content
var elements = [].slice.call(document.querySelectorAll("" + Selector$5.STICKY_CONTENT));
$(elements).each(function (index, element) {
var margin = $(element).data('margin-right');
if (typeof margin !== 'undefined') {
$(element).css('margin-right', margin).removeData('margin-right');
}
}); // Restore body padding
var padding = $(document.body).data('padding-right');
$(document.body).removeData('padding-right');
document.body.style.paddingRight = padding ? padding : '';
};
_proto._getScrollbarWidth = function _getScrollbarWidth() {
// thx d.walsh
var scrollDiv = document.createElement('div');
scrollDiv.className = ClassName$5.SCROLLBAR_MEASURER;
document.body.appendChild(scrollDiv);
var scrollbarWidth = scrollDiv.getBoundingClientRect().width - scrollDiv.clientWidth;
document.body.removeChild(scrollDiv);
return scrollbarWidth;
} // Static
;
Modal._jQueryInterface = function _jQueryInterface(config, relatedTarget) {
return this.each(function () {
var data = $(this).data(DATA_KEY$5);
var _config = _objectSpread({}, Default$3, $(this).data(), typeof config === 'object' && config ? config : {});
if (!data) {
data = new Modal(this, _config);
$(this).data(DATA_KEY$5, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config](relatedTarget);
} else if (_config.show) {
data.show(relatedTarget);
}
});
};
_createClass(Modal, null, [{
key: "VERSION",
get: function get() {
return VERSION$5;
}
}, {
key: "Default",
get: function get() {
return Default$3;
}
}]);
return Modal;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$5.CLICK_DATA_API, Selector$5.DATA_TOGGLE, function (event) {
var _this10 = this;
var target;
var selector = Util.getSelectorFromElement(this);
if (selector) {
target = document.querySelector(selector);
}
var config = $(target).data(DATA_KEY$5) ? 'toggle' : _objectSpread({}, $(target).data(), $(this).data());
if (this.tagName === 'A' || this.tagName === 'AREA') {
event.preventDefault();
}
var $target = $(target).one(Event$5.SHOW, function (showEvent) {
if (showEvent.isDefaultPrevented()) {
// Only register focus restorer if modal will actually get shown
return;
}
$target.one(Event$5.HIDDEN, function () {
if ($(_this10).is(':visible')) {
_this10.focus();
}
});
});
Modal._jQueryInterface.call($(target), config, this);
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$5] = Modal._jQueryInterface;
$.fn[NAME$5].Constructor = Modal;
$.fn[NAME$5].noConflict = function () {
$.fn[NAME$5] = JQUERY_NO_CONFLICT$5;
return Modal._jQueryInterface;
};
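// Example of programmatic use (illustrative id):
//   $('#exampleModal').modal({ backdrop: 'static', keyboard: false });
//   $('#exampleModal').modal('hide');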
/**
* --------------------------------------------------------------------------
* Bootstrap (v4.3.1): tools/sanitizer.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* --------------------------------------------------------------------------
*/
var uriAttrs = ['background', 'cite', 'href', 'itemtype', 'longdesc', 'poster', 'src', 'xlink:href'];
var ARIA_ATTRIBUTE_PATTERN = /^aria-[\w-]*$/i;
var DefaultWhitelist = {
// Global attributes allowed on any supplied element below.
'*': ['class', 'dir', 'id', 'lang', 'role', ARIA_ATTRIBUTE_PATTERN],
a: ['target', 'href', 'title', 'rel'],
area: [],
b: [],
br: [],
col: [],
code: [],
div: [],
em: [],
hr: [],
h1: [],
h2: [],
h3: [],
h4: [],
h5: [],
h6: [],
i: [],
img: ['src', 'alt', 'title', 'width', 'height'],
li: [],
ol: [],
p: [],
pre: [],
s: [],
small: [],
span: [],
sub: [],
sup: [],
strong: [],
u: [],
ul: []
};
/**
* A pattern that recognizes a commonly useful subset of URLs that are safe.
*
* Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
*/
var SAFE_URL_PATTERN = /^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi;
/**
* A pattern that matches safe data URLs. Only matches image, video and audio types.
*
* Shoutout to Angular 7 https://github.com/angular/angular/blob/7.2.4/packages/core/src/sanitization/url_sanitizer.ts
*/
var DATA_URL_PATTERN = /^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;
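// An attribute survives sanitization when its lowercased name appears in the
// whitelist (URL-carrying attributes must additionally match the safe URL or
// data-URL patterns) or when it matches one of the RegExp entries in the
// whitelist (e.g. the aria-* pattern).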
function allowedAttribute(attr, allowedAttributeList) {
var attrName = attr.nodeName.toLowerCase();
if (allowedAttributeList.indexOf(attrName) !== -1) {
if (uriAttrs.indexOf(attrName) !== -1) {
return Boolean(attr.nodeValue.match(SAFE_URL_PATTERN) || attr.nodeValue.match(DATA_URL_PATTERN));
}
return true;
}
var regExp = allowedAttributeList.filter(function (attrRegex) {
return attrRegex instanceof RegExp;
}); // Check if a regular expression validates the attribute.
for (var i = 0, l = regExp.length; i < l; i++) {
if (attrName.match(regExp[i])) {
return true;
}
}
return false;
}
function sanitizeHtml(unsafeHtml, whiteList, sanitizeFn) {
if (unsafeHtml.length === 0) {
return unsafeHtml;
}
if (sanitizeFn && typeof sanitizeFn === 'function') {
return sanitizeFn(unsafeHtml);
}
var domParser = new window.DOMParser();
var createdDocument = domParser.parseFromString(unsafeHtml, 'text/html');
var whitelistKeys = Object.keys(whiteList);
var elements = [].slice.call(createdDocument.body.querySelectorAll('*'));
var _loop = function _loop(i, len) {
var el = elements[i];
var elName = el.nodeName.toLowerCase();
if (whitelistKeys.indexOf(el.nodeName.toLowerCase()) === -1) {
el.parentNode.removeChild(el);
return "continue";
}
var attributeList = [].slice.call(el.attributes);
var whitelistedAttributes = [].concat(whiteList['*'] || [], whiteList[elName] || []);
attributeList.forEach(function (attr) {
if (!allowedAttribute(attr, whitelistedAttributes)) {
el.removeAttribute(attr.nodeName);
}
});
};
for (var i = 0, len = elements.length; i < len; i++) {
var _ret = _loop(i, len);
if (_ret === "continue") continue;
}
return createdDocument.body.innerHTML;
}
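// For example, with DefaultWhitelist the sanitizer drops non-whitelisted
// elements and attributes (input string is illustrative):
//   sanitizeHtml('<a href="javascript:alert(1)" onclick="evil()">link</a>', DefaultWhitelist, null);
//   // -> roughly '<a>link</a>' (unsafe URL and unknown attribute stripped)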
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$6 = 'tooltip';
var VERSION$6 = '4.3.1';
var DATA_KEY$6 = 'bs.tooltip';
var EVENT_KEY$6 = "." + DATA_KEY$6;
var JQUERY_NO_CONFLICT$6 = $.fn[NAME$6];
var CLASS_PREFIX = 'bs-tooltip';
var BSCLS_PREFIX_REGEX = new RegExp("(^|\\s)" + CLASS_PREFIX + "\\S+", 'g');
var DISALLOWED_ATTRIBUTES = ['sanitize', 'whiteList', 'sanitizeFn'];
var DefaultType$4 = {
animation: 'boolean',
template: 'string',
title: '(string|element|function)',
trigger: 'string',
delay: '(number|object)',
html: 'boolean',
selector: '(string|boolean)',
placement: '(string|function)',
offset: '(number|string|function)',
container: '(string|element|boolean)',
fallbackPlacement: '(string|array)',
boundary: '(string|element)',
sanitize: 'boolean',
sanitizeFn: '(null|function)',
whiteList: 'object'
};
var AttachmentMap$1 = {
AUTO: 'auto',
TOP: 'top',
RIGHT: 'right',
BOTTOM: 'bottom',
LEFT: 'left'
};
var Default$4 = {
animation: true,
template: '<div class="tooltip" role="tooltip">' + '<div class="arrow"></div>' + '<div class="tooltip-inner"></div></div>',
trigger: 'hover focus',
title: '',
delay: 0,
html: false,
selector: false,
placement: 'top',
offset: 0,
container: false,
fallbackPlacement: 'flip',
boundary: 'scrollParent',
sanitize: true,
sanitizeFn: null,
whiteList: DefaultWhitelist
};
var HoverState = {
SHOW: 'show',
OUT: 'out'
};
var Event$6 = {
HIDE: "hide" + EVENT_KEY$6,
HIDDEN: "hidden" + EVENT_KEY$6,
SHOW: "show" + EVENT_KEY$6,
SHOWN: "shown" + EVENT_KEY$6,
INSERTED: "inserted" + EVENT_KEY$6,
CLICK: "click" + EVENT_KEY$6,
FOCUSIN: "focusin" + EVENT_KEY$6,
FOCUSOUT: "focusout" + EVENT_KEY$6,
MOUSEENTER: "mouseenter" + EVENT_KEY$6,
MOUSELEAVE: "mouseleave" + EVENT_KEY$6
};
var ClassName$6 = {
FADE: 'fade',
SHOW: 'show'
};
var Selector$6 = {
TOOLTIP: '.tooltip',
TOOLTIP_INNER: '.tooltip-inner',
ARROW: '.arrow'
};
var Trigger = {
HOVER: 'hover',
FOCUS: 'focus',
CLICK: 'click',
MANUAL: 'manual'
};
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
var Tooltip =
/*#__PURE__*/
function () {
function Tooltip(element, config) {
/**
* Check for Popper dependency
* Popper - https://popper.js.org
*/
if (typeof Popper === 'undefined') {
throw new TypeError('Bootstrap\'s tooltips require Popper.js (https://popper.js.org/)');
} // private
this._isEnabled = true;
this._timeout = 0;
this._hoverState = '';
this._activeTrigger = {};
this._popper = null; // Protected
this.element = element;
this.config = this._getConfig(config);
this.tip = null;
this._setListeners();
} // Getters
var _proto = Tooltip.prototype;
// Public
_proto.enable = function enable() {
this._isEnabled = true;
};
_proto.disable = function disable() {
this._isEnabled = false;
};
_proto.toggleEnabled = function toggleEnabled() {
this._isEnabled = !this._isEnabled;
};
_proto.toggle = function toggle(event) {
if (!this._isEnabled) {
return;
}
if (event) {
var dataKey = this.constructor.DATA_KEY;
var context = $(event.currentTarget).data(dataKey);
if (!context) {
context = new this.constructor(event.currentTarget, this._getDelegateConfig());
$(event.currentTarget).data(dataKey, context);
}
context._activeTrigger.click = !context._activeTrigger.click;
if (context._isWithActiveTrigger()) {
context._enter(null, context);
} else {
context._leave(null, context);
}
} else {
if ($(this.getTipElement()).hasClass(ClassName$6.SHOW)) {
this._leave(null, this);
return;
}
this._enter(null, this);
}
};
_proto.dispose = function dispose() {
clearTimeout(this._timeout);
$.removeData(this.element, this.constructor.DATA_KEY);
$(this.element).off(this.constructor.EVENT_KEY);
$(this.element).closest('.modal').off('hide.bs.modal');
if (this.tip) {
$(this.tip).remove();
}
this._isEnabled = null;
this._timeout = null;
this._hoverState = null;
this._activeTrigger = null;
if (this._popper !== null) {
this._popper.destroy();
}
this._popper = null;
this.element = null;
this.config = null;
this.tip = null;
};
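// Show flow: fire the cancelable `show` event, build the tip element from the
// configured template, attach it to the container, position it with Popper,
// add the `show` class and fire `shown` once the fade transition (if any) ends.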
_proto.show = function show() {
var _this = this;
if ($(this.element).css('display') === 'none') {
throw new Error('Please use show on visible elements');
}
var showEvent = $.Event(this.constructor.Event.SHOW);
if (this.isWithContent() && this._isEnabled) {
$(this.element).trigger(showEvent);
var shadowRoot = Util.findShadowRoot(this.element);
var isInTheDom = $.contains(shadowRoot !== null ? shadowRoot : this.element.ownerDocument.documentElement, this.element);
if (showEvent.isDefaultPrevented() || !isInTheDom) {
return;
}
var tip = this.getTipElement();
var tipId = Util.getUID(this.constructor.NAME);
tip.setAttribute('id', tipId);
this.element.setAttribute('aria-describedby', tipId);
this.setContent();
if (this.config.animation) {
$(tip).addClass(ClassName$6.FADE);
}
var placement = typeof this.config.placement === 'function' ? this.config.placement.call(this, tip, this.element) : this.config.placement;
var attachment = this._getAttachment(placement);
this.addAttachmentClass(attachment);
var container = this._getContainer();
$(tip).data(this.constructor.DATA_KEY, this);
if (!$.contains(this.element.ownerDocument.documentElement, this.tip)) {
$(tip).appendTo(container);
}
$(this.element).trigger(this.constructor.Event.INSERTED);
this._popper = new Popper(this.element, tip, {
placement: attachment,
modifiers: {
offset: this._getOffset(),
flip: {
behavior: this.config.fallbackPlacement
},
arrow: {
element: Selector$6.ARROW
},
preventOverflow: {
boundariesElement: this.config.boundary
}
},
onCreate: function onCreate(data) {
if (data.originalPlacement !== data.placement) {
_this._handlePopperPlacementChange(data);
}
},
onUpdate: function onUpdate(data) {
return _this._handlePopperPlacementChange(data);
}
});
$(tip).addClass(ClassName$6.SHOW); // If this is a touch-enabled device we add extra
// empty mouseover listeners to the body's immediate children;
// only needed because of broken event delegation on iOS
// https://www.quirksmode.org/blog/archives/2014/02/mouse_event_bub.html
if ('ontouchstart' in document.documentElement) {
$(document.body).children().on('mouseover', null, $.noop);
}
var complete = function complete() {
if (_this.config.animation) {
_this._fixTransition();
}
var prevHoverState = _this._hoverState;
_this._hoverState = null;
$(_this.element).trigger(_this.constructor.Event.SHOWN);
if (prevHoverState === HoverState.OUT) {
_this._leave(null, _this);
}
};
if ($(this.tip).hasClass(ClassName$6.FADE)) {
var transitionDuration = Util.getTransitionDurationFromElement(this.tip);
$(this.tip).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
} else {
complete();
}
}
};
_proto.hide = function hide(callback) {
var _this2 = this;
var tip = this.getTipElement();
var hideEvent = $.Event(this.constructor.Event.HIDE);
var complete = function complete() {
if (_this2._hoverState !== HoverState.SHOW && tip.parentNode) {
tip.parentNode.removeChild(tip);
}
_this2._cleanTipClass();
_this2.element.removeAttribute('aria-describedby');
$(_this2.element).trigger(_this2.constructor.Event.HIDDEN);
if (_this2._popper !== null) {
_this2._popper.destroy();
}
if (callback) {
callback();
}
};
$(this.element).trigger(hideEvent);
if (hideEvent.isDefaultPrevented()) {
return;
}
$(tip).removeClass(ClassName$6.SHOW); // If this is a touch-enabled device we remove the extra
// empty mouseover listeners we added for iOS support
if ('ontouchstart' in document.documentElement) {
$(document.body).children().off('mouseover', null, $.noop);
}
this._activeTrigger[Trigger.CLICK] = false;
this._activeTrigger[Trigger.FOCUS] = false;
this._activeTrigger[Trigger.HOVER] = false;
if ($(this.tip).hasClass(ClassName$6.FADE)) {
var transitionDuration = Util.getTransitionDurationFromElement(tip);
$(tip).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
} else {
complete();
}
this._hoverState = '';
};
_proto.update = function update() {
if (this._popper !== null) {
this._popper.scheduleUpdate();
}
} // Protected
;
_proto.isWithContent = function isWithContent() {
return Boolean(this.getTitle());
};
_proto.addAttachmentClass = function addAttachmentClass(attachment) {
$(this.getTipElement()).addClass(CLASS_PREFIX + "-" + attachment);
};
_proto.getTipElement = function getTipElement() {
this.tip = this.tip || $(this.config.template)[0];
return this.tip;
};
_proto.setContent = function setContent() {
var tip = this.getTipElement();
this.setElementContent($(tip.querySelectorAll(Selector$6.TOOLTIP_INNER)), this.getTitle());
$(tip).removeClass(ClassName$6.FADE + " " + ClassName$6.SHOW);
};
_proto.setElementContent = function setElementContent($element, content) {
if (typeof content === 'object' && (content.nodeType || content.jquery)) {
// Content is a DOM node or a jQuery
if (this.config.html) {
if (!$(content).parent().is($element)) {
$element.empty().append(content);
}
} else {
$element.text($(content).text());
}
return;
}
if (this.config.html) {
if (this.config.sanitize) {
content = sanitizeHtml(content, this.config.whiteList, this.config.sanitizeFn);
}
$element.html(content);
} else {
$element.text(content);
}
};
_proto.getTitle = function getTitle() {
var title = this.element.getAttribute('data-original-title');
if (!title) {
title = typeof this.config.title === 'function' ? this.config.title.call(this.element) : this.config.title;
}
return title;
} // Private
;
_proto._getOffset = function _getOffset() {
var _this3 = this;
var offset = {};
if (typeof this.config.offset === 'function') {
offset.fn = function (data) {
data.offsets = _objectSpread({}, data.offsets, _this3.config.offset(data.offsets, _this3.element) || {});
return data;
};
} else {
offset.offset = this.config.offset;
}
return offset;
};
_proto._getContainer = function _getContainer() {
if (this.config.container === false) {
return document.body;
}
if (Util.isElement(this.config.container)) {
return $(this.config.container);
}
return $(document).find(this.config.container);
};
_proto._getAttachment = function _getAttachment(placement) {
return AttachmentMap$1[placement.toUpperCase()];
};
_proto._setListeners = function _setListeners() {
var _this4 = this;
var triggers = this.config.trigger.split(' ');
triggers.forEach(function (trigger) {
if (trigger === 'click') {
$(_this4.element).on(_this4.constructor.Event.CLICK, _this4.config.selector, function (event) {
return _this4.toggle(event);
});
} else if (trigger !== Trigger.MANUAL) {
var eventIn = trigger === Trigger.HOVER ? _this4.constructor.Event.MOUSEENTER : _this4.constructor.Event.FOCUSIN;
var eventOut = trigger === Trigger.HOVER ? _this4.constructor.Event.MOUSELEAVE : _this4.constructor.Event.FOCUSOUT;
$(_this4.element).on(eventIn, _this4.config.selector, function (event) {
return _this4._enter(event);
}).on(eventOut, _this4.config.selector, function (event) {
return _this4._leave(event);
});
}
});
$(this.element).closest('.modal').on('hide.bs.modal', function () {
if (_this4.element) {
_this4.hide();
}
});
if (this.config.selector) {
this.config = _objectSpread({}, this.config, {
trigger: 'manual',
selector: ''
});
} else {
this._fixTitle();
}
};
_proto._fixTitle = function _fixTitle() {
var titleType = typeof this.element.getAttribute('data-original-title');
if (this.element.getAttribute('title') || titleType !== 'string') {
this.element.setAttribute('data-original-title', this.element.getAttribute('title') || '');
this.element.setAttribute('title', '');
}
};
_proto._enter = function _enter(event, context) {
var dataKey = this.constructor.DATA_KEY;
context = context || $(event.currentTarget).data(dataKey);
if (!context) {
context = new this.constructor(event.currentTarget, this._getDelegateConfig());
$(event.currentTarget).data(dataKey, context);
}
if (event) {
context._activeTrigger[event.type === 'focusin' ? Trigger.FOCUS : Trigger.HOVER] = true;
}
if ($(context.getTipElement()).hasClass(ClassName$6.SHOW) || context._hoverState === HoverState.SHOW) {
context._hoverState = HoverState.SHOW;
return;
}
clearTimeout(context._timeout);
context._hoverState = HoverState.SHOW;
if (!context.config.delay || !context.config.delay.show) {
context.show();
return;
}
context._timeout = setTimeout(function () {
if (context._hoverState === HoverState.SHOW) {
context.show();
}
}, context.config.delay.show);
};
_proto._leave = function _leave(event, context) {
var dataKey = this.constructor.DATA_KEY;
context = context || $(event.currentTarget).data(dataKey);
if (!context) {
context = new this.constructor(event.currentTarget, this._getDelegateConfig());
$(event.currentTarget).data(dataKey, context);
}
if (event) {
context._activeTrigger[event.type === 'focusout' ? Trigger.FOCUS : Trigger.HOVER] = false;
}
if (context._isWithActiveTrigger()) {
return;
}
clearTimeout(context._timeout);
context._hoverState = HoverState.OUT;
if (!context.config.delay || !context.config.delay.hide) {
context.hide();
return;
}
context._timeout = setTimeout(function () {
if (context._hoverState === HoverState.OUT) {
context.hide();
}
}, context.config.delay.hide);
};
_proto._isWithActiveTrigger = function _isWithActiveTrigger() {
for (var trigger in this._activeTrigger) {
if (this._activeTrigger[trigger]) {
return true;
}
}
return false;
};
_proto._getConfig = function _getConfig(config) {
var dataAttributes = $(this.element).data();
Object.keys(dataAttributes).forEach(function (dataAttr) {
if (DISALLOWED_ATTRIBUTES.indexOf(dataAttr) !== -1) {
delete dataAttributes[dataAttr];
}
});
config = _objectSpread({}, this.constructor.Default, dataAttributes, typeof config === 'object' && config ? config : {});
if (typeof config.delay === 'number') {
config.delay = {
show: config.delay,
hide: config.delay
};
}
if (typeof config.title === 'number') {
config.title = config.title.toString();
}
if (typeof config.content === 'number') {
config.content = config.content.toString();
}
Util.typeCheckConfig(NAME$6, config, this.constructor.DefaultType);
if (config.sanitize) {
config.template = sanitizeHtml(config.template, config.whiteList, config.sanitizeFn);
}
return config;
};
_proto._getDelegateConfig = function _getDelegateConfig() {
var config = {};
if (this.config) {
for (var key in this.config) {
if (this.constructor.Default[key] !== this.config[key]) {
config[key] = this.config[key];
}
}
}
return config;
};
_proto._cleanTipClass = function _cleanTipClass() {
var $tip = $(this.getTipElement());
var tabClass = $tip.attr('class').match(BSCLS_PREFIX_REGEX);
if (tabClass !== null && tabClass.length) {
$tip.removeClass(tabClass.join(''));
}
};
_proto._handlePopperPlacementChange = function _handlePopperPlacementChange(popperData) {
var popperInstance = popperData.instance;
this.tip = popperInstance.popper;
this._cleanTipClass();
this.addAttachmentClass(this._getAttachment(popperData.placement));
};
_proto._fixTransition = function _fixTransition() {
var tip = this.getTipElement();
var initConfigAnimation = this.config.animation;
if (tip.getAttribute('x-placement') !== null) {
return;
}
$(tip).removeClass(ClassName$6.FADE);
this.config.animation = false;
this.hide();
this.show();
this.config.animation = initConfigAnimation;
} // Static
;
Tooltip._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$6);
var _config = typeof config === 'object' && config;
if (!data && /dispose|hide/.test(config)) {
return;
}
if (!data) {
data = new Tooltip(this, _config);
$(this).data(DATA_KEY$6, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(Tooltip, null, [{
key: "VERSION",
get: function get() {
return VERSION$6;
}
}, {
key: "Default",
get: function get() {
return Default$4;
}
}, {
key: "NAME",
get: function get() {
return NAME$6;
}
}, {
key: "DATA_KEY",
get: function get() {
return DATA_KEY$6;
}
}, {
key: "Event",
get: function get() {
return Event$6;
}
}, {
key: "EVENT_KEY",
get: function get() {
return EVENT_KEY$6;
}
}, {
key: "DefaultType",
get: function get() {
return DefaultType$4;
}
}]);
return Tooltip;
}();
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$6] = Tooltip._jQueryInterface;
$.fn[NAME$6].Constructor = Tooltip;
$.fn[NAME$6].noConflict = function () {
$.fn[NAME$6] = JQUERY_NO_CONFLICT$6;
return Tooltip._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$7 = 'popover';
var VERSION$7 = '4.3.1';
var DATA_KEY$7 = 'bs.popover';
var EVENT_KEY$7 = "." + DATA_KEY$7;
var JQUERY_NO_CONFLICT$7 = $.fn[NAME$7];
var CLASS_PREFIX$1 = 'bs-popover';
var BSCLS_PREFIX_REGEX$1 = new RegExp("(^|\\s)" + CLASS_PREFIX$1 + "\\S+", 'g');
var Default$5 = _objectSpread({}, Tooltip.Default, {
placement: 'right',
trigger: 'click',
content: '',
template: '<div class="popover" role="tooltip">' + '<div class="arrow"></div>' + '<h3 class="popover-header"></h3>' + '<div class="popover-body"></div></div>'
});
var DefaultType$5 = _objectSpread({}, Tooltip.DefaultType, {
content: '(string|element|function)'
});
var ClassName$7 = {
FADE: 'fade',
SHOW: 'show'
};
var Selector$7 = {
TITLE: '.popover-header',
CONTENT: '.popover-body'
};
var Event$7 = {
HIDE: "hide" + EVENT_KEY$7,
HIDDEN: "hidden" + EVENT_KEY$7,
SHOW: "show" + EVENT_KEY$7,
SHOWN: "shown" + EVENT_KEY$7,
INSERTED: "inserted" + EVENT_KEY$7,
CLICK: "click" + EVENT_KEY$7,
FOCUSIN: "focusin" + EVENT_KEY$7,
FOCUSOUT: "focusout" + EVENT_KEY$7,
MOUSEENTER: "mouseenter" + EVENT_KEY$7,
MOUSELEAVE: "mouseleave" + EVENT_KEY$7
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Popover =
/*#__PURE__*/
function (_Tooltip) {
_inheritsLoose(Popover, _Tooltip);
function Popover() {
return _Tooltip.apply(this, arguments) || this;
}
var _proto = Popover.prototype;
// Overrides
_proto.isWithContent = function isWithContent() {
return this.getTitle() || this._getContent();
};
_proto.addAttachmentClass = function addAttachmentClass(attachment) {
$(this.getTipElement()).addClass(CLASS_PREFIX$1 + "-" + attachment);
};
_proto.getTipElement = function getTipElement() {
this.tip = this.tip || $(this.config.template)[0];
return this.tip;
};
_proto.setContent = function setContent() {
var $tip = $(this.getTipElement()); // We use append for html objects to maintain js events
this.setElementContent($tip.find(Selector$7.TITLE), this.getTitle());
var content = this._getContent();
if (typeof content === 'function') {
content = content.call(this.element);
}
this.setElementContent($tip.find(Selector$7.CONTENT), content);
$tip.removeClass(ClassName$7.FADE + " " + ClassName$7.SHOW);
} // Private
;
_proto._getContent = function _getContent() {
return this.element.getAttribute('data-content') || this.config.content;
};
_proto._cleanTipClass = function _cleanTipClass() {
var $tip = $(this.getTipElement());
var tabClass = $tip.attr('class').match(BSCLS_PREFIX_REGEX$1);
if (tabClass !== null && tabClass.length > 0) {
$tip.removeClass(tabClass.join(''));
}
} // Static
;
Popover._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$7);
var _config = typeof config === 'object' ? config : null;
if (!data && /dispose|hide/.test(config)) {
return;
}
if (!data) {
data = new Popover(this, _config);
$(this).data(DATA_KEY$7, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(Popover, null, [{
key: "VERSION",
// Getters
get: function get() {
return VERSION$7;
}
}, {
key: "Default",
get: function get() {
return Default$5;
}
}, {
key: "NAME",
get: function get() {
return NAME$7;
}
}, {
key: "DATA_KEY",
get: function get() {
return DATA_KEY$7;
}
}, {
key: "Event",
get: function get() {
return Event$7;
}
}, {
key: "EVENT_KEY",
get: function get() {
return EVENT_KEY$7;
}
}, {
key: "DefaultType",
get: function get() {
return DefaultType$5;
}
}]);
return Popover;
}(Tooltip);
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$7] = Popover._jQueryInterface;
$.fn[NAME$7].Constructor = Popover;
$.fn[NAME$7].noConflict = function () {
$.fn[NAME$7] = JQUERY_NO_CONFLICT$7;
return Popover._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$8 = 'scrollspy';
var VERSION$8 = '4.3.1';
var DATA_KEY$8 = 'bs.scrollspy';
var EVENT_KEY$8 = "." + DATA_KEY$8;
var DATA_API_KEY$6 = '.data-api';
var JQUERY_NO_CONFLICT$8 = $.fn[NAME$8];
var Default$6 = {
offset: 10,
method: 'auto',
target: ''
};
var DefaultType$6 = {
offset: 'number',
method: 'string',
target: '(string|element)'
};
var Event$8 = {
ACTIVATE: "activate" + EVENT_KEY$8,
SCROLL: "scroll" + EVENT_KEY$8,
LOAD_DATA_API: "load" + EVENT_KEY$8 + DATA_API_KEY$6
};
var ClassName$8 = {
DROPDOWN_ITEM: 'dropdown-item',
DROPDOWN_MENU: 'dropdown-menu',
ACTIVE: 'active'
};
var Selector$8 = {
DATA_SPY: '[data-spy="scroll"]',
ACTIVE: '.active',
NAV_LIST_GROUP: '.nav, .list-group',
NAV_LINKS: '.nav-link',
NAV_ITEMS: '.nav-item',
LIST_ITEMS: '.list-group-item',
DROPDOWN: '.dropdown',
DROPDOWN_ITEMS: '.dropdown-item',
DROPDOWN_TOGGLE: '.dropdown-toggle'
};
var OffsetMethod = {
OFFSET: 'offset',
POSITION: 'position'
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var ScrollSpy =
/*#__PURE__*/
function () {
function ScrollSpy(element, config) {
var _this = this;
this._element = element;
this._scrollElement = element.tagName === 'BODY' ? window : element;
this._config = this._getConfig(config);
this._selector = this._config.target + " " + Selector$8.NAV_LINKS + "," + (this._config.target + " " + Selector$8.LIST_ITEMS + ",") + (this._config.target + " " + Selector$8.DROPDOWN_ITEMS);
this._offsets = [];
this._targets = [];
this._activeTarget = null;
this._scrollHeight = 0;
$(this._scrollElement).on(Event$8.SCROLL, function (event) {
return _this._process(event);
});
this.refresh();
this._process();
} // Getters
var _proto = ScrollSpy.prototype;
// Public
_proto.refresh = function refresh() {
var _this2 = this;
var autoMethod = this._scrollElement === this._scrollElement.window ? OffsetMethod.OFFSET : OffsetMethod.POSITION;
var offsetMethod = this._config.method === 'auto' ? autoMethod : this._config.method;
var offsetBase = offsetMethod === OffsetMethod.POSITION ? this._getScrollTop() : 0;
this._offsets = [];
this._targets = [];
this._scrollHeight = this._getScrollHeight();
var targets = [].slice.call(document.querySelectorAll(this._selector));
targets.map(function (element) {
var target;
var targetSelector = Util.getSelectorFromElement(element);
if (targetSelector) {
target = document.querySelector(targetSelector);
}
if (target) {
var targetBCR = target.getBoundingClientRect();
if (targetBCR.width || targetBCR.height) {
// TODO (fat): remove sketch reliance on jQuery position/offset
return [$(target)[offsetMethod]().top + offsetBase, targetSelector];
}
}
return null;
}).filter(function (item) {
return item;
}).sort(function (a, b) {
return a[0] - b[0];
}).forEach(function (item) {
_this2._offsets.push(item[0]);
_this2._targets.push(item[1]);
});
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY$8);
$(this._scrollElement).off(EVENT_KEY$8);
this._element = null;
this._scrollElement = null;
this._config = null;
this._selector = null;
this._offsets = null;
this._targets = null;
this._activeTarget = null;
this._scrollHeight = null;
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, Default$6, typeof config === 'object' && config ? config : {});
if (typeof config.target !== 'string') {
var id = $(config.target).attr('id');
if (!id) {
id = Util.getUID(NAME$8);
$(config.target).attr('id', id);
}
config.target = "#" + id;
}
Util.typeCheckConfig(NAME$8, config, DefaultType$6);
return config;
};
_proto._getScrollTop = function _getScrollTop() {
return this._scrollElement === window ? this._scrollElement.pageYOffset : this._scrollElement.scrollTop;
};
_proto._getScrollHeight = function _getScrollHeight() {
return this._scrollElement.scrollHeight || Math.max(document.body.scrollHeight, document.documentElement.scrollHeight);
};
_proto._getOffsetHeight = function _getOffsetHeight() {
return this._scrollElement === window ? window.innerHeight : this._scrollElement.getBoundingClientRect().height;
};
_proto._process = function _process() {
var scrollTop = this._getScrollTop() + this._config.offset;
var scrollHeight = this._getScrollHeight();
var maxScroll = this._config.offset + scrollHeight - this._getOffsetHeight();
if (this._scrollHeight !== scrollHeight) {
this.refresh();
}
if (scrollTop >= maxScroll) {
var target = this._targets[this._targets.length - 1];
if (this._activeTarget !== target) {
this._activate(target);
}
return;
}
if (this._activeTarget && scrollTop < this._offsets[0] && this._offsets[0] > 0) {
this._activeTarget = null;
this._clear();
return;
}
var offsetLength = this._offsets.length;
for (var i = offsetLength; i--;) {
var isActiveTarget = this._activeTarget !== this._targets[i] && scrollTop >= this._offsets[i] && (typeof this._offsets[i + 1] === 'undefined' || scrollTop < this._offsets[i + 1]);
if (isActiveTarget) {
this._activate(this._targets[i]);
}
}
};
_proto._activate = function _activate(target) {
this._activeTarget = target;
this._clear();
var queries = this._selector.split(',').map(function (selector) {
return selector + "[data-target=\"" + target + "\"]," + selector + "[href=\"" + target + "\"]";
});
var $link = $([].slice.call(document.querySelectorAll(queries.join(','))));
if ($link.hasClass(ClassName$8.DROPDOWN_ITEM)) {
$link.closest(Selector$8.DROPDOWN).find(Selector$8.DROPDOWN_TOGGLE).addClass(ClassName$8.ACTIVE);
$link.addClass(ClassName$8.ACTIVE);
} else {
// Set triggered link as active
$link.addClass(ClassName$8.ACTIVE); // Set triggered links parents as active
// With both <ul> and <nav> markup a parent is the previous sibling of any nav ancestor
$link.parents(Selector$8.NAV_LIST_GROUP).prev(Selector$8.NAV_LINKS + ", " + Selector$8.LIST_ITEMS).addClass(ClassName$8.ACTIVE); // Handle special case when .nav-link is inside .nav-item
$link.parents(Selector$8.NAV_LIST_GROUP).prev(Selector$8.NAV_ITEMS).children(Selector$8.NAV_LINKS).addClass(ClassName$8.ACTIVE);
}
$(this._scrollElement).trigger(Event$8.ACTIVATE, {
relatedTarget: target
});
};
_proto._clear = function _clear() {
[].slice.call(document.querySelectorAll(this._selector)).filter(function (node) {
return node.classList.contains(ClassName$8.ACTIVE);
}).forEach(function (node) {
return node.classList.remove(ClassName$8.ACTIVE);
});
} // Static
;
ScrollSpy._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var data = $(this).data(DATA_KEY$8);
var _config = typeof config === 'object' && config;
if (!data) {
data = new ScrollSpy(this, _config);
$(this).data(DATA_KEY$8, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(ScrollSpy, null, [{
key: "VERSION",
get: function get() {
return VERSION$8;
}
}, {
key: "Default",
get: function get() {
return Default$6;
}
}]);
return ScrollSpy;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(window).on(Event$8.LOAD_DATA_API, function () {
var scrollSpys = [].slice.call(document.querySelectorAll(Selector$8.DATA_SPY));
var scrollSpysLength = scrollSpys.length;
for (var i = scrollSpysLength; i--;) {
var $spy = $(scrollSpys[i]);
ScrollSpy._jQueryInterface.call($spy, $spy.data());
}
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$8] = ScrollSpy._jQueryInterface;
$.fn[NAME$8].Constructor = ScrollSpy;
$.fn[NAME$8].noConflict = function () {
$.fn[NAME$8] = JQUERY_NO_CONFLICT$8;
return ScrollSpy._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$9 = 'tab';
var VERSION$9 = '4.3.1';
var DATA_KEY$9 = 'bs.tab';
var EVENT_KEY$9 = "." + DATA_KEY$9;
var DATA_API_KEY$7 = '.data-api';
var JQUERY_NO_CONFLICT$9 = $.fn[NAME$9];
var Event$9 = {
HIDE: "hide" + EVENT_KEY$9,
HIDDEN: "hidden" + EVENT_KEY$9,
SHOW: "show" + EVENT_KEY$9,
SHOWN: "shown" + EVENT_KEY$9,
CLICK_DATA_API: "click" + EVENT_KEY$9 + DATA_API_KEY$7
};
var ClassName$9 = {
DROPDOWN_MENU: 'dropdown-menu',
ACTIVE: 'active',
DISABLED: 'disabled',
FADE: 'fade',
SHOW: 'show'
};
var Selector$9 = {
DROPDOWN: '.dropdown',
NAV_LIST_GROUP: '.nav, .list-group',
ACTIVE: '.active',
ACTIVE_UL: '> li > .active',
DATA_TOGGLE: '[data-toggle="tab"], [data-toggle="pill"], [data-toggle="list"]',
DROPDOWN_TOGGLE: '.dropdown-toggle',
DROPDOWN_ACTIVE_CHILD: '> .dropdown-menu .active'
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Tab =
/*#__PURE__*/
function () {
function Tab(element) {
this._element = element;
} // Getters
var _proto = Tab.prototype;
// Public
_proto.show = function show() {
var _this = this;
if (this._element.parentNode && this._element.parentNode.nodeType === Node.ELEMENT_NODE && $(this._element).hasClass(ClassName$9.ACTIVE) || $(this._element).hasClass(ClassName$9.DISABLED)) {
return;
}
var target;
var previous;
var listElement = $(this._element).closest(Selector$9.NAV_LIST_GROUP)[0];
var selector = Util.getSelectorFromElement(this._element);
if (listElement) {
var itemSelector = listElement.nodeName === 'UL' || listElement.nodeName === 'OL' ? Selector$9.ACTIVE_UL : Selector$9.ACTIVE;
previous = $.makeArray($(listElement).find(itemSelector));
previous = previous[previous.length - 1];
}
var hideEvent = $.Event(Event$9.HIDE, {
relatedTarget: this._element
});
var showEvent = $.Event(Event$9.SHOW, {
relatedTarget: previous
});
if (previous) {
$(previous).trigger(hideEvent);
}
$(this._element).trigger(showEvent);
if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) {
return;
}
if (selector) {
target = document.querySelector(selector);
}
this._activate(this._element, listElement);
var complete = function complete() {
var hiddenEvent = $.Event(Event$9.HIDDEN, {
relatedTarget: _this._element
});
var shownEvent = $.Event(Event$9.SHOWN, {
relatedTarget: previous
});
$(previous).trigger(hiddenEvent);
$(_this._element).trigger(shownEvent);
};
if (target) {
this._activate(target, target.parentNode, complete);
} else {
complete();
}
};
_proto.dispose = function dispose() {
$.removeData(this._element, DATA_KEY$9);
this._element = null;
} // Private
;
_proto._activate = function _activate(element, container, callback) {
var _this2 = this;
var activeElements = container && (container.nodeName === 'UL' || container.nodeName === 'OL') ? $(container).find(Selector$9.ACTIVE_UL) : $(container).children(Selector$9.ACTIVE);
var active = activeElements[0];
var isTransitioning = callback && active && $(active).hasClass(ClassName$9.FADE);
var complete = function complete() {
return _this2._transitionComplete(element, active, callback);
};
if (active && isTransitioning) {
var transitionDuration = Util.getTransitionDurationFromElement(active);
$(active).removeClass(ClassName$9.SHOW).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
} else {
complete();
}
};
_proto._transitionComplete = function _transitionComplete(element, active, callback) {
if (active) {
$(active).removeClass(ClassName$9.ACTIVE);
var dropdownChild = $(active.parentNode).find(Selector$9.DROPDOWN_ACTIVE_CHILD)[0];
if (dropdownChild) {
$(dropdownChild).removeClass(ClassName$9.ACTIVE);
}
if (active.getAttribute('role') === 'tab') {
active.setAttribute('aria-selected', false);
}
}
$(element).addClass(ClassName$9.ACTIVE);
if (element.getAttribute('role') === 'tab') {
element.setAttribute('aria-selected', true);
}
Util.reflow(element);
if (element.classList.contains(ClassName$9.FADE)) {
element.classList.add(ClassName$9.SHOW);
}
if (element.parentNode && $(element.parentNode).hasClass(ClassName$9.DROPDOWN_MENU)) {
var dropdownElement = $(element).closest(Selector$9.DROPDOWN)[0];
if (dropdownElement) {
var dropdownToggleList = [].slice.call(dropdownElement.querySelectorAll(Selector$9.DROPDOWN_TOGGLE));
$(dropdownToggleList).addClass(ClassName$9.ACTIVE);
}
element.setAttribute('aria-expanded', true);
}
if (callback) {
callback();
}
} // Static
;
Tab._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var $this = $(this);
var data = $this.data(DATA_KEY$9);
if (!data) {
data = new Tab(this);
$this.data(DATA_KEY$9, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config]();
}
});
};
_createClass(Tab, null, [{
key: "VERSION",
get: function get() {
return VERSION$9;
}
}]);
return Tab;
}();
/**
* ------------------------------------------------------------------------
* Data Api implementation
* ------------------------------------------------------------------------
*/
$(document).on(Event$9.CLICK_DATA_API, Selector$9.DATA_TOGGLE, function (event) {
event.preventDefault();
Tab._jQueryInterface.call($(this), 'show');
});
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$9] = Tab._jQueryInterface;
$.fn[NAME$9].Constructor = Tab;
$.fn[NAME$9].noConflict = function () {
$.fn[NAME$9] = JQUERY_NO_CONFLICT$9;
return Tab._jQueryInterface;
};
/**
* ------------------------------------------------------------------------
* Constants
* ------------------------------------------------------------------------
*/
var NAME$a = 'toast';
var VERSION$a = '4.3.1';
var DATA_KEY$a = 'bs.toast';
var EVENT_KEY$a = "." + DATA_KEY$a;
var JQUERY_NO_CONFLICT$a = $.fn[NAME$a];
var Event$a = {
CLICK_DISMISS: "click.dismiss" + EVENT_KEY$a,
HIDE: "hide" + EVENT_KEY$a,
HIDDEN: "hidden" + EVENT_KEY$a,
SHOW: "show" + EVENT_KEY$a,
SHOWN: "shown" + EVENT_KEY$a
};
var ClassName$a = {
FADE: 'fade',
HIDE: 'hide',
SHOW: 'show',
SHOWING: 'showing'
};
var DefaultType$7 = {
animation: 'boolean',
autohide: 'boolean',
delay: 'number'
};
var Default$7 = {
animation: true,
autohide: true,
delay: 500
};
var Selector$a = {
DATA_DISMISS: '[data-dismiss="toast"]'
/**
* ------------------------------------------------------------------------
* Class Definition
* ------------------------------------------------------------------------
*/
};
var Toast =
/*#__PURE__*/
function () {
function Toast(element, config) {
this._element = element;
this._config = this._getConfig(config);
this._timeout = null;
this._setListeners();
} // Getters
var _proto = Toast.prototype;
// Public
_proto.show = function show() {
var _this = this;
$(this._element).trigger(Event$a.SHOW);
if (this._config.animation) {
this._element.classList.add(ClassName$a.FADE);
}
var complete = function complete() {
_this._element.classList.remove(ClassName$a.SHOWING);
_this._element.classList.add(ClassName$a.SHOW);
$(_this._element).trigger(Event$a.SHOWN);
if (_this._config.autohide) {
_this.hide();
}
};
this._element.classList.remove(ClassName$a.HIDE);
this._element.classList.add(ClassName$a.SHOWING);
if (this._config.animation) {
var transitionDuration = Util.getTransitionDurationFromElement(this._element);
$(this._element).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
} else {
complete();
}
};
_proto.hide = function hide(withoutTimeout) {
var _this2 = this;
if (!this._element.classList.contains(ClassName$a.SHOW)) {
return;
}
$(this._element).trigger(Event$a.HIDE);
if (withoutTimeout) {
this._close();
} else {
this._timeout = setTimeout(function () {
_this2._close();
}, this._config.delay);
}
};
_proto.dispose = function dispose() {
clearTimeout(this._timeout);
this._timeout = null;
if (this._element.classList.contains(ClassName$a.SHOW)) {
this._element.classList.remove(ClassName$a.SHOW);
}
$(this._element).off(Event$a.CLICK_DISMISS);
$.removeData(this._element, DATA_KEY$a);
this._element = null;
this._config = null;
} // Private
;
_proto._getConfig = function _getConfig(config) {
config = _objectSpread({}, Default$7, $(this._element).data(), typeof config === 'object' && config ? config : {});
Util.typeCheckConfig(NAME$a, config, this.constructor.DefaultType);
return config;
};
_proto._setListeners = function _setListeners() {
var _this3 = this;
$(this._element).on(Event$a.CLICK_DISMISS, Selector$a.DATA_DISMISS, function () {
return _this3.hide(true);
});
};
_proto._close = function _close() {
var _this4 = this;
var complete = function complete() {
_this4._element.classList.add(ClassName$a.HIDE);
$(_this4._element).trigger(Event$a.HIDDEN);
};
this._element.classList.remove(ClassName$a.SHOW);
if (this._config.animation) {
var transitionDuration = Util.getTransitionDurationFromElement(this._element);
$(this._element).one(Util.TRANSITION_END, complete).emulateTransitionEnd(transitionDuration);
} else {
complete();
}
} // Static
;
Toast._jQueryInterface = function _jQueryInterface(config) {
return this.each(function () {
var $element = $(this);
var data = $element.data(DATA_KEY$a);
var _config = typeof config === 'object' && config;
if (!data) {
data = new Toast(this, _config);
$element.data(DATA_KEY$a, data);
}
if (typeof config === 'string') {
if (typeof data[config] === 'undefined') {
throw new TypeError("No method named \"" + config + "\"");
}
data[config](this);
}
});
};
_createClass(Toast, null, [{
key: "VERSION",
get: function get() {
return VERSION$a;
}
}, {
key: "DefaultType",
get: function get() {
return DefaultType$7;
}
}, {
key: "Default",
get: function get() {
return Default$7;
}
}]);
return Toast;
}();
/**
* ------------------------------------------------------------------------
* jQuery
* ------------------------------------------------------------------------
*/
$.fn[NAME$a] = Toast._jQueryInterface;
$.fn[NAME$a].Constructor = Toast;
$.fn[NAME$a].noConflict = function () {
$.fn[NAME$a] = JQUERY_NO_CONFLICT$a;
return Toast._jQueryInterface;
};
/**
* --------------------------------------------------------------------------
* Bootstrap (v4.3.1): index.js
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* --------------------------------------------------------------------------
*/
(function () {
if (typeof $ === 'undefined') {
throw new TypeError('Bootstrap\'s JavaScript requires jQuery. jQuery must be included before Bootstrap\'s JavaScript.');
}
var version = $.fn.jquery.split(' ')[0].split('.');
var minMajor = 1;
var ltMajor = 2;
var minMinor = 9;
var minPatch = 1;
var maxMajor = 4;
if (version[0] < ltMajor && version[1] < minMinor || version[0] === minMajor && version[1] === minMinor && version[2] < minPatch || version[0] >= maxMajor) {
throw new Error('Bootstrap\'s JavaScript requires at least jQuery v1.9.1 but less than v4.0.0');
}
})();
exports.Util = Util;
exports.Alert = Alert;
exports.Button = Button;
exports.Carousel = Carousel;
exports.Collapse = Collapse;
exports.Dropdown = Dropdown;
exports.Modal = Modal;
exports.Popover = Popover;
exports.Scrollspy = ScrollSpy;
exports.Tab = Tab;
exports.Toast = Toast;
exports.Tooltip = Tooltip;
Object.defineProperty(exports, '__esModule', { value: true });
}));
//# sourceMappingURL=bootstrap.js.map
|
PypiClean
|
/os_sys-2.1.4-py3-none-any.whl/edit/rpc.py
|
import builtins
import copyreg
import io
import marshal
import os
import pickle
import queue
import select
import socket
import socketserver
import struct
import sys
import threading
import traceback
import types
def unpickle_code(ms):
"Return code object from marshal string ms."
co = marshal.loads(ms)
assert isinstance(co, types.CodeType)
return co
def pickle_code(co):
"Return unpickle function and tuple with marshalled co code object."
assert isinstance(co, types.CodeType)
ms = marshal.dumps(co)
return unpickle_code, (ms,)
def dumps(obj, protocol=None):
"Return pickled (or marshalled) string for obj."
# edit passes 'None' to select pickle.DEFAULT_PROTOCOL.
f = io.BytesIO()
p = CodePickler(f, protocol)
p.dump(obj)
return f.getvalue()
class CodePickler(pickle.Pickler):
dispatch_table = {types.CodeType: pickle_code}
dispatch_table.update(copyreg.dispatch_table)
BUFSIZE = 8*1024
LOCALHOST = '127.0.0.1'
class RPCServer(socketserver.TCPServer):
def __init__(self, addr, handlerclass=None):
if handlerclass is None:
handlerclass = RPCHandler
socketserver.TCPServer.__init__(self, addr, handlerclass)
def server_bind(self):
"Override TCPServer method, no bind() phase for connecting entity"
pass
def server_activate(self):
"""Override TCPServer method, connect() instead of listen()
Due to the reversed connection, self.server_address is actually the
address of the edit Client to which we are connecting.
"""
self.socket.connect(self.server_address)
def get_request(self):
"Override TCPServer method, return already connected socket"
return self.socket, self.server_address
def handle_error(self, request, client_address):
"""Override TCPServer method
Error message goes to __stderr__. No error message if exiting
normally or socket raised EOF. Other exceptions not handled in
server code will cause os._exit.
"""
try:
raise
except SystemExit:
raise
except:
erf = sys.__stderr__
print('\n' + '-'*40, file=erf)
print('Unhandled server exception!', file=erf)
print('Thread: %s' % threading.current_thread().name, file=erf)
print('Client Address: ', client_address, file=erf)
print('Request: ', repr(request), file=erf)
traceback.print_exc(file=erf)
print('\n*** Unrecoverable, server exiting!', file=erf)
print('-'*40, file=erf)
os._exit(0)
#----------------- end class RPCServer --------------------
objecttable = {}
request_queue = queue.Queue(0)
response_queue = queue.Queue(0)
class SocketIO(object):
nextseq = 0
def __init__(self, sock, objtable=None, debugging=None):
self.sockthread = threading.current_thread()
if debugging is not None:
self.debugging = debugging
self.sock = sock
if objtable is None:
objtable = objecttable
self.objtable = objtable
self.responses = {}
self.cvars = {}
def close(self):
sock = self.sock
self.sock = None
if sock is not None:
sock.close()
def exithook(self):
"override for specific exit action"
os._exit(0)
def debug(self, *args):
if not self.debugging:
return
s = self.location + " " + str(threading.current_thread().name)
for a in args:
s = s + " " + str(a)
print(s, file=sys.__stderr__)
def register(self, oid, object):
self.objtable[oid] = object
def unregister(self, oid):
try:
del self.objtable[oid]
except KeyError:
pass
def localcall(self, seq, request):
self.debug("localcall:", request)
try:
how, (oid, methodname, args, kwargs) = request
except TypeError:
return ("ERROR", "Bad request format")
if oid not in self.objtable:
return ("ERROR", "Unknown object id: %r" % (oid,))
obj = self.objtable[oid]
if methodname == "__methods__":
methods = {}
_getmethods(obj, methods)
return ("OK", methods)
if methodname == "__attributes__":
attributes = {}
_getattributes(obj, attributes)
return ("OK", attributes)
if not hasattr(obj, methodname):
return ("ERROR", "Unsupported method name: %r" % (methodname,))
method = getattr(obj, methodname)
try:
if how == 'CALL':
ret = method(*args, **kwargs)
if isinstance(ret, RemoteObject):
ret = remoteref(ret)
return ("OK", ret)
elif how == 'QUEUE':
request_queue.put((seq, (method, args, kwargs)))
return("QUEUED", None)
else:
return ("ERROR", "Unsupported message type: %s" % how)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except OSError:
raise
except Exception as ex:
return ("CALLEXC", ex)
except:
msg = "*** Internal Error: rpc.py:SocketIO.localcall()\n\n"\
" Object: %s \n Method: %s \n Args: %s\n"
print(msg % (oid, method, args), file=sys.__stderr__)
traceback.print_exc(file=sys.__stderr__)
return ("EXCEPTION", None)
def remotecall(self, oid, methodname, args, kwargs):
self.debug("remotecall:asynccall: ", oid, methodname)
seq = self.asynccall(oid, methodname, args, kwargs)
return self.asyncreturn(seq)
def remotequeue(self, oid, methodname, args, kwargs):
self.debug("remotequeue:asyncqueue: ", oid, methodname)
seq = self.asyncqueue(oid, methodname, args, kwargs)
return self.asyncreturn(seq)
def asynccall(self, oid, methodname, args, kwargs):
request = ("CALL", (oid, methodname, args, kwargs))
seq = self.newseq()
if threading.current_thread() != self.sockthread:
cvar = threading.Condition()
self.cvars[seq] = cvar
self.debug(("asynccall:%d:" % seq), oid, methodname, args, kwargs)
self.putmessage((seq, request))
return seq
def asyncqueue(self, oid, methodname, args, kwargs):
request = ("QUEUE", (oid, methodname, args, kwargs))
seq = self.newseq()
if threading.current_thread() != self.sockthread:
cvar = threading.Condition()
self.cvars[seq] = cvar
self.debug(("asyncqueue:%d:" % seq), oid, methodname, args, kwargs)
self.putmessage((seq, request))
return seq
def asyncreturn(self, seq):
self.debug("asyncreturn:%d:call getresponse(): " % seq)
response = self.getresponse(seq, wait=0.05)
self.debug(("asyncreturn:%d:response: " % seq), response)
return self.decoderesponse(response)
def decoderesponse(self, response):
how, what = response
if how == "OK":
return what
if how == "QUEUED":
return None
if how == "EXCEPTION":
self.debug("decoderesponse: EXCEPTION")
return None
if how == "EOF":
self.debug("decoderesponse: EOF")
self.decode_interrupthook()
return None
if how == "ERROR":
self.debug("decoderesponse: Internal ERROR:", what)
raise RuntimeError(what)
if how == "CALLEXC":
self.debug("decoderesponse: Call Exception:", what)
raise what
raise SystemError(how, what)
def decode_interrupthook(self):
""
raise EOFError
def mainloop(self):
"""Listen on socket until I/O not ready or EOF
pollresponse() will loop looking for seq number None, which
never comes, and exit on EOFError.
"""
try:
self.getresponse(myseq=None, wait=0.05)
except EOFError:
self.debug("mainloop:return")
return
def getresponse(self, myseq, wait):
response = self._getresponse(myseq, wait)
if response is not None:
how, what = response
if how == "OK":
response = how, self._proxify(what)
return response
def _proxify(self, obj):
if isinstance(obj, RemoteProxy):
return RPCProxy(self, obj.oid)
if isinstance(obj, list):
return list(map(self._proxify, obj))
# XXX Check for other types -- not currently needed
return obj
def _getresponse(self, myseq, wait):
self.debug("_getresponse:myseq:", myseq)
if threading.current_thread() is self.sockthread:
# this thread does all reading of requests or responses
while 1:
response = self.pollresponse(myseq, wait)
if response is not None:
return response
else:
# wait for notification from socket handling thread
cvar = self.cvars[myseq]
cvar.acquire()
while myseq not in self.responses:
cvar.wait()
response = self.responses[myseq]
self.debug("_getresponse:%s: thread woke up: response: %s" %
(myseq, response))
del self.responses[myseq]
del self.cvars[myseq]
cvar.release()
return response
def newseq(self):
self.nextseq = seq = self.nextseq + 2
return seq
def putmessage(self, message):
self.debug("putmessage:%d:" % message[0])
try:
s = dumps(message)
except pickle.PicklingError:
print("Cannot pickle:", repr(message), file=sys.__stderr__)
raise
s = struct.pack("<i", len(s)) + s
while len(s) > 0:
try:
r, w, x = select.select([], [self.sock], [])
n = self.sock.send(s[:BUFSIZE])
except (AttributeError, TypeError):
raise OSError("socket no longer exists")
s = s[n:]
buff = b''
bufneed = 4
bufstate = 0 # meaning: 0 => reading count; 1 => reading data
def pollpacket(self, wait):
self._stage0()
if len(self.buff) < self.bufneed:
r, w, x = select.select([self.sock.fileno()], [], [], wait)
if len(r) == 0:
return None
try:
s = self.sock.recv(BUFSIZE)
except OSError:
raise EOFError
if len(s) == 0:
raise EOFError
self.buff += s
self._stage0()
return self._stage1()
def _stage0(self):
if self.bufstate == 0 and len(self.buff) >= 4:
s = self.buff[:4]
self.buff = self.buff[4:]
self.bufneed = struct.unpack("<i", s)[0]
self.bufstate = 1
def _stage1(self):
if self.bufstate == 1 and len(self.buff) >= self.bufneed:
packet = self.buff[:self.bufneed]
self.buff = self.buff[self.bufneed:]
self.bufneed = 4
self.bufstate = 0
return packet
def pollmessage(self, wait):
packet = self.pollpacket(wait)
if packet is None:
return None
try:
message = pickle.loads(packet)
except pickle.UnpicklingError:
print("-----------------------", file=sys.__stderr__)
print("cannot unpickle packet:", repr(packet), file=sys.__stderr__)
traceback.print_stack(file=sys.__stderr__)
print("-----------------------", file=sys.__stderr__)
raise
return message
def pollresponse(self, myseq, wait):
"""Handle messages received on the socket.
Some messages received may be asynchronous 'call' or 'queue' requests,
and some may be responses for other threads.
'call' requests are passed to self.localcall() with the expectation of
immediate execution, during which time the socket is not serviced.
'queue' requests are used for tasks (which may block or hang) to be
processed in a different thread. These requests are fed into
request_queue by self.localcall(). Responses to queued requests are
taken from response_queue and sent across the link with the associated
sequence numbers. Messages in the queues are (sequence_number,
request/response) tuples and code using this module removing messages
from the request_queue is responsible for returning the correct
sequence number in the response_queue.
pollresponse() will loop until a response message with the myseq
sequence number is received, and will save other responses in
self.responses and notify the owning thread.
"""
while 1:
# send queued response if there is one available
try:
qmsg = response_queue.get(0)
except queue.Empty:
pass
else:
seq, response = qmsg
message = (seq, ('OK', response))
self.putmessage(message)
# poll for message on link
try:
message = self.pollmessage(wait)
if message is None: # socket not ready
return None
except EOFError:
self.handle_EOF()
return None
except AttributeError:
return None
seq, resq = message
how = resq[0]
self.debug("pollresponse:%d:myseq:%s" % (seq, myseq))
# process or queue a request
if how in ("CALL", "QUEUE"):
self.debug("pollresponse:%d:localcall:call:" % seq)
response = self.localcall(seq, resq)
self.debug("pollresponse:%d:localcall:response:%s"
% (seq, response))
if how == "CALL":
self.putmessage((seq, response))
elif how == "QUEUE":
# don't acknowledge the 'queue' request!
pass
continue
# return if completed message transaction
elif seq == myseq:
return resq
# must be a response for a different thread:
else:
cv = self.cvars.get(seq, None)
# response involving unknown sequence number is discarded,
# probably intended for prior incarnation of server
if cv is not None:
cv.acquire()
self.responses[seq] = resq
cv.notify()
cv.release()
continue
def handle_EOF(self):
"action taken upon link being closed by peer"
self.EOFhook()
self.debug("handle_EOF")
for key in self.cvars:
cv = self.cvars[key]
cv.acquire()
self.responses[key] = ('EOF', None)
cv.notify()
cv.release()
# call our (possibly overridden) exit function
self.exithook()
def EOFhook(self):
"Classes using rpc client/server can override to augment EOF action"
pass
#----------------- end class SocketIO --------------------
class RemoteObject(object):
# Token mix-in class
pass
def remoteref(obj):
oid = id(obj)
objecttable[oid] = obj
return RemoteProxy(oid)
class RemoteProxy(object):
def __init__(self, oid):
self.oid = oid
class RPCHandler(socketserver.BaseRequestHandler, SocketIO):
debugging = False
location = "#S" # Server
def __init__(self, sock, addr, svr):
svr.current_handler = self ## cgt xxx
SocketIO.__init__(self, sock)
socketserver.BaseRequestHandler.__init__(self, sock, addr, svr)
def handle(self):
"handle() method required by socketserver"
self.mainloop()
def get_remote_proxy(self, oid):
return RPCProxy(self, oid)
class RPCClient(SocketIO):
debugging = False
location = "#C" # Client
nextseq = 1 # Requests coming from the client are odd numbered
def __init__(self, address, family=socket.AF_INET, type=socket.SOCK_STREAM):
self.listening_sock = socket.socket(family, type)
self.listening_sock.bind(address)
self.listening_sock.listen(1)
def accept(self):
working_sock, address = self.listening_sock.accept()
if self.debugging:
print("****** Connection request from ", address, file=sys.__stderr__)
if address[0] == LOCALHOST:
SocketIO.__init__(self, working_sock)
else:
print("** Invalid host: ", address, file=sys.__stderr__)
raise OSError
def get_remote_proxy(self, oid):
return RPCProxy(self, oid)
class RPCProxy(object):
__methods = None
__attributes = None
def __init__(self, sockio, oid):
self.sockio = sockio
self.oid = oid
def __getattr__(self, name):
if self.__methods is None:
self.__getmethods()
if self.__methods.get(name):
return MethodProxy(self.sockio, self.oid, name)
if self.__attributes is None:
self.__getattributes()
if name in self.__attributes:
value = self.sockio.remotecall(self.oid, '__getattribute__',
(name,), {})
return value
else:
raise AttributeError(name)
def __getattributes(self):
self.__attributes = self.sockio.remotecall(self.oid,
"__attributes__", (), {})
def __getmethods(self):
self.__methods = self.sockio.remotecall(self.oid,
"__methods__", (), {})
def _getmethods(obj, methods):
# Helper to get a list of methods from an object
# Adds names to dictionary argument 'methods'
for name in dir(obj):
attr = getattr(obj, name)
if callable(attr):
methods[name] = 1
if isinstance(obj, type):
for super in obj.__bases__:
_getmethods(super, methods)
def _getattributes(obj, attributes):
for name in dir(obj):
attr = getattr(obj, name)
if not callable(attr):
attributes[name] = 1
class MethodProxy(object):
def __init__(self, sockio, oid, name):
self.sockio = sockio
self.oid = oid
self.name = name
def __call__(self, *args, **kwargs):
value = self.sockio.remotecall(self.oid, self.name, args, kwargs)
return value
# XXX KBK 09Sep03 We need a proper unit test for this module. Previously
# existing test code was removed at Rev 1.27 (r34098).
def displayhook(value):
"""Override standard display hook to use non-locale encoding"""
if value is None:
return
# Set '_' to None to avoid recursion
builtins._ = None
text = repr(value)
try:
sys.stdout.write(text)
except UnicodeEncodeError:
        # fall back to ascii while a utf8-bmp codec is not available
encoding = 'ascii'
bytes = text.encode(encoding, 'backslashreplace')
text = bytes.decode(encoding, 'strict')
sys.stdout.write(text)
sys.stdout.write("\n")
builtins._ = value
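# A minimal illustrative sketch of the wire format used by
# SocketIO.putmessage() and SocketIO.pollpacket()/pollmessage() above: each
# message is a pickled (seq, request) tuple prefixed with its length packed as
# a little-endian 32-bit integer.  The oid and method name below are
# placeholders, not objects registered in objecttable.
def _wire_format_demo():
    "Frame and unframe one RPC message the same way SocketIO does."
    message = (2, ("CALL", ("console", "write", ("hello\n",), {})))
    payload = dumps(message)                      # pickled via CodePickler
    packet = struct.pack("<i", len(payload)) + payload
    # The receiver reads the 4-byte length header first, then the payload.
    (length,) = struct.unpack("<i", packet[:4])
    assert pickle.loads(packet[4:4 + length]) == message
    return packet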
if __name__ == '__main__':
from unittest import main
main('edit.edit_test.test_rpc', verbosity=2,)
|
PypiClean
|
/mesh_common-0.1.6-py3-none-any.whl/mesh_common/concurrency.py
|
import concurrent.futures
from typing import Any, Callable, Mapping, Optional, Sequence
from nhs_context_logging.logger import LoggingThreadPoolExecutor
def concurrent_tasks(
parallels: list[tuple[str, Callable, Sequence[Any]]],
raise_if_ex: bool = True,
with_results=True,
max_workers: Optional[int] = None,
) -> Mapping[str, Any]:
"""
Execute a collection of tasks in parallel, wait for all tasks to complete and return the
outcomes
Args:
parallels: The tasks to be queued to execute, each specified as a three-tuple:
the first element is a string to use as an identifier for the task;
the second is the callable to be invoked;
the third a sequence of arguments to be passed to the callable
raise_if_ex: Whether to raise an instance of ConcurrentExceptions should any of the
tasks raise an exception
with_results: Whether to return the task results:
if set to false, the returned dictionary will just contain either a True or a False
value depending on the success of the task
max_workers: max concurrency
Returns:
A dictionary keyed by the task IDs and whose values are either the value produced by the
task or the exception raised by the task; or, if with_results is set to False, True
for tasks that ran successfully and False for tasks which raised exceptions
"""
with LoggingThreadPoolExecutor(max_workers=max_workers) as executor:
task_ids = {}
futures = []
for task_id, func, args in parallels:
future = executor.submit(func, *args)
task_ids[id(future)] = str(task_id)
futures.append(future)
results = {}
exceptions = []
for future in concurrent.futures.as_completed(futures):
task_id = task_ids[id(future)]
ex = future.exception()
if not ex:
results[task_id] = future.result() if with_results else True
continue
results[task_id] = ex if with_results else False
exceptions.append((future, ex))
if not raise_if_ex or not exceptions:
return results
if len(exceptions) == 1:
exceptions[0][0].result()
result_exc = [(task_ids[id(f)], e) for f, e in exceptions]
raise ConcurrentExceptions(*result_exc) # type: ignore[arg-type]
class ConcurrentExceptions(Exception):
"""custom concurrent exception implementation, with extended output"""
def __init__(self, *exceptions: tuple[str, Exception]):
super().__init__(*exceptions)
self.exceptions = dict(exceptions)
def __repr__(self):
return "\n".join(t + ": " + repr(e) for t, e in self.exceptions.items())
def __str__(self):
child_errors = "\n".join(t + ": " + str(e) for t, e in self.exceptions.items())
return "Inner Exceptions:\n" + child_errors
|
PypiClean
|
/pocketbasex-0.8.2-py3-none-any.whl/pocketbase/services/log_service.py
|
from __future__ import annotations
from dataclasses import dataclass
from typing import Union
from urllib.parse import quote
import datetime
from pocketbase.services.utils.base_service import BaseService
from pocketbase.models.utils.list_result import ListResult
from pocketbase.models.log_request import LogRequest
from pocketbase.utils import to_datetime
@dataclass
class HourlyStats:
total: int
date: Union[str, datetime.datetime]
class LogService(BaseService):
def get_request_list(
self, page: int = 1, per_page: int = 30, query_params: dict = {}
) -> ListResult:
"""Returns paginated logged requests list."""
query_params.update({"page": page, "perPage": per_page})
response_data = self.client.send(
"/api/logs/requests",
{"method": "GET", "params": query_params},
)
items: list[LogRequest] = []
if "items" in response_data:
response_data["items"] = response_data["items"] or []
for item in response_data["items"]:
items.append(LogRequest(item))
return ListResult(
response_data.get("page", 1),
response_data.get("perPage", 0),
response_data.get("totalItems", 0),
response_data.get("totalPages", 0),
items,
)
def get_request(self, id: str, query_params: dict = {}) -> LogRequest:
"""Returns a single logged request by its id."""
return LogRequest(
self.client.send(
"/api/logs/requests/" + quote(id),
{"method": "GET", "params": query_params},
)
)
def get_requests_stats(self, query_params: dict = {}) -> list[HourlyStats]:
"""Returns request logs statistics."""
return [
HourlyStats(total=stat["total"], date=to_datetime(stat["date"]))
for stat in self.client.send(
"/api/logs/requests/stats",
{"method": "GET", "params": query_params},
)
]
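# A minimal illustrative walk-through of the calls defined above.  It assumes
# an already authenticated pocketbase Client instance named `client`, and that
# LogService can be constructed from it directly (as BaseService subclasses
# normally are); "REQUEST_ID" is a placeholder, not a real log id.
def _log_service_demo(client):
    logs = LogService(client)
    recent = logs.get_request_list(page=1, per_page=50)  # first 50 logged requests
    single = logs.get_request("REQUEST_ID")              # one request by its id
    stats = logs.get_requests_stats()                    # list of HourlyStats
    return recent, single, stats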
|
PypiClean
|
/productmd-1.36.tar.gz/productmd-1.36/doc/discinfo-1.0.rst
|
========================
Discinfo file format 1.0
========================
.discinfo files can be found on Fedora installation media and
provide media information to the Anaconda installer.
File Format
===========
.discinfo is a plain-text file containing the following fields, one value per line:
::
    timestamp: float
    release: str
    architecture: str
    disc_numbers: ALL or comma-separated numbers
Examples
========
Fedora 21 Server.x86_64, disc_numbers: ALL::
1417653453.026288
Fedora Server 21
x86_64
ALL
Fedora 21 Server.x86_64, disc_numbers: [1, 2, 3]::
1417653453.026288
Fedora Server 21
x86_64
1,2,3
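Parsing Example
===============
A minimal sketch of reading the four fields in the order listed above;
the helper function below is illustrative and not part of productmd itself::
    def parse_discinfo(path):
        with open(path, encoding="utf-8") as f:
            lines = [line.rstrip("\n") for line in f]
        timestamp, release, architecture, discs = lines[:4]
        return {
            "timestamp": float(timestamp),
            "release": release,
            "architecture": architecture,
            "disc_numbers": "ALL" if discs == "ALL" else [int(n) for n in discs.split(",")],
        }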
|
PypiClean
|
/nonebot_plugin_oddtext-0.2.0.tar.gz/nonebot_plugin_oddtext-0.2.0/nonebot_plugin_oddtext/cxh.py
|
pinyin = {
"㐀": "qiu",
"㐁": "tian",
"㐂": "xi",
"㐅": "wu",
"㐆": "yin",
"㐇": "jiu",
"㐈": "ru",
"㐉": "ding",
"㐊": "shi",
"㐋": "tu",
"㐌": "si",
"㐍": "zhao",
"㐎": "wen",
"㐏": "wu",
"㐐": "nu",
"㐑": "dong",
"㐒": "zhao",
"㐓": "ke",
"㐔": "xiang",
"㐖": "ye",
"㐗": "lao",
"㐘": "mi",
"㐙": "dou",
"㐚": "wu",
"㐜": "chou",
"㐝": "ju",
"㐞": "qi",
"㐟": "fei",
"㐡": "nuo",
"㐢": "fu",
"㐣": "ku",
"㐤": "qiu",
"㐥": "chu",
"㐦": "ju",
"㐨": "xu",
"㐩": "xing",
"㐫": "xiong",
"㐬": "liu",
"㐭": "lin",
"㐰": "xin",
"㐱": "zhen",
"㐲": "dai",
"㐳": "wu",
"㐷": "ma",
"㐸": "qian",
"㐹": "yi",
"㐺": "zhong",
"㐻": "nei",
"㐼": "cheng",
"㐾": "ruan",
"㑀": "tai",
"㑂": "fang",
"㑃": "ao",
"㑄": "wu",
"㑅": "zuo",
"㑇": "zhou",
"㑈": "dong",
"㑉": "su",
"㑊": "yi",
"㑋": "jiong",
"㑍": "lei",
"㑎": "nao",
"㑏": "zhu",
"㑑": "lan",
"㑔": "xu",
"㑗": "shen",
"㑘": "jie",
"㑙": "die",
"㑚": "nuo",
"㑛": "su",
"㑜": "yi",
"㑝": "long",
"㑞": "ying",
"㑟": "beng",
"㑣": "lan",
"㑤": "miao",
"㑥": "yi",
"㑦": "li",
"㑧": "ji",
"㑨": "yu",
"㑩": "luo",
"㑪": "chai",
"㑮": "hun",
"㑯": "xu",
"㑰": "hui",
"㑱": "rao",
"㑳": "zhou",
"㑵": "han",
"㑶": "xi",
"㑷": "tai",
"㑸": "ai",
"㑹": "hui",
"㑺": "jun",
"㑻": "ma",
"㑽": "tang",
"㑾": "xiao",
"㑿": "zhao",
"㒀": "zha",
"㒁": "yu",
"㒂": "ku",
"㒃": "er",
"㒄": "nang",
"㒅": "qi",
"㒆": "chi",
"㒇": "mu",
"㒈": "han",
"㒉": "tang",
"㒊": "se",
"㒌": "qiong",
"㒍": "lei",
"㒎": "sa",
"㒐": "xun",
"㒑": "hui",
"㒒": "pu",
"㒓": "ta",
"㒔": "shu",
"㒖": "ou",
"㒗": "tai",
"㒙": "mian",
"㒚": "wen",
"㒛": "diao",
"㒜": "yu",
"㒝": "mie",
"㒞": "jun",
"㒟": "niao",
"㒠": "xie",
"㒡": "you",
"㒤": "she",
"㒦": "lei",
"㒧": "li",
"㒩": "luo",
"㒫": "ji",
"㒰": "quan",
"㒱": "meng",
"㒲": "cai",
"㒳": "liang",
"㒴": "gu",
"㒵": "mao",
"㒷": "gua",
"㒸": "sui",
"㒹": "dian",
"㒻": "mao",
"㒼": "man",
"㒾": "shi",
"㒿": "li",
"㓁": "wang",
"㓂": "kou",
"㓃": "chui",
"㓄": "zhen",
"㓈": "bei",
"㓉": "huan",
"㓊": "dong",
"㓋": "gong",
"㓎": "qin",
"㓏": "jiong",
"㓐": "lu",
"㓑": "xing",
"㓓": "nan",
"㓔": "xie",
"㓖": "bi",
"㓗": "jie",
"㓘": "su",
"㓜": "you",
"㓝": "xing",
"㓞": "qi",
"㓟": "pi",
"㓠": "dian",
"㓡": "fu",
"㓢": "luo",
"㓣": "qia",
"㓤": "jie",
"㓦": "bai",
"㓧": "yan",
"㓨": "ci",
"㓪": "lang",
"㓭": "he",
"㓯": "li",
"㓰": "hua",
"㓱": "tou",
"㓲": "pian",
"㓴": "jun",
"㓵": "e",
"㓶": "qie",
"㓷": "yi",
"㓸": "jue",
"㓹": "rui",
"㓺": "jian",
"㓼": "chi",
"㓽": "chong",
"㓾": "chi",
"㔀": "lve",
"㔂": "lin",
"㔃": "jue",
"㔄": "su",
"㔅": "xiao",
"㔆": "zan",
"㔇": "po",
"㔉": "zhu",
"㔊": "dan",
"㔋": "jian",
"㔌": "zhou",
"㔍": "duo",
"㔎": "xie",
"㔏": "li",
"㔑": "chi",
"㔒": "xi",
"㔓": "jian",
"㔕": "ji",
"㔖": "jia",
"㔗": "fei",
"㔘": "chu",
"㔙": "bang",
"㔚": "kou",
"㔛": "jie",
"㔜": "ba",
"㔝": "liang",
"㔞": "kuai",
"㔠": "he",
"㔢": "jue",
"㔣": "lei",
"㔤": "shen",
"㔥": "pi",
"㔦": "yang",
"㔧": "xue",
"㔨": "bei",
"㔩": "e",
"㔪": "lu",
"㔭": "che",
"㔮": "nuo",
"㔯": "suan",
"㔰": "heng",
"㔱": "yu",
"㔳": "gui",
"㔴": "yi",
"㔵": "xian",
"㔶": "gong",
"㔷": "lou",
"㔹": "le",
"㔺": "shi",
"㔻": "pi",
"㔼": "sun",
"㔽": "yao",
"㔾": "jie",
"㔿": "zou",
"㕁": "que",
"㕂": "yin",
"㕄": "zhi",
"㕅": "jia",
"㕆": "hu",
"㕇": "la",
"㕈": "hou",
"㕉": "ke",
"㕊": "fu",
"㕋": "jing",
"㕌": "ai",
"㕎": "e",
"㕏": "chu",
"㕐": "xie",
"㕑": "chu",
"㕒": "wei",
"㕕": "huan",
"㕖": "su",
"㕗": "you",
"㕘": "can",
"㕙": "jun",
"㕚": "zhao",
"㕛": "xu",
"㕜": "shi",
"㕞": "shua",
"㕟": "kui",
"㕠": "shuang",
"㕡": "he",
"㕢": "gai",
"㕣": "yan",
"㕤": "qiu",
"㕥": "yi",
"㕦": "hua",
"㕧": "xi",
"㕨": "fan",
"㕩": "zhang",
"㕪": "dan",
"㕫": "fang",
"㕬": "song",
"㕭": "ao",
"㕮": "fu",
"㕯": "ne",
"㕰": "he",
"㕱": "you",
"㕲": "hua",
"㕴": "chen",
"㕵": "guo",
"㕶": "ň",
"㕷": "hua",
"㕸": "li",
"㕹": "fa",
"㕺": "hao",
"㕻": "tou",
"㕽": "si",
"㕾": "xiao",
"㖀": "le",
"㖁": "lin",
"㖂": "yi",
"㖃": "hou",
"㖅": "xu",
"㖆": "qu",
"㖇": "er",
"㖏": "nei",
"㖐": "wei",
"㖑": "xie",
"㖒": "ti",
"㖓": "hong",
"㖔": "tun",
"㖕": "bo",
"㖖": "nie",
"㖗": "yin",
"㖜": "xiang",
"㖞": "wai",
"㖟": "shou",
"㖠": "ba",
"㖡": "ye",
"㖢": "ji",
"㖣": "tou",
"㖤": "han",
"㖥": "jiong",
"㖦": "dong",
"㖧": "wen",
"㖨": "lu",
"㖩": "sou",
"㖪": "guo",
"㖫": "ling",
"㖭": "tian",
"㖮": "lun",
"㖶": "ye",
"㖷": "shi",
"㖸": "xue",
"㖹": "fen",
"㖺": "chun",
"㖻": "rou",
"㖼": "duo",
"㖽": "ze",
"㖾": "e",
"㖿": "xie",
"㗁": "e",
"㗂": "sheng",
"㗃": "wen",
"㗄": "man",
"㗅": "hu",
"㗆": "ge",
"㗇": "xia",
"㗈": "man",
"㗉": "bi",
"㗊": "ji",
"㗋": "hou",
"㗌": "zhi",
"㗑": "bai",
"㗒": "ai",
"㗕": "gou",
"㗖": "dan",
"㗗": "bai",
"㗘": "bo",
"㗙": "na",
"㗚": "li",
"㗛": "xiao",
"㗜": "xiu",
"㗢": "dong",
"㗣": "ti",
"㗤": "cu",
"㗥": "kuo",
"㗦": "lao",
"㗧": "zhi",
"㗨": "ai",
"㗩": "xi",
"㗫": "qie",
"㗬": "zha",
"㗰": "chu",
"㗱": "ji",
"㗲": "huo",
"㗳": "ta",
"㗴": "yan",
"㗵": "xu",
"㗷": "sai",
"㗻": "guo",
"㗼": "ye",
"㗽": "xiang",
"㗾": "xue",
"㗿": "he",
"㘀": "zuo",
"㘁": "yi",
"㘂": "ci",
"㘃": "ruo",
"㘄": "leng",
"㘅": "xian",
"㘆": "tai",
"㘇": "rong",
"㘈": "yi",
"㘉": "zhi",
"㘊": "yi",
"㘋": "xian",
"㘌": "ju",
"㘍": "ji",
"㘎": "han",
"㘐": "pao",
"㘑": "li",
"㘓": "lan",
"㘔": "can",
"㘕": "han",
"㘖": "yan",
"㘙": "yan",
"㘚": "han",
"㘜": "chi",
"㘝": "nian",
"㘞": "huo",
"㘠": "bi",
"㘡": "xia",
"㘢": "weng",
"㘣": "xuan",
"㘥": "you",
"㘦": "qin",
"㘧": "xu",
"㘨": "nei",
"㘩": "bi",
"㘪": "hao",
"㘫": "jing",
"㘬": "ao",
"㘭": "ao",
"㘲": "ju",
"㘴": "zuo",
"㘵": "bu",
"㘶": "jie",
"㘷": "ai",
"㘸": "zang",
"㘹": "ci",
"㘺": "fa",
"㘿": "nie",
"㙀": "liu",
"㙁": "mang",
"㙂": "dui",
"㙄": "bi",
"㙅": "bao",
"㙇": "chu",
"㙈": "han",
"㙉": "tian",
"㙊": "chang",
"㙍": "duo",
"㙏": "fu",
"㙐": "duo",
"㙑": "yu",
"㙒": "ye",
"㙓": "kui",
"㙔": "han",
"㙕": "kuai",
"㙗": "kuai",
"㙙": "long",
"㙚": "xing",
"㙛": "bu",
"㙜": "chi",
"㙝": "xie",
"㙞": "nie",
"㙟": "lang",
"㙠": "yi",
"㙢": "man",
"㙣": "zhang",
"㙤": "xia",
"㙥": "gun",
"㙦": "xie",
"㙨": "ji",
"㙩": "liao",
"㙪": "ye",
"㙫": "ji",
"㙬": "yin",
"㙮": "da",
"㙯": "yi",
"㙰": "xie",
"㙱": "hao",
"㙲": "yong",
"㙳": "han",
"㙴": "chan",
"㙵": "tai",
"㙶": "tang",
"㙷": "zhi",
"㙸": "bao",
"㙹": "meng",
"㙺": "gui",
"㙻": "chan",
"㙼": "lei",
"㙾": "xi",
"㚁": "qiao",
"㚂": "rang",
"㚃": "yun",
"㚅": "long",
"㚆": "fu",
"㚇": "zong",
"㚉": "gu",
"㚊": "kai",
"㚌": "hua",
"㚍": "guo",
"㚏": "gao",
"㚐": "tao",
"㚒": "shan",
"㚓": "lai",
"㚔": "nie",
"㚕": "fu",
"㚖": "gao",
"㚗": "qie",
"㚘": "ban",
"㚛": "xi",
"㚜": "xu",
"㚝": "kui",
"㚞": "meng",
"㚟": "chuo",
"㚡": "ji",
"㚢": "nu",
"㚣": "jiao",
"㚤": "yi",
"㚥": "yu",
"㚦": "yi",
"㚧": "yan",
"㚩": "ran",
"㚪": "hao",
"㚫": "sha",
"㚭": "you",
"㚯": "xin",
"㚰": "bi",
"㚲": "dian",
"㚴": "bu",
"㚶": "si",
"㚷": "er",
"㚸": "si",
"㚹": "mao",
"㚺": "yun",
"㚻": "ji",
"㚽": "qiao",
"㚿": "pao",
"㛂": "nuo",
"㛃": "jie",
"㛄": "yi",
"㛅": "er",
"㛆": "duo",
"㛊": "duo",
"㛍": "qie",
"㛏": "ou",
"㛐": "sao",
"㛑": "can",
"㛒": "dou",
"㛔": "peng",
"㛕": "yi",
"㛖": "suo",
"㛗": "zuo",
"㛘": "po",
"㛙": "qie",
"㛚": "tong",
"㛛": "xin",
"㛜": "you",
"㛝": "bei",
"㛞": "long",
"㛣": "yun",
"㛥": "ta",
"㛦": "lan",
"㛧": "man",
"㛨": "qiang",
"㛩": "zhou",
"㛪": "yan",
"㛬": "lu",
"㛮": "sao",
"㛯": "mian",
"㛱": "rui",
"㛲": "fa",
"㛳": "cha",
"㛴": "nao",
"㛶": "chou",
"㛸": "shu",
"㛹": "pian",
"㛻": "kui",
"㛼": "sha",
"㛾": "xian",
"㛿": "zhi",
"㜃": "lian",
"㜄": "xun",
"㜅": "xu",
"㜆": "mi",
"㜇": "hui",
"㜈": "mu",
"㜊": "pang",
"㜋": "yi",
"㜌": "gou",
"㜍": "tang",
"㜎": "xi",
"㜏": "yun",
"㜐": "shu",
"㜑": "fu",
"㜒": "yi",
"㜓": "da",
"㜕": "lian",
"㜖": "cao",
"㜗": "can",
"㜘": "ju",
"㜙": "lu",
"㜚": "su",
"㜛": "nen",
"㜜": "ao",
"㜝": "an",
"㜞": "qian",
"㜣": "ran",
"㜤": "shen",
"㜥": "mai",
"㜦": "han",
"㜧": "yue",
"㜨": "er",
"㜩": "ao",
"㜪": "xian",
"㜫": "ma",
"㜭": "dang",
"㜮": "lan",
"㜰": "yue",
"㜱": "dong",
"㜲": "weng",
"㜳": "huai",
"㜴": "meng",
"㜵": "niao",
"㜶": "fan",
"㜷": "mi",
"㜸": "nie",
"㜹": "qu",
"㜺": "zan",
"㜻": "lian",
"㜼": "zhi",
"㜽": "zi",
"㜾": "hai",
"㜿": "xu",
"㝀": "hao",
"㝁": "xun",
"㝂": "zhi",
"㝃": "fan",
"㝄": "chun",
"㝅": "gou",
"㝇": "chun",
"㝈": "luan",
"㝉": "zhu",
"㝊": "shou",
"㝋": "liao",
"㝌": "jie",
"㝍": "xie",
"㝎": "ding",
"㝏": "jie",
"㝐": "rong",
"㝑": "mang",
"㝓": "ge",
"㝔": "yao",
"㝕": "ning",
"㝖": "yi",
"㝗": "lang",
"㝘": "yong",
"㝙": "yin",
"㝛": "su",
"㝝": "lin",
"㝞": "ya",
"㝟": "mao",
"㝠": "ming",
"㝡": "zui",
"㝢": "yu",
"㝣": "ye",
"㝤": "gou",
"㝥": "mi",
"㝦": "jun",
"㝧": "wen",
"㝩": "kang",
"㝪": "dian",
"㝫": "long",
"㝬": "hu",
"㝭": "xing",
"㝮": "cui",
"㝯": "qiao",
"㝰": "mian",
"㝱": "meng",
"㝲": "qin",
"㝴": "wan",
"㝵": "de",
"㝶": "ai",
"㝸": "bian",
"㝹": "nou",
"㝺": "lian",
"㝻": "jin",
"㝼": "yu",
"㝽": "chui",
"㝾": "zuo",
"㝿": "bo",
"㞁": "yao",
"㞂": "tui",
"㞃": "ji",
"㞅": "guo",
"㞆": "ji",
"㞇": "wei",
"㞊": "xu",
"㞋": "nian",
"㞌": "yun",
"㞎": "ba",
"㞏": "zhe",
"㞐": "ju",
"㞑": "wei",
"㞒": "xi",
"㞓": "qi",
"㞔": "yi",
"㞕": "xie",
"㞖": "ci",
"㞗": "qiu",
"㞘": "tun",
"㞙": "niao",
"㞚": "qi",
"㞛": "ji",
"㞞": "song",
"㞟": "dian",
"㞠": "lao",
"㞡": "zhan",
"㞤": "yin",
"㞥": "cen",
"㞦": "ji",
"㞧": "hui",
"㞨": "zai",
"㞩": "lan",
"㞪": "nao",
"㞫": "ju",
"㞬": "qin",
"㞭": "dai",
"㞯": "jie",
"㞰": "xu",
"㞲": "yong",
"㞳": "dou",
"㞴": "chi",
"㞶": "min",
"㞷": "huang",
"㞸": "sui",
"㞹": "ke",
"㞺": "zu",
"㞻": "hao",
"㞼": "cheng",
"㞽": "xue",
"㞾": "ni",
"㞿": "chi",
"㟀": "lian",
"㟁": "an",
"㟂": "chi",
"㟃": "si",
"㟄": "xiang",
"㟅": "yang",
"㟆": "hua",
"㟇": "cuo",
"㟈": "qiu",
"㟉": "lao",
"㟊": "fu",
"㟋": "dui",
"㟌": "mang",
"㟍": "lang",
"㟎": "tuo",
"㟏": "han",
"㟐": "mang",
"㟑": "bo",
"㟓": "qi",
"㟔": "han",
"㟖": "long",
"㟘": "tiao",
"㟙": "lao",
"㟚": "qi",
"㟛": "zan",
"㟜": "mi",
"㟝": "pei",
"㟞": "zhan",
"㟟": "xiang",
"㟠": "gang",
"㟢": "qi",
"㟤": "lu",
"㟦": "yun",
"㟧": "e",
"㟨": "quan",
"㟩": "min",
"㟪": "wei",
"㟫": "quan",
"㟬": "shu",
"㟭": "min",
"㟮": "tu",
"㟯": "e",
"㟰": "ming",
"㟱": "yao",
"㟲": "jue",
"㟳": "li",
"㟴": "kuai",
"㟵": "gang",
"㟶": "yuan",
"㟷": "da",
"㟹": "lao",
"㟺": "lou",
"㟻": "qian",
"㟼": "ao",
"㟽": "biao",
"㟿": "mang",
"㠀": "dao",
"㠂": "ao",
"㠄": "xi",
"㠅": "fu",
"㠇": "jiu",
"㠈": "run",
"㠉": "tong",
"㠋": "e",
"㠍": "ji",
"㠎": "qi",
"㠏": "hua",
"㠐": "jiao",
"㠑": "zui",
"㠒": "biao",
"㠓": "meng",
"㠔": "bai",
"㠕": "wei",
"㠖": "ji",
"㠗": "ao",
"㠘": "yu",
"㠙": "hao",
"㠚": "dui",
"㠛": "wo",
"㠜": "ni",
"㠝": "cuan",
"㠟": "li",
"㠠": "lu",
"㠡": "niao",
"㠢": "huai",
"㠣": "lai",
"㠥": "lv",
"㠧": "mi",
"㠨": "yu",
"㠪": "ju",
"㠫": "gu",
"㠬": "gu",
"㠭": "zhan",
"㠯": "yi",
"㠱": "ji",
"㠲": "bi",
"㠴": "ren",
"㠵": "huang",
"㠶": "fan",
"㠷": "ge",
"㠸": "ku",
"㠹": "jie",
"㠺": "miao",
"㠽": "tong",
"㠿": "ci",
"㡀": "bi",
"㡁": "kai",
"㡂": "li",
"㡄": "sun",
"㡅": "nuo",
"㡆": "huang",
"㡇": "ji",
"㡈": "men",
"㡉": "xian",
"㡊": "qia",
"㡋": "e",
"㡌": "mao",
"㡏": "tou",
"㡑": "qiao",
"㡒": "zhun",
"㡔": "wu",
"㡖": "chuang",
"㡗": "ti",
"㡘": "lian",
"㡙": "bi",
"㡛": "mang",
"㡜": "xue",
"㡝": "feng",
"㡞": "lei",
"㡠": "zheng",
"㡡": "chu",
"㡢": "man",
"㡣": "long",
"㡥": "yin",
"㡧": "zheng",
"㡨": "qian",
"㡩": "luan",
"㡪": "nie",
"㡫": "yi",
"㡭": "ji",
"㡮": "ji",
"㡯": "zhai",
"㡰": "yu",
"㡱": "jiu",
"㡲": "huan",
"㡳": "di",
"㡵": "ling",
"㡶": "zhi",
"㡷": "ben",
"㡸": "zha",
"㡹": "ci",
"㡺": "dan",
"㡻": "liao",
"㡼": "yi",
"㡽": "zhao",
"㡾": "xian",
"㡿": "chi",
"㢀": "ci",
"㢁": "chi",
"㢂": "yan",
"㢃": "lang",
"㢄": "dou",
"㢅": "long",
"㢆": "chan",
"㢈": "tui",
"㢉": "cha",
"㢊": "ai",
"㢋": "chi",
"㢍": "ying",
"㢎": "cha",
"㢏": "tou",
"㢑": "tui",
"㢒": "cha",
"㢓": "yao",
"㢔": "zong",
"㢗": "qiao",
"㢘": "lian",
"㢙": "qin",
"㢚": "lu",
"㢛": "yan",
"㢞": "yi",
"㢟": "chan",
"㢠": "jiong",
"㢡": "jiang",
"㢣": "jing",
"㢥": "dong",
"㢦": "ge",
"㢧": "juan",
"㢨": "han",
"㢩": "di",
"㢬": "hong",
"㢮": "chi",
"㢯": "min",
"㢰": "bi",
"㢲": "xun",
"㢳": "lu",
"㢵": "she",
"㢶": "bi",
"㢸": "bi",
"㢺": "xian",
"㢻": "wei",
"㢼": "bie",
"㢽": "er",
"㢾": "juan",
"㣀": "zhen",
"㣁": "bei",
"㣂": "yi",
"㣃": "yu",
"㣄": "qu",
"㣅": "zan",
"㣆": "mi",
"㣇": "ni",
"㣈": "si",
"㣌": "shan",
"㣍": "tai",
"㣎": "mu",
"㣏": "jing",
"㣐": "bian",
"㣑": "rong",
"㣒": "ceng",
"㣓": "can",
"㣙": "di",
"㣚": "tong",
"㣛": "ta",
"㣜": "xing",
"㣝": "song",
"㣞": "duo",
"㣟": "xi",
"㣠": "tong",
"㣢": "ti",
"㣣": "shan",
"㣤": "jian",
"㣥": "zhi",
"㣧": "yin",
"㣪": "huan",
"㣫": "zhong",
"㣬": "qi",
"㣯": "xie",
"㣰": "xie",
"㣱": "ze",
"㣲": "wei",
"㣵": "ta",
"㣶": "zhan",
"㣷": "ning",
"㣻": "yi",
"㣼": "ren",
"㣽": "shu",
"㣾": "cha",
"㣿": "zhuo",
"㤁": "mian",
"㤂": "ji",
"㤃": "fang",
"㤄": "pei",
"㤅": "ai",
"㤆": "fan",
"㤇": "ao",
"㤈": "qin",
"㤉": "qia",
"㤊": "xiao",
"㤍": "qiao",
"㤏": "tong",
"㤑": "you",
"㤓": "ben",
"㤔": "fu",
"㤕": "chu",
"㤖": "zhu",
"㤘": "chu",
"㤚": "hang",
"㤛": "nin",
"㤜": "jue",
"㤞": "cha",
"㤟": "kong",
"㤠": "lie",
"㤡": "li",
"㤢": "xu",
"㤤": "yu",
"㤥": "hai",
"㤦": "li",
"㤧": "hou",
"㤨": "gong",
"㤩": "ke",
"㤪": "yuan",
"㤫": "de",
"㤬": "hui",
"㤮": "kuang",
"㤯": "jiong",
"㤰": "zan",
"㤱": "fu",
"㤲": "qie",
"㤳": "bei",
"㤴": "xi",
"㤵": "ci",
"㤶": "pang",
"㤸": "xi",
"㤹": "qiu",
"㤺": "huang",
"㤻": "lun",
"㤽": "chou",
"㤾": "san",
"㥀": "de",
"㥁": "de",
"㥂": "te",
"㥃": "men",
"㥄": "ling",
"㥅": "shou",
"㥆": "dian",
"㥇": "can",
"㥈": "die",
"㥉": "che",
"㥊": "peng",
"㥋": "yi",
"㥌": "ju",
"㥍": "ji",
"㥎": "lai",
"㥏": "tian",
"㥐": "yuan",
"㥒": "cai",
"㥓": "qi",
"㥔": "yu",
"㥕": "lian",
"㥚": "yu",
"㥛": "ji",
"㥜": "wei",
"㥝": "mi",
"㥞": "cui",
"㥟": "xie",
"㥠": "xu",
"㥡": "xi",
"㥢": "qiu",
"㥣": "hui",
"㥥": "yu",
"㥦": "qie",
"㥧": "shun",
"㥨": "chui",
"㥩": "duo",
"㥪": "lou",
"㥫": "dun",
"㥬": "pang",
"㥭": "tai",
"㥮": "zhou",
"㥯": "yin",
"㥱": "fei",
"㥲": "shen",
"㥳": "yuan",
"㥴": "yi",
"㥵": "hun",
"㥶": "se",
"㥷": "ye",
"㥸": "min",
"㥹": "fen",
"㥺": "he",
"㥼": "yin",
"㥽": "ce",
"㥾": "ni",
"㥿": "ao",
"㦀": "feng",
"㦁": "lian",
"㦂": "chang",
"㦃": "chan",
"㦄": "ma",
"㦅": "die",
"㦇": "lu",
"㦉": "yi",
"㦊": "hua",
"㦌": "tui",
"㦍": "e",
"㦎": "hua",
"㦏": "sun",
"㦐": "ni",
"㦑": "lian",
"㦒": "li",
"㦓": "xian",
"㦔": "yan",
"㦕": "long",
"㦖": "men",
"㦗": "jian",
"㦚": "bian",
"㦛": "yu",
"㦜": "huo",
"㦝": "miao",
"㦞": "chou",
"㦟": "hai",
"㦡": "le",
"㦢": "jie",
"㦣": "wei",
"㦤": "yi",
"㦥": "huan",
"㦦": "he",
"㦧": "can",
"㦨": "lan",
"㦩": "yin",
"㦪": "xie",
"㦬": "luo",
"㦭": "ling",
"㦮": "qian",
"㦯": "huo",
"㦰": "jian",
"㦱": "wo",
"㦲": "zai",
"㦴": "ge",
"㦶": "die",
"㦷": "yong",
"㦸": "ji",
"㦹": "ang",
"㦺": "ru",
"㦻": "xi",
"㦼": "shuang",
"㦽": "xu",
"㦾": "yi",
"㦿": "hu",
"㧀": "ji",
"㧁": "qu",
"㧂": "tian",
"㧄": "qian",
"㧅": "mu",
"㧇": "mao",
"㧈": "yin",
"㧉": "gai",
"㧊": "ba",
"㧋": "xian",
"㧌": "mao",
"㧍": "fang",
"㧎": "ya",
"㧏": "gang",
"㧐": "song",
"㧑": "wei",
"㧒": "xue",
"㧔": "guai",
"㧕": "jiu",
"㧖": "e",
"㧗": "zi",
"㧘": "cui",
"㧙": "bi",
"㧚": "wa",
"㧜": "lie",
"㧟": "kuai",
"㧡": "hai",
"㧢": "yin",
"㧣": "zhu",
"㧤": "chong",
"㧥": "xian",
"㧦": "xuan",
"㧨": "qiu",
"㧩": "pei",
"㧪": "gui",
"㧫": "er",
"㧬": "gong",
"㧭": "qiong",
"㧯": "lao",
"㧰": "li",
"㧱": "chen",
"㧲": "san",
"㧳": "bai",
"㧴": "wo",
"㧵": "pou",
"㧷": "duo",
"㧹": "te",
"㧺": "ta",
"㧻": "zhi",
"㧼": "biao",
"㧽": "gu",
"㨀": "bing",
"㨁": "zhi",
"㨂": "dong",
"㨃": "cheng",
"㨄": "zhao",
"㨅": "nei",
"㨆": "lin",
"㨇": "po",
"㨈": "ji",
"㨉": "min",
"㨊": "wei",
"㨋": "che",
"㨌": "gou",
"㨎": "ru",
"㨐": "bu",
"㨒": "kui",
"㨓": "lao",
"㨔": "han",
"㨕": "ying",
"㨖": "zhi",
"㨗": "jie",
"㨘": "xing",
"㨙": "xie",
"㨚": "xun",
"㨛": "shan",
"㨜": "qian",
"㨝": "xie",
"㨞": "su",
"㨟": "hai",
"㨠": "mi",
"㨡": "hun",
"㨤": "hui",
"㨥": "na",
"㨦": "song",
"㨧": "ben",
"㨨": "liu",
"㨩": "jie",
"㨪": "huang",
"㨫": "lan",
"㨭": "hu",
"㨮": "dou",
"㨯": "huo",
"㨰": "ge",
"㨱": "yao",
"㨲": "ce",
"㨳": "gui",
"㨴": "jian",
"㨵": "jian",
"㨶": "chou",
"㨷": "jin",
"㨸": "ma",
"㨹": "hui",
"㨺": "men",
"㨻": "can",
"㨼": "lve",
"㨽": "pi",
"㨾": "yang",
"㨿": "ju",
"㩀": "ju",
"㩁": "que",
"㩃": "qian",
"㩄": "shai",
"㩅": "chou",
"㩆": "jiu",
"㩇": "hua",
"㩈": "xian",
"㩉": "xie",
"㩋": "su",
"㩌": "fei",
"㩍": "ce",
"㩎": "ye",
"㩐": "den",
"㩒": "qin",
"㩓": "hui",
"㩔": "tun",
"㩖": "qiang",
"㩗": "xi",
"㩘": "yi",
"㩚": "meng",
"㩛": "tuan",
"㩜": "lan",
"㩝": "hao",
"㩞": "ci",
"㩟": "zhai",
"㩠": "piao",
"㩡": "luo",
"㩢": "mi",
"㩦": "xie",
"㩧": "bo",
"㩨": "hui",
"㩩": "qi",
"㩪": "xie",
"㩭": "bo",
"㩮": "qian",
"㩯": "ban",
"㩰": "jiao",
"㩱": "jue",
"㩲": "kun",
"㩳": "song",
"㩴": "ju",
"㩵": "e",
"㩶": "nie",
"㩸": "die",
"㩹": "die",
"㩻": "gui",
"㩼": "zhi",
"㩽": "qi",
"㩾": "chui",
"㪀": "yu",
"㪁": "qin",
"㪃": "he",
"㪄": "fu",
"㪆": "di",
"㪇": "xian",
"㪈": "gui",
"㪉": "he",
"㪊": "qun",
"㪋": "han",
"㪌": "tong",
"㪍": "bo",
"㪎": "shan",
"㪏": "bi",
"㪐": "lu",
"㪑": "ye",
"㪒": "ni",
"㪓": "chuai",
"㪔": "san",
"㪕": "diao",
"㪖": "lu",
"㪗": "tou",
"㪘": "lian",
"㪙": "ke",
"㪚": "san",
"㪛": "zhen",
"㪜": "chuai",
"㪝": "lian",
"㪞": "mao",
"㪠": "qian",
"㪡": "ke",
"㪢": "shao",
"㪣": "qiao",
"㪤": "bi",
"㪥": "zha",
"㪦": "yin",
"㪨": "shan",
"㪩": "su",
"㪪": "sa",
"㪫": "rui",
"㪬": "zhuo",
"㪭": "lu",
"㪮": "ling",
"㪯": "cha",
"㪱": "huan",
"㪴": "jia",
"㪵": "ban",
"㪶": "hu",
"㪷": "dou",
"㪹": "lou",
"㪺": "ju",
"㪻": "juan",
"㪼": "ke",
"㪽": "suo",
"㪾": "ge",
"㪿": "zhe",
"㫀": "ding",
"㫁": "duan",
"㫂": "zhu",
"㫃": "yan",
"㫄": "pang",
"㫅": "cha",
"㫊": "yi",
"㫍": "you",
"㫎": "gun",
"㫏": "yao",
"㫐": "yao",
"㫑": "shi",
"㫒": "gong",
"㫓": "qi",
"㫔": "gen",
"㫖": "zhi",
"㫗": "hou",
"㫘": "mi",
"㫙": "fu",
"㫚": "hu",
"㫛": "guang",
"㫜": "dan",
"㫟": "yan",
"㫢": "qu",
"㫤": "chang",
"㫥": "ming",
"㫧": "bao",
"㫫": "xian",
"㫭": "shi",
"㫯": "mao",
"㫰": "lang",
"㫱": "nan",
"㫲": "pei",
"㫳": "chen",
"㫶": "cou",
"㫸": "qie",
"㫹": "dai",
"㫻": "kun",
"㫼": "die",
"㫽": "lu",
"㬂": "yu",
"㬃": "tai",
"㬄": "chan",
"㬅": "man",
"㬆": "mian",
"㬇": "huan",
"㬉": "nuan",
"㬊": "huan",
"㬋": "hou",
"㬌": "jing",
"㬍": "bo",
"㬎": "xian",
"㬏": "li",
"㬐": "jin",
"㬒": "mang",
"㬓": "piao",
"㬔": "hao",
"㬕": "yang",
"㬗": "xian",
"㬘": "su",
"㬙": "wei",
"㬚": "che",
"㬜": "jin",
"㬝": "ceng",
"㬞": "he",
"㬠": "shai",
"㬡": "ling",
"㬣": "dui",
"㬥": "pu",
"㬦": "yue",
"㬧": "bo",
"㬩": "hui",
"㬪": "die",
"㬫": "yan",
"㬬": "ju",
"㬭": "jiao",
"㬮": "kuai",
"㬯": "lie",
"㬰": "yu",
"㬱": "ti",
"㬳": "wu",
"㬴": "hong",
"㬵": "xiao",
"㬶": "hao",
"㬻": "huang",
"㬼": "fu",
"㬿": "dun",
"㭁": "reng",
"㭂": "jiao",
"㭄": "xin",
"㭇": "yuan",
"㭈": "jue",
"㭉": "hua",
"㭋": "bang",
"㭌": "mou",
"㭎": "gang",
"㭏": "wei",
"㭑": "mei",
"㭒": "si",
"㭓": "bian",
"㭔": "lu",
"㭕": "qu",
"㭘": "he",
"㭙": "she",
"㭛": "pai",
"㭜": "rong",
"㭝": "qiu",
"㭞": "lie",
"㭟": "gong",
"㭠": "xian",
"㭡": "xi",
"㭤": "niao",
"㭨": "ye",
"㭩": "lei",
"㭫": "cuan",
"㭬": "zhuo",
"㭭": "fei",
"㭮": "zuo",
"㭯": "die",
"㭰": "ji",
"㭱": "he",
"㭲": "ji",
"㭸": "tu",
"㭹": "xian",
"㭺": "yan",
"㭻": "tang",
"㭼": "ta",
"㭽": "di",
"㭾": "jue",
"㭿": "ang",
"㮀": "han",
"㮁": "yao",
"㮂": "ju",
"㮃": "rui",
"㮄": "bang",
"㮆": "nie",
"㮇": "tian",
"㮈": "nai",
"㮋": "you",
"㮌": "mian",
"㮏": "nai",
"㮐": "xing",
"㮑": "qi",
"㮓": "gen",
"㮔": "tong",
"㮕": "er",
"㮖": "jia",
"㮗": "qin",
"㮘": "mao",
"㮙": "e",
"㮚": "li",
"㮛": "chi",
"㮝": "he",
"㮞": "jie",
"㮟": "ji",
"㮡": "guan",
"㮢": "hou",
"㮣": "gai",
"㮥": "fen",
"㮦": "se",
"㮨": "ji",
"㮩": "xi",
"㮪": "qiong",
"㮫": "he",
"㮭": "xian",
"㮮": "jie",
"㮯": "hua",
"㮰": "bi",
"㮳": "zhen",
"㮶": "shi",
"㮸": "song",
"㮹": "zhi",
"㮺": "ben",
"㮾": "lang",
"㮿": "bi",
"㯀": "xian",
"㯁": "bang",
"㯂": "dai",
"㯅": "pi",
"㯆": "chan",
"㯇": "bi",
"㯈": "su",
"㯉": "huo",
"㯊": "hen",
"㯋": "ying",
"㯌": "chuan",
"㯍": "jiang",
"㯎": "nen",
"㯏": "gu",
"㯐": "fang",
"㯓": "ta",
"㯔": "cui",
"㯕": "xi",
"㯖": "de",
"㯗": "ran",
"㯘": "kuan",
"㯙": "che",
"㯚": "da",
"㯛": "hu",
"㯜": "cui",
"㯝": "lu",
"㯞": "juan",
"㯟": "lu",
"㯠": "qian",
"㯡": "pao",
"㯢": "zhen",
"㯤": "li",
"㯥": "cao",
"㯦": "qi",
"㯩": "ti",
"㯪": "ling",
"㯫": "qu",
"㯬": "lian",
"㯭": "lu",
"㯮": "shu",
"㯯": "gong",
"㯰": "zhe",
"㯱": "biao",
"㯲": "jin",
"㯳": "qing",
"㯶": "zong",
"㯷": "pu",
"㯸": "jin",
"㯹": "biao",
"㯺": "jian",
"㯻": "gun",
"㯿": "lie",
"㰀": "li",
"㰁": "luo",
"㰂": "shen",
"㰃": "mian",
"㰄": "jian",
"㰅": "di",
"㰆": "bei",
"㰈": "lian",
"㰊": "xun",
"㰋": "pin",
"㰌": "que",
"㰍": "long",
"㰎": "zui",
"㰐": "jue",
"㰒": "she",
"㰔": "xie",
"㰖": "lan",
"㰗": "cu",
"㰘": "yi",
"㰙": "nuo",
"㰚": "li",
"㰛": "yue",
"㰝": "yi",
"㰟": "ji",
"㰠": "kang",
"㰡": "xie",
"㰣": "zi",
"㰤": "he",
"㰥": "hui",
"㰦": "qu",
"㰪": "wa",
"㰬": "xun",
"㰮": "shen",
"㰯": "tou",
"㰰": "qie",
"㰱": "sha",
"㰲": "xu",
"㰳": "ya",
"㰴": "po",
"㰵": "zu",
"㰶": "you",
"㰷": "zi",
"㰸": "lian",
"㰹": "jin",
"㰺": "xia",
"㰻": "yi",
"㰼": "qie",
"㰽": "mi",
"㰾": "jiao",
"㱀": "chi",
"㱁": "shi",
"㱂": "kang",
"㱃": "yin",
"㱄": "mo",
"㱅": "yi",
"㱇": "se",
"㱈": "jin",
"㱉": "ye",
"㱋": "que",
"㱌": "che",
"㱍": "luan",
"㱏": "zheng",
"㱔": "xie",
"㱖": "cui",
"㱘": "an",
"㱙": "xiu",
"㱚": "can",
"㱛": "chuan",
"㱜": "zha",
"㱞": "ji",
"㱟": "bo",
"㱠": "ku",
"㱢": "lang",
"㱣": "tui",
"㱥": "ling",
"㱦": "e",
"㱧": "wo",
"㱨": "lian",
"㱩": "du",
"㱪": "men",
"㱫": "lan",
"㱬": "wei",
"㱭": "duan",
"㱮": "kuai",
"㱯": "ai",
"㱰": "zai",
"㱱": "hui",
"㱲": "yi",
"㱳": "mo",
"㱴": "zi",
"㱵": "ben",
"㱶": "beng",
"㱸": "bi",
"㱹": "li",
"㱺": "lu",
"㱻": "luo",
"㱽": "dan",
"㱾": "gai",
"㱿": "que",
"㲀": "chen",
"㲂": "cheng",
"㲃": "jiu",
"㲄": "kou",
"㲅": "ji",
"㲆": "ling",
"㲈": "shao",
"㲉": "que",
"㲊": "rui",
"㲋": "chuo",
"㲌": "neng",
"㲎": "lou",
"㲏": "bao",
"㲒": "bao",
"㲓": "rong",
"㲕": "lei",
"㲘": "qu",
"㲚": "sha",
"㲛": "zhi",
"㲜": "tan",
"㲝": "rong",
"㲞": "zu",
"㲟": "ying",
"㲠": "mao",
"㲡": "nai",
"㲢": "bian",
"㲥": "tang",
"㲦": "han",
"㲧": "zao",
"㲨": "rong",
"㲪": "deng",
"㲫": "pu",
"㲭": "tan",
"㲯": "ran",
"㲰": "ning",
"㲱": "lie",
"㲲": "die",
"㲳": "die",
"㲴": "zhong",
"㲶": "lv",
"㲷": "dan",
"㲹": "gui",
"㲺": "ji",
"㲻": "ni",
"㲼": "yi",
"㲽": "nian",
"㲾": "yu",
"㲿": "wang",
"㳀": "guo",
"㳁": "ze",
"㳂": "yan",
"㳃": "cui",
"㳄": "xian",
"㳅": "jiao",
"㳆": "shu",
"㳇": "fu",
"㳈": "pei",
"㳍": "bu",
"㳎": "bian",
"㳏": "chi",
"㳐": "sa",
"㳑": "yi",
"㳒": "fa",
"㳔": "dui",
"㳕": "lan",
"㳗": "chai",
"㳘": "chong",
"㳙": "xuan",
"㳚": "yu",
"㳛": "yu",
"㳠": "ta",
"㳥": "ju",
"㳦": "xie",
"㳧": "xi",
"㳨": "jian",
"㳪": "pan",
"㳫": "ta",
"㳬": "xuan",
"㳭": "xian",
"㳮": "niao",
"㳴": "mi",
"㳵": "ji",
"㳶": "gou",
"㳷": "wen",
"㳹": "wang",
"㳺": "you",
"㳻": "ze",
"㳼": "bi",
"㳽": "mi",
"㳿": "xie",
"㴀": "fan",
"㴁": "yi",
"㴃": "lei",
"㴄": "ying",
"㴆": "jin",
"㴇": "she",
"㴈": "yin",
"㴉": "ji",
"㴋": "su",
"㴏": "wang",
"㴐": "mian",
"㴑": "su",
"㴒": "yi",
"㴓": "zai",
"㴔": "se",
"㴕": "ji",
"㴖": "luo",
"㴘": "mao",
"㴙": "zha",
"㴚": "sui",
"㴛": "zhi",
"㴜": "bian",
"㴝": "li",
"㴞": "tao",
"㴥": "qiao",
"㴦": "guan",
"㴨": "zhen",
"㴩": "yong",
"㴪": "nie",
"㴫": "jun",
"㴬": "xie",
"㴭": "yao",
"㴮": "xie",
"㴰": "neng",
"㴲": "si",
"㴳": "long",
"㴴": "chen",
"㴵": "mi",
"㴶": "que",
"㴷": "dan",
"㴸": "na",
"㴼": "su",
"㴽": "xie",
"㴾": "bo",
"㴿": "ding",
"㵀": "cuan",
"㵂": "chuang",
"㵃": "she",
"㵄": "han",
"㵅": "dan",
"㵆": "hao",
"㵊": "shen",
"㵋": "mi",
"㵌": "chan",
"㵍": "men",
"㵎": "han",
"㵏": "cui",
"㵐": "jue",
"㵑": "he",
"㵒": "fei",
"㵓": "shi",
"㵔": "che",
"㵕": "shen",
"㵗": "fu",
"㵘": "man",
"㵝": "yi",
"㵞": "chou",
"㵡": "bao",
"㵢": "lei",
"㵣": "ke",
"㵤": "dian",
"㵥": "bi",
"㵦": "sui",
"㵧": "ge",
"㵨": "bi",
"㵩": "yi",
"㵪": "xian",
"㵫": "ni",
"㵬": "ying",
"㵭": "zhu",
"㵮": "chun",
"㵯": "feng",
"㵰": "xu",
"㵱": "piao",
"㵲": "wu",
"㵳": "liao",
"㵴": "cang",
"㵵": "zou",
"㵷": "bian",
"㵸": "yao",
"㵹": "huan",
"㵺": "pai",
"㵻": "sou",
"㵼": "xie",
"㵽": "dui",
"㵾": "jing",
"㵿": "xi",
"㶁": "guo",
"㶄": "yan",
"㶅": "xue",
"㶆": "chu",
"㶇": "heng",
"㶈": "ying",
"㶌": "lian",
"㶍": "xian",
"㶎": "huan",
"㶑": "lian",
"㶒": "shan",
"㶓": "cang",
"㶔": "bei",
"㶕": "jian",
"㶖": "shu",
"㶗": "fan",
"㶘": "dian",
"㶚": "ba",
"㶛": "yu",
"㶞": "nang",
"㶟": "lei",
"㶠": "yi",
"㶡": "dai",
"㶣": "chan",
"㶤": "chao",
"㶦": "jin",
"㶧": "nen",
"㶫": "liao",
"㶬": "mei",
"㶭": "jiu",
"㶯": "liu",
"㶰": "han",
"㶲": "yong",
"㶳": "jin",
"㶴": "chi",
"㶵": "ren",
"㶶": "nong",
"㶹": "hong",
"㶺": "tian",
"㶿": "bo",
"㷀": "qiong",
"㷂": "shu",
"㷃": "cui",
"㷄": "hui",
"㷅": "chao",
"㷆": "dou",
"㷇": "guai",
"㷈": "e",
"㷉": "wei",
"㷊": "fen",
"㷋": "tan",
"㷍": "lun",
"㷎": "he",
"㷏": "yong",
"㷐": "hui",
"㷒": "yu",
"㷓": "zong",
"㷔": "yan",
"㷕": "qiu",
"㷖": "zhao",
"㷗": "jiong",
"㷘": "tai",
"㷟": "tui",
"㷠": "lin",
"㷡": "jiong",
"㷢": "zha",
"㷤": "he",
"㷦": "xu",
"㷩": "xi",
"㷪": "cui",
"㷫": "qing",
"㷬": "mo",
"㷭": "feng",
"㷮": "zao",
"㷯": "beng",
"㷰": "li",
"㷳": "yan",
"㷴": "ge",
"㷵": "mo",
"㷶": "bei",
"㷷": "juan",
"㷸": "die",
"㷹": "shao",
"㷻": "wu",
"㷼": "yan",
"㷾": "jue",
"㸀": "tai",
"㸁": "han",
"㸃": "dian",
"㸄": "ji",
"㸅": "jie",
"㸆": "kao",
"㸉": "xie",
"㸊": "la",
"㸋": "fan",
"㸌": "huo",
"㸍": "xi",
"㸎": "nie",
"㸏": "mi",
"㸐": "ran",
"㸑": "cuan",
"㸒": "yin",
"㸓": "mi",
"㸔": "kan",
"㸕": "jue",
"㸗": "tong",
"㸘": "wan",
"㸚": "li",
"㸛": "shao",
"㸜": "kong",
"㸝": "kan",
"㸞": "ban",
"㸠": "tiao",
"㸢": "bei",
"㸣": "ye",
"㸤": "pian",
"㸥": "chan",
"㸦": "hu",
"㸧": "ken",
"㸩": "an",
"㸪": "chun",
"㸫": "qian",
"㸬": "bei",
"㸮": "fen",
"㸰": "tuo",
"㸱": "tuo",
"㸲": "zuo",
"㸳": "ling",
"㸵": "gui",
"㸷": "shi",
"㸸": "hou",
"㸹": "lie",
"㸺": "sha",
"㸻": "si",
"㸽": "bei",
"㸾": "ren",
"㸿": "du",
"㹀": "bo",
"㹁": "liang",
"㹂": "ci",
"㹃": "bi",
"㹄": "ji",
"㹅": "zong",
"㹇": "he",
"㹈": "li",
"㹉": "yuan",
"㹊": "yue",
"㹌": "chan",
"㹍": "di",
"㹎": "lei",
"㹏": "jin",
"㹐": "chong",
"㹑": "si",
"㹒": "pu",
"㹓": "yi",
"㹔": "jiang",
"㹖": "huan",
"㹗": "tao",
"㹘": "ru",
"㹙": "weng",
"㹚": "weng",
"㹛": "rao",
"㹜": "yin",
"㹝": "shi",
"㹞": "yin",
"㹟": "jue",
"㹠": "tun",
"㹡": "xuan",
"㹣": "zhong",
"㹤": "qie",
"㹥": "zhu",
"㹨": "you",
"㹫": "xi",
"㹬": "shi",
"㹭": "yi",
"㹮": "mo",
"㹱": "hu",
"㹲": "xiao",
"㹳": "wu",
"㹵": "jing",
"㹶": "ting",
"㹷": "shi",
"㹸": "ni",
"㹺": "ta",
"㹼": "chu",
"㹽": "chan",
"㹾": "piao",
"㹿": "diao",
"㺀": "nao",
"㺁": "nao",
"㺂": "gan",
"㺃": "gou",
"㺄": "yu",
"㺅": "hou",
"㺇": "si",
"㺈": "chi",
"㺉": "hu",
"㺊": "yang",
"㺌": "xian",
"㺎": "rong",
"㺏": "lou",
"㺐": "zhao",
"㺑": "can",
"㺒": "liao",
"㺓": "piao",
"㺔": "hai",
"㺕": "fan",
"㺖": "han",
"㺗": "dan",
"㺘": "zhan",
"㺚": "ta",
"㺛": "zhu",
"㺜": "nong",
"㺝": "jian",
"㺞": "yu",
"㺟": "zhuo",
"㺠": "you",
"㺡": "li",
"㺣": "xi",
"㺥": "chan",
"㺦": "lian",
"㺨": "si",
"㺩": "jiu",
"㺪": "pu",
"㺫": "qiu",
"㺬": "gong",
"㺭": "zi",
"㺮": "yu",
"㺱": "reng",
"㺲": "niu",
"㺳": "mei",
"㺵": "jiu",
"㺷": "xu",
"㺸": "ping",
"㺹": "bian",
"㺺": "mao",
"㺿": "yi",
"㻀": "you",
"㻂": "ping",
"㻃": "qu",
"㻄": "bao",
"㻅": "hui",
"㻉": "bu",
"㻊": "mang",
"㻋": "la",
"㻌": "tu",
"㻍": "wu",
"㻎": "li",
"㻏": "ling",
"㻑": "ji",
"㻒": "jun",
"㻔": "duo",
"㻕": "jue",
"㻖": "dai",
"㻗": "bei",
"㻝": "la",
"㻞": "bian",
"㻟": "sui",
"㻠": "tu",
"㻡": "die",
"㻧": "duo",
"㻪": "sui",
"㻫": "bi",
"㻬": "tu",
"㻭": "se",
"㻮": "can",
"㻯": "tu",
"㻰": "mian",
"㻵": "zhan",
"㻶": "bi",
"㻷": "ji",
"㻸": "cen",
"㻺": "li",
"㻽": "sui",
"㻿": "shu",
"㼂": "e",
"㼆": "ying",
"㼇": "qiong",
"㼈": "luo",
"㼉": "yin",
"㼊": "tun",
"㼋": "gu",
"㼌": "yu",
"㼍": "lei",
"㼎": "bei",
"㼏": "nei",
"㼐": "pian",
"㼑": "lian",
"㼒": "qiu",
"㼓": "lian",
"㼖": "li",
"㼗": "ding",
"㼘": "wa",
"㼙": "zhou",
"㼛": "xing",
"㼜": "ang",
"㼝": "fan",
"㼞": "peng",
"㼟": "bai",
"㼠": "tuo",
"㼢": "e",
"㼣": "bai",
"㼤": "qi",
"㼥": "chu",
"㼦": "gong",
"㼧": "tong",
"㼨": "han",
"㼩": "cheng",
"㼪": "jia",
"㼫": "huan",
"㼬": "xing",
"㼭": "dian",
"㼮": "chai",
"㼯": "dong",
"㼰": "e",
"㼱": "ruan",
"㼲": "lie",
"㼳": "sheng",
"㼴": "ou",
"㼵": "di",
"㼶": "yu",
"㼷": "chuan",
"㼸": "rong",
"㼺": "tang",
"㼻": "cong",
"㼼": "piao",
"㼽": "shuang",
"㼾": "lu",
"㼿": "tong",
"㽀": "zheng",
"㽁": "li",
"㽂": "sa",
"㽇": "guai",
"㽈": "yi",
"㽉": "han",
"㽊": "xie",
"㽋": "luo",
"㽌": "liu",
"㽎": "dan",
"㽏": "gan",
"㽑": "tan",
"㽕": "you",
"㽖": "nan",
"㽘": "gang",
"㽙": "jun",
"㽚": "chi",
"㽛": "gou",
"㽜": "wan",
"㽝": "li",
"㽞": "liu",
"㽟": "lie",
"㽠": "xia",
"㽡": "bei",
"㽢": "an",
"㽣": "yu",
"㽤": "ju",
"㽥": "rou",
"㽦": "xun",
"㽨": "cuo",
"㽩": "can",
"㽪": "zeng",
"㽫": "yong",
"㽬": "fu",
"㽭": "ruan",
"㽯": "xi",
"㽰": "shu",
"㽱": "jiao",
"㽲": "jiao",
"㽳": "han",
"㽴": "zhang",
"㽷": "shui",
"㽸": "chen",
"㽹": "fan",
"㽺": "ji",
"㽽": "gu",
"㽾": "wu",
"㾀": "qie",
"㾁": "shu",
"㾃": "tuo",
"㾄": "du",
"㾅": "zi",
"㾆": "ran",
"㾇": "mu",
"㾈": "fu",
"㾉": "ling",
"㾊": "ji",
"㾋": "xiu",
"㾌": "xuan",
"㾍": "nai",
"㾏": "jie",
"㾐": "li",
"㾑": "da",
"㾒": "ji",
"㾓": "yun",
"㾕": "shen",
"㾖": "li",
"㾗": "lang",
"㾘": "geng",
"㾙": "yin",
"㾛": "qin",
"㾜": "qie",
"㾝": "che",
"㾞": "you",
"㾟": "bu",
"㾠": "huang",
"㾡": "que",
"㾢": "lai",
"㾥": "xu",
"㾦": "bang",
"㾧": "ke",
"㾨": "qi",
"㾪": "sheng",
"㾫": "pian",
"㾭": "zhou",
"㾮": "huang",
"㾯": "tui",
"㾰": "hu",
"㾱": "bei",
"㾵": "ji",
"㾶": "gu",
"㾸": "gao",
"㾹": "chai",
"㾺": "ma",
"㾻": "zhu",
"㾼": "tui",
"㾽": "tui",
"㾾": "lian",
"㾿": "lang",
"㿃": "dai",
"㿄": "ai",
"㿅": "xian",
"㿇": "xi",
"㿉": "tui",
"㿊": "can",
"㿋": "sao",
"㿍": "jie",
"㿎": "fen",
"㿏": "qun",
"㿑": "yao",
"㿒": "dao",
"㿓": "jia",
"㿔": "lei",
"㿕": "yan",
"㿖": "lu",
"㿗": "tui",
"㿘": "ying",
"㿙": "pi",
"㿚": "luo",
"㿛": "li",
"㿜": "bie",
"㿞": "mao",
"㿟": "bai",
"㿠": "huang",
"㿢": "yao",
"㿣": "he",
"㿤": "chun",
"㿥": "hu",
"㿦": "ning",
"㿧": "chou",
"㿨": "li",
"㿩": "tang",
"㿪": "huan",
"㿫": "bi",
"㿭": "che",
"㿮": "yang",
"㿯": "da",
"㿰": "ao",
"㿱": "xue",
"㿵": "ran",
"㿷": "cuo",
"㿸": "wan",
"㿹": "ta",
"㿺": "bao",
"㿼": "yan",
"㿾": "zhu",
"㿿": "ya",
"䀀": "fan",
"䀁": "you",
"䀃": "tui",
"䀄": "meng",
"䀅": "she",
"䀆": "jin",
"䀇": "gu",
"䀈": "qi",
"䀉": "qiao",
"䀊": "jiao",
"䀋": "yan",
"䀍": "kan",
"䀎": "mian",
"䀏": "xian",
"䀐": "san",
"䀑": "na",
"䀓": "huan",
"䀔": "niu",
"䀕": "cheng",
"䀗": "jue",
"䀘": "xi",
"䀙": "qi",
"䀚": "ang",
"䀛": "mei",
"䀜": "gu",
"䀟": "fan",
"䀠": "qu",
"䀡": "chan",
"䀢": "shun",
"䀣": "bi",
"䀤": "mao",
"䀥": "shuo",
"䀦": "gu",
"䀧": "hong",
"䀨": "huan",
"䀩": "luo",
"䀪": "hang",
"䀫": "jia",
"䀬": "quan",
"䀭": "gai",
"䀮": "mang",
"䀯": "bu",
"䀰": "gu",
"䀲": "mu",
"䀳": "ai",
"䀴": "ying",
"䀵": "shun",
"䀶": "lang",
"䀷": "jie",
"䀸": "di",
"䀹": "jia",
"䀺": "chou",
"䀻": "pin",
"䀼": "ren",
"䀽": "yan",
"䀾": "du",
"䀿": "di",
"䁁": "lang",
"䁂": "xian",
"䁄": "xing",
"䁅": "bei",
"䁆": "an",
"䁇": "mi",
"䁈": "qi",
"䁉": "qi",
"䁊": "wo",
"䁋": "she",
"䁌": "yu",
"䁍": "jia",
"䁎": "cheng",
"䁏": "yao",
"䁐": "ying",
"䁑": "yang",
"䁒": "ji",
"䁓": "jie",
"䁔": "han",
"䁕": "min",
"䁖": "lou",
"䁗": "kai",
"䁘": "yao",
"䁙": "yan",
"䁚": "sun",
"䁛": "gui",
"䁜": "huang",
"䁝": "ying",
"䁞": "sheng",
"䁟": "cha",
"䁠": "lian",
"䁢": "xuan",
"䁣": "chuan",
"䁤": "che",
"䁥": "ni",
"䁦": "qu",
"䁧": "miao",
"䁨": "huo",
"䁩": "yu",
"䁪": "nan",
"䁫": "hu",
"䁬": "ceng",
"䁮": "qian",
"䁯": "she",
"䁰": "jiang",
"䁱": "ao",
"䁲": "mai",
"䁳": "mang",
"䁴": "zhan",
"䁵": "bian",
"䁶": "jiao",
"䁷": "jue",
"䁸": "nong",
"䁹": "bi",
"䁺": "shi",
"䁻": "li",
"䁼": "mo",
"䁽": "lie",
"䁾": "mie",
"䁿": "mo",
"䂁": "chan",
"䂂": "qu",
"䂃": "jiao",
"䂄": "huo",
"䂆": "xu",
"䂇": "nang",
"䂈": "tong",
"䂉": "hou",
"䂊": "yu",
"䂍": "bo",
"䂎": "zuan",
"䂐": "chuo",
"䂒": "jie",
"䂔": "xing",
"䂕": "hui",
"䂖": "shi",
"䂚": "yao",
"䂛": "yu",
"䂜": "bang",
"䂝": "jie",
"䂞": "zhe",
"䂠": "she",
"䂡": "di",
"䂢": "dong",
"䂣": "ci",
"䂤": "fu",
"䂥": "min",
"䂦": "zhen",
"䂧": "zhen",
"䂩": "yan",
"䂪": "diao",
"䂫": "hong",
"䂬": "gong",
"䂭": "qiao",
"䂯": "guai",
"䂰": "la",
"䂱": "cui",
"䂲": "fa",
"䂳": "cuo",
"䂴": "yan",
"䂶": "jie",
"䂸": "guo",
"䂹": "suo",
"䂺": "wan",
"䂻": "zheng",
"䂼": "nie",
"䂽": "diao",
"䂾": "lai",
"䂿": "ta",
"䃀": "cui",
"䃁": "ya",
"䃂": "gun",
"䃇": "mian",
"䃈": "jie",
"䃉": "min",
"䃊": "ju",
"䃋": "yu",
"䃍": "zhao",
"䃎": "zha",
"䃑": "pan",
"䃒": "he",
"䃓": "gou",
"䃔": "hong",
"䃕": "lao",
"䃖": "wu",
"䃗": "chuo",
"䃙": "lu",
"䃚": "cu",
"䃛": "lian",
"䃝": "qiao",
"䃞": "shu",
"䃡": "cen",
"䃣": "hui",
"䃤": "su",
"䃥": "chuang",
"䃧": "long",
"䃩": "nao",
"䃪": "tan",
"䃫": "dan",
"䃬": "wei",
"䃭": "gan",
"䃮": "da",
"䃯": "li",
"䃱": "xian",
"䃲": "pan",
"䃳": "la",
"䃵": "niao",
"䃶": "huai",
"䃷": "ying",
"䃸": "xian",
"䃹": "lan",
"䃺": "mo",
"䃻": "ba",
"䃽": "fu",
"䃾": "bi",
"䄀": "huo",
"䄁": "yi",
"䄂": "liu",
"䄅": "juan",
"䄆": "huo",
"䄇": "cheng",
"䄈": "dou",
"䄉": "e",
"䄋": "yan",
"䄌": "zhui",
"䄍": "du",
"䄎": "qi",
"䄏": "yu",
"䄐": "quan",
"䄑": "huo",
"䄒": "nie",
"䄓": "heng",
"䄔": "ju",
"䄕": "she",
"䄘": "peng",
"䄙": "ming",
"䄚": "cao",
"䄛": "lou",
"䄜": "li",
"䄝": "chun",
"䄟": "cui",
"䄠": "shan",
"䄢": "qi",
"䄤": "lai",
"䄥": "ling",
"䄦": "liao",
"䄧": "reng",
"䄨": "yu",
"䄩": "nao",
"䄪": "chuo",
"䄫": "qi",
"䄬": "yi",
"䄭": "nian",
"䄯": "jian",
"䄰": "ya",
"䄲": "chui",
"䄶": "bi",
"䄷": "dan",
"䄸": "po",
"䄹": "nian",
"䄺": "zhi",
"䄻": "chao",
"䄼": "tian",
"䄽": "tian",
"䄾": "rou",
"䄿": "yi",
"䅀": "lie",
"䅁": "an",
"䅂": "he",
"䅃": "qiong",
"䅄": "li",
"䅆": "zi",
"䅇": "su",
"䅈": "yuan",
"䅉": "ya",
"䅊": "du",
"䅋": "wan",
"䅌": "juan",
"䅍": "dong",
"䅎": "you",
"䅏": "hui",
"䅐": "jian",
"䅑": "rui",
"䅒": "mang",
"䅓": "ju",
"䅖": "an",
"䅗": "sui",
"䅘": "lai",
"䅙": "hun",
"䅚": "qiang",
"䅜": "duo",
"䅞": "na",
"䅟": "can",
"䅠": "ti",
"䅡": "xu",
"䅢": "jiu",
"䅣": "huang",
"䅤": "qi",
"䅥": "jie",
"䅦": "mao",
"䅧": "yan",
"䅩": "zhi",
"䅪": "tui",
"䅬": "ai",
"䅭": "pang",
"䅮": "cang",
"䅯": "tang",
"䅰": "en",
"䅱": "hun",
"䅲": "qi",
"䅳": "chu",
"䅴": "suo",
"䅵": "zhuo",
"䅶": "nou",
"䅷": "tu",
"䅸": "zu",
"䅹": "lou",
"䅺": "miao",
"䅻": "li",
"䅼": "man",
"䅽": "gu",
"䅾": "cen",
"䅿": "hua",
"䆀": "mei",
"䆂": "lian",
"䆃": "dao",
"䆄": "shan",
"䆅": "ci",
"䆈": "zhi",
"䆉": "ba",
"䆊": "cui",
"䆍": "long",
"䆏": "fei",
"䆐": "guo",
"䆑": "cheng",
"䆒": "jiu",
"䆓": "e",
"䆕": "jue",
"䆖": "hong",
"䆗": "jiao",
"䆘": "cuan",
"䆙": "yao",
"䆚": "tong",
"䆛": "cha",
"䆜": "you",
"䆝": "shu",
"䆞": "yao",
"䆟": "ge",
"䆠": "huan",
"䆡": "lang",
"䆢": "jue",
"䆣": "chen",
"䆦": "shen",
"䆨": "ming",
"䆩": "ming",
"䆬": "yun",
"䆮": "jin",
"䆯": "chuo",
"䆱": "tan",
"䆳": "qiong",
"䆵": "cheng",
"䆷": "yu",
"䆸": "cheng",
"䆹": "tong",
"䆻": "qiao",
"䆽": "ju",
"䆾": "lan",
"䆿": "yi",
"䇀": "rong",
"䇁": "si",
"䇂": "qian",
"䇃": "si",
"䇅": "fa",
"䇇": "meng",
"䇈": "gui",
"䇋": "hai",
"䇌": "qiao",
"䇍": "chuo",
"䇎": "que",
"䇏": "dui",
"䇐": "li",
"䇑": "ba",
"䇒": "jie",
"䇔": "luo",
"䇖": "yun",
"䇘": "hu",
"䇙": "yin",
"䇛": "zhi",
"䇜": "lian",
"䇞": "gan",
"䇟": "jian",
"䇠": "zhou",
"䇡": "zhu",
"䇢": "ku",
"䇣": "na",
"䇤": "dui",
"䇥": "ze",
"䇦": "yang",
"䇧": "zhu",
"䇨": "gong",
"䇩": "yi",
"䇪": "chi",
"䇫": "ji",
"䇬": "chuang",
"䇭": "lao",
"䇮": "ren",
"䇯": "rong",
"䇱": "na",
"䇲": "ce",
"䇵": "yi",
"䇶": "jue",
"䇷": "bi",
"䇸": "cheng",
"䇹": "jun",
"䇺": "chou",
"䇻": "hui",
"䇼": "chi",
"䇽": "zhi",
"䇾": "yan",
"䈁": "lun",
"䈂": "bing",
"䈃": "zhao",
"䈄": "han",
"䈅": "yu",
"䈆": "dai",
"䈇": "zhao",
"䈈": "fei",
"䈉": "sha",
"䈊": "ling",
"䈋": "ta",
"䈍": "mang",
"䈎": "ye",
"䈏": "bao",
"䈐": "kui",
"䈑": "gua",
"䈒": "nan",
"䈓": "ge",
"䈕": "chi",
"䈗": "suo",
"䈘": "ci",
"䈙": "zhou",
"䈚": "tai",
"䈛": "kuai",
"䈜": "qin",
"䈞": "du",
"䈟": "ce",
"䈠": "huan",
"䈢": "sai",
"䈣": "zheng",
"䈤": "qian",
"䈧": "wei",
"䈪": "xi",
"䈫": "na",
"䈬": "pu",
"䈭": "huai",
"䈮": "ju",
"䈰": "shao",
"䈲": "pan",
"䈳": "ta",
"䈴": "qian",
"䈵": "weng",
"䈶": "rong",
"䈷": "luo",
"䈸": "hu",
"䈹": "sou",
"䈻": "pu",
"䈼": "mie",
"䈾": "shao",
"䈿": "mai",
"䉀": "shu",
"䉁": "ling",
"䉂": "lei",
"䉃": "jiang",
"䉄": "leng",
"䉅": "zhi",
"䉆": "diao",
"䉈": "san",
"䉉": "hu",
"䉊": "fan",
"䉋": "mei",
"䉌": "sui",
"䉍": "jian",
"䉎": "tang",
"䉏": "xie",
"䉑": "mo",
"䉒": "fan",
"䉓": "lei",
"䉕": "ceng",
"䉖": "ling",
"䉘": "cong",
"䉙": "yun",
"䉚": "meng",
"䉛": "yu",
"䉜": "zhi",
"䉝": "qi",
"䉞": "dan",
"䉟": "huo",
"䉠": "wei",
"䉡": "tan",
"䉢": "se",
"䉣": "xie",
"䉤": "sou",
"䉥": "song",
"䉧": "liu",
"䉨": "yi",
"䉪": "lei",
"䉫": "li",
"䉬": "fei",
"䉭": "lie",
"䉮": "lin",
"䉯": "xian",
"䉰": "yao",
"䉲": "bie",
"䉳": "xian",
"䉴": "rang",
"䉵": "zhuan",
"䉷": "dan",
"䉸": "bian",
"䉹": "ling",
"䉺": "hong",
"䉻": "qi",
"䉼": "liao",
"䉽": "ban",
"䉾": "mi",
"䉿": "hu",
"䊀": "hu",
"䊂": "ce",
"䊃": "pei",
"䊄": "qiong",
"䊅": "ming",
"䊆": "jiu",
"䊇": "bu",
"䊈": "mei",
"䊉": "san",
"䊊": "mei",
"䊍": "li",
"䊎": "quan",
"䊐": "en",
"䊑": "xiang",
"䊓": "shi",
"䊖": "lan",
"䊗": "huang",
"䊘": "jiu",
"䊙": "yan",
"䊚": "dui",
"䊛": "sa",
"䊜": "tuan",
"䊝": "xie",
"䊞": "zhe",
"䊟": "men",
"䊠": "xi",
"䊡": "man",
"䊣": "huang",
"䊤": "tan",
"䊥": "xiao",
"䊦": "ya",
"䊧": "bi",
"䊨": "luo",
"䊩": "fan",
"䊪": "li",
"䊫": "cui",
"䊬": "cha",
"䊭": "chou",
"䊮": "di",
"䊯": "kuang",
"䊰": "chu",
"䊲": "chan",
"䊳": "mi",
"䊴": "qian",
"䊵": "qiu",
"䊶": "zhen",
"䊺": "gu",
"䊻": "yan",
"䊼": "chi",
"䊽": "guai",
"䊾": "mu",
"䊿": "bo",
"䋀": "kua",
"䋁": "geng",
"䋂": "yao",
"䋃": "mao",
"䋄": "wang",
"䋈": "ru",
"䋉": "jue",
"䋊": "zheng",
"䋋": "min",
"䋌": "jiang",
"䋎": "zhan",
"䋏": "zuo",
"䋐": "yue",
"䋑": "bing",
"䋓": "zhou",
"䋔": "bi",
"䋕": "ren",
"䋖": "yu",
"䋘": "chuo",
"䋙": "er",
"䋚": "yi",
"䋛": "mi",
"䋜": "qing",
"䋞": "wang",
"䋟": "ji",
"䋠": "bu",
"䋢": "bie",
"䋣": "fan",
"䋤": "yao",
"䋥": "li",
"䋦": "fan",
"䋧": "qu",
"䋨": "fu",
"䋩": "er",
"䋫": "zheng",
"䋭": "huo",
"䋮": "jin",
"䋯": "qi",
"䋰": "ju",
"䋱": "lai",
"䋲": "che",
"䋳": "bei",
"䋴": "niu",
"䋵": "yi",
"䋶": "xu",
"䋷": "liu",
"䋸": "xun",
"䋹": "fu",
"䋻": "nin",
"䋼": "ting",
"䋽": "beng",
"䋾": "zha",
"䌂": "ou",
"䌃": "shuo",
"䌄": "geng",
"䌅": "tang",
"䌆": "gui",
"䌇": "suo",
"䌈": "ta",
"䌊": "yao",
"䌌": "qi",
"䌍": "han",
"䌏": "mi",
"䌐": "mi",
"䌒": "lu",
"䌓": "fan",
"䌔": "ou",
"䌕": "mi",
"䌖": "jie",
"䌗": "fu",
"䌘": "mi",
"䌙": "huang",
"䌚": "su",
"䌛": "yao",
"䌜": "nie",
"䌝": "jin",
"䌞": "lian",
"䌟": "bi",
"䌠": "qing",
"䌡": "ti",
"䌢": "ling",
"䌣": "zuan",
"䌤": "zhi",
"䌥": "yin",
"䌦": "dao",
"䌧": "chou",
"䌨": "cai",
"䌩": "mi",
"䌪": "yan",
"䌫": "lan",
"䌬": "chong",
"䌯": "guan",
"䌰": "she",
"䌱": "luo",
"䌳": "shi",
"䌴": "luo",
"䌵": "zhu",
"䌷": "chou",
"䌸": "juan",
"䌹": "jiong",
"䌺": "er",
"䌻": "yi",
"䌼": "rui",
"䌽": "cai",
"䌾": "ren",
"䌿": "fu",
"䍀": "lan",
"䍁": "sui",
"䍂": "yu",
"䍃": "yao",
"䍄": "dian",
"䍅": "ling",
"䍆": "zhu",
"䍇": "ta",
"䍈": "ping",
"䍉": "qian",
"䍊": "jue",
"䍋": "chui",
"䍌": "bu",
"䍍": "gu",
"䍎": "cun",
"䍐": "han",
"䍑": "han",
"䍒": "mou",
"䍓": "hu",
"䍔": "hong",
"䍕": "di",
"䍖": "fu",
"䍗": "xuan",
"䍘": "mi",
"䍙": "mei",
"䍚": "lang",
"䍛": "gu",
"䍜": "zhao",
"䍝": "ta",
"䍞": "yu",
"䍟": "zong",
"䍠": "li",
"䍡": "liao",
"䍢": "wu",
"䍣": "lei",
"䍤": "ji",
"䍥": "lei",
"䍦": "li",
"䍨": "bo",
"䍩": "ang",
"䍪": "kui",
"䍫": "tuo",
"䍮": "zhao",
"䍯": "gui",
"䍱": "xu",
"䍲": "nai",
"䍳": "chuo",
"䍴": "duo",
"䍶": "dong",
"䍷": "gui",
"䍸": "bo",
"䍺": "huan",
"䍻": "xuan",
"䍼": "can",
"䍽": "li",
"䍾": "tui",
"䍿": "huang",
"䎀": "xue",
"䎁": "hu",
"䎂": "bao",
"䎃": "ran",
"䎄": "tiao",
"䎅": "fu",
"䎆": "liao",
"䎈": "yi",
"䎉": "shu",
"䎊": "po",
"䎋": "he",
"䎌": "cu",
"䎎": "na",
"䎏": "an",
"䎐": "chao",
"䎑": "lu",
"䎒": "zhan",
"䎓": "ta",
"䎗": "qiao",
"䎘": "su",
"䎚": "guan",
"䎝": "chu",
"䎟": "er",
"䎠": "er",
"䎡": "nuan",
"䎢": "qi",
"䎣": "si",
"䎤": "chu",
"䎦": "yan",
"䎧": "bang",
"䎨": "an",
"䎪": "ne",
"䎫": "chuang",
"䎬": "bei",
"䎭": "cao",
"䎮": "ti",
"䎯": "han",
"䎰": "zuo",
"䎱": "bei",
"䎲": "zhe",
"䎳": "wa",
"䎴": "sheng",
"䎵": "bi",
"䎶": "er",
"䎷": "zhu",
"䎸": "wu",
"䎹": "wen",
"䎺": "zhi",
"䎻": "zhou",
"䎼": "lu",
"䎽": "wen",
"䎾": "gun",
"䎿": "qiu",
"䏀": "la",
"䏁": "zai",
"䏂": "sou",
"䏃": "mian",
"䏄": "zhi",
"䏅": "qi",
"䏆": "cao",
"䏇": "piao",
"䏈": "lian",
"䏊": "long",
"䏋": "su",
"䏌": "qi",
"䏍": "yuan",
"䏎": "feng",
"䏐": "jue",
"䏑": "di",
"䏒": "pian",
"䏓": "guan",
"䏔": "niu",
"䏕": "ren",
"䏖": "zhen",
"䏗": "gai",
"䏘": "pi",
"䏙": "tan",
"䏚": "chao",
"䏛": "chun",
"䏝": "chun",
"䏞": "mo",
"䏟": "bie",
"䏠": "qi",
"䏡": "shi",
"䏢": "bi",
"䏣": "jue",
"䏤": "si",
"䏦": "hua",
"䏧": "na",
"䏨": "hui",
"䏪": "er",
"䏬": "mou",
"䏮": "xi",
"䏯": "zhi",
"䏰": "ren",
"䏱": "ju",
"䏲": "die",
"䏳": "zhe",
"䏴": "shao",
"䏵": "meng",
"䏶": "bi",
"䏷": "han",
"䏸": "yu",
"䏹": "xian",
"䏻": "neng",
"䏼": "can",
"䏽": "bu",
"䏿": "qi",
"䐀": "ji",
"䐁": "niao",
"䐂": "lu",
"䐃": "jiong",
"䐄": "han",
"䐅": "yi",
"䐆": "cai",
"䐇": "chun",
"䐈": "zhi",
"䐉": "zi",
"䐊": "da",
"䐌": "tian",
"䐍": "zhou",
"䐏": "chun",
"䐑": "zhe",
"䐓": "rou",
"䐔": "bin",
"䐕": "ji",
"䐖": "yi",
"䐗": "du",
"䐘": "jue",
"䐙": "ge",
"䐚": "ji",
"䐛": "da",
"䐜": "chen",
"䐝": "suo",
"䐞": "ruo",
"䐟": "xiang",
"䐠": "huang",
"䐡": "qi",
"䐢": "zhu",
"䐣": "cuo",
"䐤": "chi",
"䐥": "weng",
"䐧": "kao",
"䐨": "gu",
"䐩": "kai",
"䐪": "fan",
"䐬": "cao",
"䐭": "zhi",
"䐮": "chan",
"䐯": "lei",
"䐲": "zhe",
"䐳": "yu",
"䐴": "gui",
"䐵": "huang",
"䐶": "jin",
"䐸": "guo",
"䐹": "sao",
"䐺": "tan",
"䐼": "xi",
"䐽": "man",
"䐾": "duo",
"䐿": "ao",
"䑀": "pi",
"䑁": "wu",
"䑂": "ai",
"䑃": "meng",
"䑄": "pi",
"䑅": "meng",
"䑆": "yang",
"䑇": "zhi",
"䑈": "bo",
"䑉": "ying",
"䑊": "wei",
"䑋": "nao",
"䑌": "lan",
"䑍": "yan",
"䑎": "chan",
"䑏": "quan",
"䑐": "zhen",
"䑑": "pu",
"䑓": "tai",
"䑔": "fei",
"䑕": "shu",
"䑗": "dang",
"䑘": "cha",
"䑙": "ran",
"䑚": "tian",
"䑛": "chi",
"䑜": "ta",
"䑝": "jia",
"䑞": "shun",
"䑟": "huang",
"䑠": "liao",
"䑡": "cha",
"䑣": "chen",
"䑤": "jin",
"䑥": "e",
"䑦": "gou",
"䑧": "fu",
"䑨": "duo",
"䑪": "e",
"䑬": "yao",
"䑭": "di",
"䑯": "di",
"䑰": "bu",
"䑱": "man",
"䑲": "che",
"䑳": "lun",
"䑴": "qi",
"䑵": "mu",
"䑶": "can",
"䑻": "you",
"䑽": "da",
"䑿": "su",
"䒀": "fu",
"䒁": "ji",
"䒂": "jiang",
"䒃": "zao",
"䒄": "bo",
"䒅": "teng",
"䒆": "che",
"䒇": "fu",
"䒈": "bu",
"䒉": "wu",
"䒋": "yang",
"䒌": "ming",
"䒍": "pang",
"䒎": "mang",
"䒐": "meng",
"䒑": "cao",
"䒒": "tiao",
"䒓": "kai",
"䒔": "bai",
"䒕": "xiao",
"䒖": "xin",
"䒗": "qi",
"䒚": "shao",
"䒛": "heng",
"䒜": "niu",
"䒝": "xiao",
"䒞": "chen",
"䒠": "feng",
"䒡": "yin",
"䒢": "ang",
"䒣": "ran",
"䒤": "ri",
"䒥": "fa",
"䒦": "fan",
"䒧": "qu",
"䒨": "shi",
"䒩": "he",
"䒪": "bian",
"䒫": "dai",
"䒬": "mo",
"䒭": "deng",
"䒲": "cha",
"䒳": "duo",
"䒴": "you",
"䒵": "hao",
"䒷": "gua",
"䒸": "xian",
"䒹": "lei",
"䒺": "jin",
"䒻": "qi",
"䒼": "qu",
"䒽": "mei",
"䒿": "liao",
"䓂": "yan",
"䓃": "yi",
"䓄": "yin",
"䓅": "qi",
"䓆": "zhe",
"䓇": "xi",
"䓈": "yi",
"䓉": "ye",
"䓊": "e",
"䓌": "zhi",
"䓍": "han",
"䓎": "chuo",
"䓐": "chun",
"䓑": "bing",
"䓒": "kuai",
"䓓": "chou",
"䓕": "tuo",
"䓖": "qiong",
"䓘": "jiu",
"䓚": "cu",
"䓛": "fu",
"䓜": "zhi",
"䓝": "meng",
"䓞": "li",
"䓟": "lie",
"䓠": "ta",
"䓡": "zhi",
"䓢": "gu",
"䓣": "liang",
"䓥": "la",
"䓦": "dian",
"䓧": "ci",
"䓫": "ji",
"䓭": "cha",
"䓮": "mao",
"䓯": "du",
"䓱": "chai",
"䓲": "rui",
"䓳": "hen",
"䓴": "ruan",
"䓶": "lai",
"䓷": "xing",
"䓹": "yi",
"䓺": "mei",
"䓼": "he",
"䓽": "ji",
"䓾": "suo",
"䓿": "han",
"䔁": "li",
"䔂": "zi",
"䔃": "zu",
"䔄": "yao",
"䔆": "li",
"䔇": "qi",
"䔈": "gan",
"䔉": "li",
"䔌": "gao",
"䔎": "su",
"䔏": "chou",
"䔑": "xie",
"䔒": "bei",
"䔓": "xu",
"䔔": "jing",
"䔕": "pu",
"䔖": "ling",
"䔗": "xiang",
"䔘": "zuo",
"䔙": "diao",
"䔚": "chun",
"䔛": "qing",
"䔜": "nan",
"䔟": "chi",
"䔠": "shao",
"䔡": "yu",
"䔢": "hua",
"䔣": "li",
"䔤": "pa",
"䔧": "li",
"䔪": "dui",
"䔫": "shu",
"䔬": "yi",
"䔭": "ning",
"䔮": "si",
"䔯": "hu",
"䔰": "fu",
"䔲": "cheng",
"䔳": "nan",
"䔴": "ce",
"䔶": "ti",
"䔷": "qin",
"䔸": "biao",
"䔹": "sui",
"䔺": "wei",
"䔼": "se",
"䔽": "ai",
"䔾": "e",
"䔿": "jie",
"䕀": "kuan",
"䕁": "fei",
"䕃": "yin",
"䕅": "sao",
"䕆": "dou",
"䕇": "hui",
"䕈": "xie",
"䕉": "ze",
"䕊": "tan",
"䕋": "chang",
"䕌": "zhi",
"䕍": "yi",
"䕎": "fu",
"䕏": "e",
"䕑": "jun",
"䕓": "cha",
"䕔": "xian",
"䕕": "man",
"䕗": "bi",
"䕘": "ling",
"䕙": "jie",
"䕚": "kui",
"䕛": "jia",
"䕞": "lang",
"䕠": "fei",
"䕡": "lv",
"䕢": "zha",
"䕣": "he",
"䕤": "qi",
"䕥": "ni",
"䕦": "ying",
"䕧": "xiao",
"䕨": "teng",
"䕩": "lao",
"䕪": "ze",
"䕫": "kui",
"䕬": "jiang",
"䕭": "qian",
"䕮": "ju",
"䕯": "piao",
"䕰": "ban",
"䕱": "dou",
"䕲": "lin",
"䕳": "mi",
"䕴": "zhuo",
"䕵": "xie",
"䕶": "hu",
"䕷": "mi",
"䕹": "za",
"䕺": "cong",
"䕻": "ge",
"䕼": "nan",
"䕽": "zhu",
"䕾": "yan",
"䕿": "han",
"䖁": "yi",
"䖂": "luan",
"䖃": "yue",
"䖄": "ran",
"䖅": "ling",
"䖆": "niang",
"䖇": "yu",
"䖈": "nve",
"䖊": "yi",
"䖋": "nve",
"䖌": "qin",
"䖍": "qian",
"䖎": "xia",
"䖏": "chu",
"䖐": "jin",
"䖑": "mi",
"䖒": "xi",
"䖓": "na",
"䖔": "han",
"䖕": "zu",
"䖖": "xia",
"䖗": "yan",
"䖘": "tu",
"䖛": "suo",
"䖜": "yin",
"䖝": "chong",
"䖞": "zhou",
"䖟": "mang",
"䖠": "yuan",
"䖢": "miao",
"䖣": "zao",
"䖤": "wan",
"䖥": "mao",
"䖦": "qu",
"䖧": "na",
"䖨": "shi",
"䖩": "bi",
"䖪": "ci",
"䖫": "bang",
"䖬": "jia",
"䖭": "juan",
"䖮": "xiang",
"䖯": "kui",
"䖰": "pai",
"䖲": "xun",
"䖳": "zha",
"䖴": "yao",
"䖵": "kun",
"䖸": "e",
"䖹": "yang",
"䖺": "tiao",
"䖻": "you",
"䖼": "jue",
"䖽": "li",
"䖿": "li",
"䗁": "ji",
"䗂": "hu",
"䗃": "zhan",
"䗄": "fu",
"䗅": "chang",
"䗆": "guan",
"䗇": "ju",
"䗈": "meng",
"䗊": "cheng",
"䗋": "mou",
"䗍": "li",
"䗐": "shi",
"䗑": "yi",
"䗒": "bing",
"䗔": "hou",
"䗕": "wan",
"䗖": "di",
"䗘": "ge",
"䗙": "han",
"䗚": "bo",
"䗜": "liu",
"䗝": "can",
"䗞": "can",
"䗟": "yi",
"䗠": "xuan",
"䗡": "yan",
"䗢": "zao",
"䗣": "gao",
"䗤": "yong",
"䗥": "zong",
"䗨": "yu",
"䗪": "zhe",
"䗫": "ma",
"䗮": "shuang",
"䗯": "jin",
"䗰": "guan",
"䗱": "pu",
"䗲": "lin",
"䗴": "ting",
"䗶": "la",
"䗷": "yi",
"䗹": "ci",
"䗺": "yan",
"䗻": "jie",
"䗽": "wei",
"䗾": "xian",
"䗿": "ning",
"䘀": "fu",
"䘁": "ge",
"䘃": "mo",
"䘄": "fu",
"䘅": "nai",
"䘆": "xian",
"䘇": "wen",
"䘈": "li",
"䘉": "can",
"䘊": "mie",
"䘌": "ni",
"䘍": "chai",
"䘏": "xu",
"䘑": "mai",
"䘒": "zui",
"䘓": "kan",
"䘔": "ka",
"䘕": "hang",
"䘘": "yu",
"䘙": "wei",
"䘚": "zu",
"䘝": "yi",
"䘟": "diao",
"䘠": "fu",
"䘡": "bi",
"䘢": "zhu",
"䘣": "zi",
"䘤": "shu",
"䘥": "xia",
"䘦": "ni",
"䘨": "jiao",
"䘩": "xuan",
"䘫": "nou",
"䘬": "rong",
"䘭": "die",
"䘮": "sa",
"䘯": "shao",
"䘰": "shan",
"䘱": "yu",
"䘳": "jin",
"䘵": "lu",
"䘶": "han",
"䘸": "yi",
"䘹": "zui",
"䘺": "zhan",
"䘻": "su",
"䘼": "wan",
"䘽": "ni",
"䘾": "guan",
"䘿": "jue",
"䙀": "beng",
"䙁": "can",
"䙃": "duo",
"䙄": "qi",
"䙅": "yao",
"䙆": "gui",
"䙇": "nuan",
"䙈": "hou",
"䙉": "xun",
"䙊": "xie",
"䙌": "hui",
"䙎": "xie",
"䙏": "bo",
"䙐": "ke",
"䙒": "xu",
"䙓": "bai",
"䙔": "yu",
"䙕": "chu",
"䙗": "ti",
"䙘": "chu",
"䙙": "chi",
"䙚": "niao",
"䙛": "guan",
"䙜": "feng",
"䙝": "xie",
"䙟": "duo",
"䙠": "jue",
"䙡": "hui",
"䙢": "zeng",
"䙣": "sa",
"䙤": "duo",
"䙥": "ling",
"䙦": "meng",
"䙨": "guo",
"䙩": "meng",
"䙪": "long",
"䙬": "ying",
"䙮": "guan",
"䙯": "cu",
"䙰": "li",
"䙱": "du",
"䙳": "e",
"䙵": "xi",
"䙷": "de",
"䙸": "de",
"䙹": "jiang",
"䙺": "lian",
"䙼": "shao",
"䙽": "xi",
"䙾": "shi",
"䙿": "wei",
"䚂": "he",
"䚃": "you",
"䚄": "lu",
"䚅": "lai",
"䚆": "ou",
"䚇": "sheng",
"䚈": "juan",
"䚉": "qi",
"䚋": "yun",
"䚍": "qi",
"䚎": "chuang",
"䚏": "leng",
"䚐": "ji",
"䚑": "mai",
"䚒": "chuang",
"䚓": "nian",
"䚔": "bin",
"䚕": "li",
"䚖": "ling",
"䚗": "gang",
"䚘": "chen",
"䚙": "xuan",
"䚚": "xian",
"䚛": "hu",
"䚝": "zu",
"䚞": "dai",
"䚟": "dai",
"䚠": "hun",
"䚡": "sai",
"䚢": "che",
"䚣": "ti",
"䚥": "nuo",
"䚦": "zhi",
"䚧": "liu",
"䚨": "fei",
"䚩": "jiao",
"䚫": "ao",
"䚬": "lin",
"䚭": "xuan",
"䚮": "reng",
"䚯": "tao",
"䚰": "pi",
"䚱": "xin",
"䚲": "shan",
"䚳": "xie",
"䚴": "wa",
"䚵": "tou",
"䚷": "xi",
"䚸": "xie",
"䚹": "pi",
"䚺": "yao",
"䚻": "yao",
"䚽": "hao",
"䚾": "nin",
"䚿": "yin",
"䛀": "fan",
"䛁": "nan",
"䛂": "chi",
"䛃": "wang",
"䛄": "yuan",
"䛅": "xia",
"䛆": "zhou",
"䛇": "yuan",
"䛈": "shi",
"䛉": "mi",
"䛋": "ge",
"䛌": "pao",
"䛍": "fei",
"䛎": "hu",
"䛏": "ni",
"䛐": "ci",
"䛑": "mi",
"䛒": "bian",
"䛔": "na",
"䛕": "yu",
"䛖": "e",
"䛗": "zhi",
"䛘": "nin",
"䛙": "xu",
"䛛": "hui",
"䛜": "xun",
"䛝": "nao",
"䛞": "han",
"䛟": "jia",
"䛠": "dou",
"䛡": "hua",
"䛤": "cu",
"䛥": "xi",
"䛦": "song",
"䛧": "mi",
"䛨": "xin",
"䛩": "wu",
"䛪": "qiong",
"䛫": "zheng",
"䛬": "tao",
"䛭": "xing",
"䛮": "jiu",
"䛯": "ju",
"䛰": "hun",
"䛱": "ti",
"䛲": "man",
"䛳": "jian",
"䛴": "qi",
"䛵": "shou",
"䛶": "lei",
"䛷": "wan",
"䛸": "che",
"䛹": "can",
"䛺": "jie",
"䛻": "you",
"䛼": "hui",
"䛽": "zha",
"䛾": "su",
"䛿": "ge",
"䜀": "nao",
"䜁": "xi",
"䜄": "chi",
"䜅": "wei",
"䜆": "mo",
"䜇": "gun",
"䜈": "jiao",
"䜊": "zao",
"䜋": "hui",
"䜌": "luan",
"䜍": "liao",
"䜎": "lao",
"䜐": "hui",
"䜑": "qia",
"䜒": "ao",
"䜓": "nie",
"䜔": "sui",
"䜕": "mai",
"䜖": "tan",
"䜗": "xin",
"䜘": "jing",
"䜙": "an",
"䜚": "ta",
"䜛": "chan",
"䜜": "wei",
"䜝": "tuan",
"䜞": "ji",
"䜟": "chen",
"䜠": "che",
"䜡": "xu",
"䜢": "xian",
"䜣": "xi",
"䜧": "nao",
"䜩": "yan",
"䜪": "qiu",
"䜫": "hong",
"䜬": "song",
"䜭": "jun",
"䜮": "liao",
"䜯": "ju",
"䜰": "hao",
"䜱": "man",
"䜲": "lie",
"䜴": "chu",
"䜵": "chi",
"䜶": "xiang",
"䜸": "mei",
"䜹": "shu",
"䜺": "ce",
"䜻": "chi",
"䜼": "gu",
"䜽": "yu",
"䜿": "shu",
"䝀": "liao",
"䝁": "lao",
"䝂": "shu",
"䝃": "zhe",
"䝈": "e",
"䝊": "sha",
"䝋": "zong",
"䝌": "jue",
"䝍": "jun",
"䝏": "lou",
"䝐": "wei",
"䝒": "zhu",
"䝓": "la",
"䝕": "zhe",
"䝖": "zhao",
"䝘": "yi",
"䝚": "ni",
"䝝": "yi",
"䝞": "hao",
"䝟": "ya",
"䝠": "huan",
"䝡": "man",
"䝢": "man",
"䝣": "qu",
"䝤": "lao",
"䝥": "hao",
"䝧": "men",
"䝨": "xian",
"䝩": "zhen",
"䝪": "shu",
"䝫": "zuo",
"䝬": "zhu",
"䝭": "gou",
"䝮": "xuan",
"䝯": "yi",
"䝰": "ti",
"䝲": "jin",
"䝳": "can",
"䝵": "bu",
"䝶": "liang",
"䝷": "zhi",
"䝸": "ji",
"䝹": "wan",
"䝺": "guan",
"䝼": "qing",
"䝽": "ai",
"䝾": "fu",
"䝿": "gui",
"䞀": "gou",
"䞁": "xian",
"䞂": "ruan",
"䞃": "zhi",
"䞄": "biao",
"䞅": "yi",
"䞆": "suo",
"䞇": "die",
"䞈": "gui",
"䞉": "sheng",
"䞊": "xun",
"䞋": "chen",
"䞌": "she",
"䞍": "qing",
"䞐": "chun",
"䞑": "hong",
"䞒": "dong",
"䞓": "cheng",
"䞔": "wei",
"䞕": "die",
"䞖": "shu",
"䞗": "cai",
"䞘": "ji",
"䞙": "za",
"䞚": "qi",
"䞜": "fu",
"䞝": "ao",
"䞞": "fu",
"䞟": "po",
"䞡": "tan",
"䞢": "zha",
"䞣": "che",
"䞤": "qu",
"䞥": "you",
"䞦": "he",
"䞧": "hou",
"䞨": "gui",
"䞩": "e",
"䞪": "jiang",
"䞫": "yun",
"䞬": "tou",
"䞭": "qiu",
"䞯": "fu",
"䞰": "zuo",
"䞱": "hu",
"䞳": "bo",
"䞵": "jue",
"䞶": "di",
"䞷": "jue",
"䞸": "fu",
"䞹": "huang",
"䞻": "yong",
"䞼": "chui",
"䞽": "suo",
"䞾": "chi",
"䞿": "qian",
"䟂": "man",
"䟃": "ca",
"䟄": "qi",
"䟅": "jian",
"䟆": "bi",
"䟇": "ji",
"䟈": "zhi",
"䟉": "zhu",
"䟊": "qu",
"䟋": "zhan",
"䟌": "ji",
"䟍": "dian",
"䟏": "li",
"䟐": "li",
"䟑": "la",
"䟒": "quan",
"䟔": "fu",
"䟕": "cha",
"䟖": "tang",
"䟗": "shi",
"䟘": "hang",
"䟙": "qie",
"䟚": "qi",
"䟛": "bo",
"䟜": "na",
"䟝": "tou",
"䟞": "chu",
"䟟": "cu",
"䟠": "yue",
"䟡": "di",
"䟢": "chen",
"䟣": "chu",
"䟤": "bi",
"䟥": "mang",
"䟦": "ba",
"䟧": "tian",
"䟨": "min",
"䟩": "lie",
"䟪": "feng",
"䟬": "qiu",
"䟭": "tiao",
"䟮": "fu",
"䟯": "kuo",
"䟰": "jian",
"䟲": "cong",
"䟴": "zhen",
"䟵": "qiu",
"䟶": "cuo",
"䟷": "chi",
"䟸": "kui",
"䟹": "lie",
"䟺": "bang",
"䟻": "du",
"䟼": "wu",
"䟽": "shu",
"䟾": "jue",
"䟿": "lu",
"䠀": "chang",
"䠂": "chu",
"䠃": "liang",
"䠄": "tian",
"䠅": "kun",
"䠆": "chang",
"䠇": "jue",
"䠈": "tu",
"䠉": "hua",
"䠊": "fei",
"䠋": "bi",
"䠍": "qia",
"䠎": "wo",
"䠏": "ji",
"䠐": "qu",
"䠑": "kui",
"䠒": "hu",
"䠓": "cu",
"䠔": "sui",
"䠗": "qiu",
"䠘": "pi",
"䠙": "bei",
"䠚": "wa",
"䠛": "jiao",
"䠜": "rong",
"䠞": "cu",
"䠟": "die",
"䠠": "chi",
"䠡": "cuo",
"䠢": "meng",
"䠣": "xuan",
"䠤": "duo",
"䠥": "bie",
"䠦": "zhe",
"䠧": "chu",
"䠨": "chan",
"䠩": "gui",
"䠪": "duan",
"䠫": "zou",
"䠬": "deng",
"䠭": "lai",
"䠮": "teng",
"䠯": "yue",
"䠰": "quan",
"䠱": "shu",
"䠲": "ling",
"䠴": "qin",
"䠵": "fu",
"䠶": "she",
"䠷": "tiao",
"䠹": "hai",
"䠻": "qiong",
"䠼": "diao",
"䠽": "hai",
"䠾": "shan",
"䠿": "wai",
"䡀": "zhan",
"䡁": "long",
"䡂": "jiu",
"䡃": "li",
"䡅": "min",
"䡆": "rong",
"䡇": "yue",
"䡈": "jue",
"䡉": "kang",
"䡊": "fan",
"䡋": "qi",
"䡌": "hong",
"䡍": "fu",
"䡎": "lu",
"䡏": "hong",
"䡐": "tuo",
"䡑": "min",
"䡒": "tian",
"䡓": "juan",
"䡔": "qi",
"䡕": "zheng",
"䡖": "jing",
"䡗": "gong",
"䡘": "tian",
"䡙": "lang",
"䡚": "mao",
"䡛": "yin",
"䡜": "lu",
"䡝": "yun",
"䡞": "ju",
"䡟": "pi",
"䡡": "xie",
"䡢": "bian",
"䡣": "hun",
"䡥": "rong",
"䡦": "sang",
"䡧": "wu",
"䡨": "cha",
"䡩": "gu",
"䡪": "chan",
"䡫": "peng",
"䡬": "man",
"䡯": "shuang",
"䡰": "keng",
"䡱": "zhuan",
"䡲": "chan",
"䡴": "chuang",
"䡵": "sui",
"䡶": "bei",
"䡷": "kai",
"䡹": "zhi",
"䡺": "wei",
"䡻": "min",
"䡼": "ling",
"䡾": "nei",
"䡿": "ling",
"䢀": "qi",
"䢁": "yue",
"䢃": "yi",
"䢄": "xi",
"䢅": "chen",
"䢇": "rong",
"䢈": "chen",
"䢉": "nong",
"䢊": "you",
"䢋": "ji",
"䢌": "bo",
"䢍": "fang",
"䢐": "cu",
"䢑": "di",
"䢒": "jiao",
"䢓": "yu",
"䢔": "ge",
"䢕": "xu",
"䢖": "yu",
"䢗": "he",
"䢙": "bai",
"䢚": "gong",
"䢛": "jiong",
"䢝": "ya",
"䢞": "nu",
"䢟": "you",
"䢠": "song",
"䢡": "xie",
"䢢": "cang",
"䢣": "yao",
"䢤": "shu",
"䢥": "yan",
"䢦": "shuai",
"䢧": "liao",
"䢩": "yu",
"䢪": "bo",
"䢫": "sui",
"䢭": "yan",
"䢮": "lei",
"䢯": "lin",
"䢰": "tai",
"䢱": "du",
"䢲": "yue",
"䢳": "ji",
"䢵": "yun",
"䢸": "ju",
"䢹": "ju",
"䢺": "chu",
"䢻": "chen",
"䢼": "gong",
"䢽": "xiang",
"䢾": "xian",
"䣀": "gui",
"䣁": "yu",
"䣂": "lei",
"䣄": "tu",
"䣅": "chen",
"䣆": "xing",
"䣇": "qiu",
"䣈": "hang",
"䣊": "dang",
"䣋": "cai",
"䣌": "di",
"䣍": "yan",
"䣑": "chan",
"䣓": "li",
"䣔": "suo",
"䣕": "ma",
"䣖": "ma",
"䣘": "tang",
"䣙": "pei",
"䣚": "lou",
"䣛": "qi",
"䣜": "cuo",
"䣝": "tu",
"䣞": "e",
"䣟": "can",
"䣠": "jie",
"䣡": "ti",
"䣢": "ji",
"䣣": "dang",
"䣤": "jiao",
"䣥": "bi",
"䣦": "lei",
"䣧": "yi",
"䣨": "chun",
"䣩": "chun",
"䣪": "po",
"䣫": "li",
"䣬": "zai",
"䣭": "tai",
"䣮": "po",
"䣯": "tian",
"䣰": "ju",
"䣱": "xu",
"䣲": "fan",
"䣴": "xu",
"䣵": "er",
"䣶": "huo",
"䣸": "ran",
"䣹": "fa",
"䣺": "juan",
"䣼": "liang",
"䣽": "ti",
"䣾": "mi",
"䤁": "cen",
"䤂": "mei",
"䤃": "yin",
"䤄": "mian",
"䤅": "tu",
"䤆": "kui",
"䤉": "mi",
"䤊": "rong",
"䤋": "guo",
"䤍": "mi",
"䤎": "ju",
"䤏": "pi",
"䤐": "jin",
"䤑": "wang",
"䤒": "ji",
"䤓": "meng",
"䤔": "jian",
"䤕": "xue",
"䤖": "bao",
"䤗": "gan",
"䤘": "chan",
"䤙": "li",
"䤚": "li",
"䤛": "qiu",
"䤜": "dun",
"䤝": "ying",
"䤞": "yun",
"䤟": "chen",
"䤠": "ji",
"䤡": "ran",
"䤥": "gui",
"䤦": "yue",
"䤧": "hui",
"䤨": "pi",
"䤩": "cha",
"䤪": "duo",
"䤫": "chan",
"䤭": "kuan",
"䤮": "she",
"䤯": "xing",
"䤰": "weng",
"䤱": "shi",
"䤲": "chi",
"䤳": "ye",
"䤴": "han",
"䤵": "fei",
"䤶": "ye",
"䤷": "yan",
"䤸": "zuan",
"䤺": "yin",
"䤻": "duo",
"䤼": "xian",
"䤿": "qie",
"䥀": "chan",
"䥁": "han",
"䥂": "meng",
"䥃": "yue",
"䥄": "cu",
"䥅": "qian",
"䥆": "jin",
"䥇": "shan",
"䥈": "mu",
"䥌": "zheng",
"䥍": "zhi",
"䥎": "chun",
"䥏": "yu",
"䥐": "mou",
"䥑": "wan",
"䥒": "jiang",
"䥔": "su",
"䥕": "pie",
"䥖": "tian",
"䥗": "kuan",
"䥘": "cu",
"䥙": "sui",
"䥛": "jie",
"䥜": "jian",
"䥝": "ao",
"䥞": "jiao",
"䥟": "ye",
"䥡": "ye",
"䥢": "long",
"䥣": "zao",
"䥤": "bao",
"䥥": "lian",
"䥧": "huan",
"䥩": "wei",
"䥪": "xian",
"䥫": "tie",
"䥬": "bo",
"䥭": "zheng",
"䥮": "zhu",
"䥯": "ba",
"䥰": "meng",
"䥱": "xie",
"䥴": "juan",
"䥵": "xiao",
"䥶": "li",
"䥷": "zha",
"䥸": "mi",
"䥺": "ye",
"䥾": "xie",
"䦂": "shan",
"䦅": "shan",
"䦆": "jue",
"䦇": "ji",
"䦈": "fang",
"䦊": "niao",
"䦋": "ao",
"䦌": "chu",
"䦍": "wu",
"䦎": "guan",
"䦏": "xie",
"䦐": "ting",
"䦑": "xie",
"䦒": "dang",
"䦔": "tan",
"䦖": "xia",
"䦗": "xu",
"䦘": "bi",
"䦙": "si",
"䦚": "huo",
"䦛": "zheng",
"䦜": "wu",
"䦞": "run",
"䦟": "chuai",
"䦠": "shi",
"䦡": "huan",
"䦢": "kuo",
"䦣": "fu",
"䦤": "chuai",
"䦥": "xian",
"䦦": "qin",
"䦧": "qie",
"䦨": "lan",
"䦩": "wen",
"䦪": "ya",
"䦬": "que",
"䦮": "chun",
"䦯": "zhi",
"䦱": "kui",
"䦲": "qian",
"䦳": "hang",
"䦴": "yi",
"䦵": "ni",
"䦶": "zheng",
"䦷": "chuai",
"䦹": "shi",
"䦺": "ding",
"䦻": "ci",
"䦼": "jue",
"䦽": "xu",
"䦾": "yun",
"䧁": "chu",
"䧂": "dao",
"䧃": "dian",
"䧄": "ge",
"䧅": "ti",
"䧆": "hong",
"䧇": "ni",
"䧉": "li",
"䧊": "ku",
"䧋": "xian",
"䧍": "xi",
"䧎": "xuan",
"䧒": "lai",
"䧔": "mu",
"䧕": "cheng",
"䧖": "jian",
"䧗": "bi",
"䧘": "qi",
"䧙": "ling",
"䧚": "hao",
"䧛": "bang",
"䧜": "tang",
"䧝": "chi",
"䧞": "fu",
"䧟": "xian",
"䧠": "shuan",
"䧢": "qu",
"䧤": "pu",
"䧥": "hui",
"䧦": "wei",
"䧧": "yi",
"䧨": "ye",
"䧪": "che",
"䧫": "hao",
"䧮": "xian",
"䧯": "chan",
"䧰": "hun",
"䧲": "han",
"䧳": "ci",
"䧴": "zhi",
"䧵": "qi",
"䧶": "kui",
"䧷": "rou",
"䧺": "xiong",
"䧼": "hu",
"䧽": "cui",
"䧿": "que",
"䨀": "di",
"䨁": "che",
"䨄": "yan",
"䨅": "liao",
"䨆": "bi",
"䨊": "yuan",
"䨌": "bao",
"䨍": "ying",
"䨎": "hong",
"䨏": "ci",
"䨐": "qia",
"䨑": "ti",
"䨒": "yu",
"䨓": "lei",
"䨔": "bao",
"䨖": "ji",
"䨗": "fu",
"䨘": "xian",
"䨙": "cen",
"䨛": "se",
"䨞": "yu",
"䨠": "ai",
"䨡": "han",
"䨢": "dan",
"䨣": "ge",
"䨤": "di",
"䨥": "hu",
"䨦": "pang",
"䨩": "ling",
"䨪": "mai",
"䨫": "mai",
"䨬": "lian",
"䨮": "xue",
"䨯": "zhen",
"䨰": "po",
"䨱": "fu",
"䨲": "nou",
"䨳": "xi",
"䨴": "dui",
"䨵": "dan",
"䨶": "yun",
"䨷": "xian",
"䨸": "yin",
"䨺": "dui",
"䨻": "beng",
"䨼": "hu",
"䨽": "fei",
"䨾": "fei",
"䨿": "qian",
"䩀": "bei",
"䩁": "fei",
"䩃": "shi",
"䩄": "tian",
"䩅": "zhan",
"䩆": "jian",
"䩈": "hui",
"䩉": "fu",
"䩊": "wan",
"䩋": "mo",
"䩌": "qiao",
"䩍": "liao",
"䩏": "mie",
"䩐": "ge",
"䩑": "hong",
"䩒": "yu",
"䩓": "qi",
"䩔": "duo",
"䩕": "ang",
"䩗": "ba",
"䩘": "di",
"䩙": "xuan",
"䩚": "di",
"䩛": "bi",
"䩜": "zhou",
"䩝": "pao",
"䩞": "nian",
"䩟": "yi",
"䩡": "jia",
"䩢": "da",
"䩣": "duo",
"䩤": "xi",
"䩥": "dan",
"䩦": "tiao",
"䩧": "xie",
"䩨": "chang",
"䩩": "yuan",
"䩪": "guan",
"䩫": "liang",
"䩬": "beng",
"䩮": "lu",
"䩯": "ji",
"䩰": "xuan",
"䩱": "shu",
"䩳": "shu",
"䩴": "hu",
"䩵": "yun",
"䩶": "chan",
"䩸": "rong",
"䩹": "e",
"䩻": "ba",
"䩼": "feng",
"䩾": "zhe",
"䩿": "fen",
"䪀": "guan",
"䪁": "bu",
"䪂": "ge",
"䪄": "huang",
"䪅": "du",
"䪆": "ti",
"䪇": "bo",
"䪈": "qian",
"䪉": "la",
"䪊": "long",
"䪋": "wei",
"䪌": "zhan",
"䪍": "lan",
"䪎": "sui",
"䪏": "na",
"䪐": "bi",
"䪑": "tuo",
"䪒": "jiao",
"䪔": "bu",
"䪕": "ju",
"䪖": "po",
"䪗": "xia",
"䪘": "wei",
"䪙": "fu",
"䪚": "he",
"䪛": "fan",
"䪜": "chan",
"䪝": "hu",
"䪞": "za",
"䪢": "ji",
"䪤": "fan",
"䪥": "xie",
"䪦": "hong",
"䪧": "chi",
"䪨": "bao",
"䪩": "yin",
"䪬": "bo",
"䪭": "ruan",
"䪮": "chou",
"䪯": "ying",
"䪱": "gai",
"䪳": "yun",
"䪴": "zhen",
"䪵": "ya",
"䪷": "hou",
"䪸": "min",
"䪹": "pei",
"䪺": "ge",
"䪻": "bian",
"䪼": "zhuo",
"䪽": "hao",
"䪾": "mi",
"䪿": "sheng",
"䫀": "gen",
"䫁": "bi",
"䫂": "duo",
"䫃": "chun",
"䫄": "chua",
"䫅": "san",
"䫆": "cheng",
"䫇": "ran",
"䫈": "zen",
"䫉": "mao",
"䫊": "bo",
"䫋": "tui",
"䫌": "pi",
"䫍": "fu",
"䫏": "qi",
"䫐": "lin",
"䫒": "men",
"䫓": "wu",
"䫔": "qi",
"䫕": "zhi",
"䫖": "chen",
"䫗": "xia",
"䫘": "he",
"䫙": "sang",
"䫛": "hou",
"䫝": "fu",
"䫞": "rao",
"䫟": "hun",
"䫠": "pei",
"䫡": "qian",
"䫣": "xi",
"䫤": "ming",
"䫥": "kui",
"䫦": "ge",
"䫨": "ao",
"䫩": "san",
"䫪": "shuang",
"䫫": "lou",
"䫬": "zhen",
"䫭": "hui",
"䫮": "can",
"䫰": "lin",
"䫱": "na",
"䫲": "han",
"䫳": "du",
"䫴": "jin",
"䫵": "mian",
"䫶": "fan",
"䫷": "e",
"䫸": "nao",
"䫹": "hong",
"䫺": "hong",
"䫻": "xue",
"䫼": "xue",
"䫾": "bi",
"䬀": "you",
"䬁": "yi",
"䬂": "xue",
"䬃": "sa",
"䬄": "yu",
"䬅": "li",
"䬆": "li",
"䬇": "yuan",
"䬈": "dui",
"䬉": "hao",
"䬊": "qie",
"䬋": "leng",
"䬍": "hu",
"䬎": "guo",
"䬏": "bu",
"䬐": "wei",
"䬑": "wei",
"䬒": "sou",
"䬓": "an",
"䬔": "xu",
"䬕": "shang",
"䬖": "heng",
"䬗": "yang",
"䬙": "yao",
"䬛": "bi",
"䬜": "chi",
"䬝": "heng",
"䬞": "tao",
"䬟": "liu",
"䬡": "zhu",
"䬢": "tao",
"䬣": "qi",
"䬤": "chao",
"䬥": "yi",
"䬦": "dou",
"䬧": "yuan",
"䬨": "cu",
"䬪": "bo",
"䬫": "can",
"䬬": "yang",
"䬮": "yi",
"䬯": "nian",
"䬰": "shao",
"䬱": "ben",
"䬳": "ban",
"䬴": "mo",
"䬵": "ai",
"䬶": "en",
"䬷": "she",
"䬹": "zhi",
"䬺": "yang",
"䬻": "jian",
"䬼": "yuan",
"䬽": "dui",
"䬾": "ti",
"䬿": "wei",
"䭀": "xun",
"䭁": "zhi",
"䭂": "yi",
"䭃": "ren",
"䭄": "shi",
"䭅": "hu",
"䭆": "ne",
"䭇": "yi",
"䭈": "jian",
"䭉": "sui",
"䭊": "ying",
"䭋": "bao",
"䭌": "hu",
"䭍": "hu",
"䭎": "xie",
"䭐": "yang",
"䭑": "lian",
"䭓": "en",
"䭕": "jian",
"䭖": "zhu",
"䭗": "ying",
"䭘": "yan",
"䭙": "jin",
"䭚": "chuang",
"䭛": "dan",
"䭝": "kuai",
"䭞": "yi",
"䭟": "ye",
"䭠": "jian",
"䭡": "en",
"䭢": "ning",
"䭣": "ci",
"䭤": "qian",
"䭥": "xue",
"䭦": "bo",
"䭧": "mi",
"䭨": "shui",
"䭩": "mi",
"䭪": "liang",
"䭫": "qi",
"䭬": "qi",
"䭭": "shou",
"䭮": "bi",
"䭯": "bo",
"䭰": "beng",
"䭱": "bie",
"䭲": "ni",
"䭳": "wei",
"䭴": "huan",
"䭵": "fan",
"䭶": "qi",
"䭷": "liu",
"䭸": "fu",
"䭹": "ang",
"䭺": "ang",
"䭼": "qi",
"䭽": "qun",
"䭾": "tuo",
"䭿": "yi",
"䮀": "bo",
"䮁": "pian",
"䮂": "bo",
"䮄": "xuan",
"䮇": "yu",
"䮈": "chi",
"䮉": "lu",
"䮊": "yi",
"䮋": "li",
"䮍": "niao",
"䮎": "xi",
"䮏": "wu",
"䮑": "lei",
"䮓": "zhao",
"䮔": "zui",
"䮕": "chuo",
"䮗": "an",
"䮘": "er",
"䮙": "yu",
"䮚": "leng",
"䮛": "fu",
"䮜": "sha",
"䮝": "huan",
"䮞": "chu",
"䮟": "sou",
"䮡": "bi",
"䮢": "die",
"䮤": "di",
"䮥": "li",
"䮧": "han",
"䮨": "zai",
"䮩": "gu",
"䮪": "cheng",
"䮫": "lou",
"䮬": "mo",
"䮭": "mi",
"䮮": "mai",
"䮯": "ao",
"䮰": "zhe",
"䮱": "zhu",
"䮲": "huang",
"䮳": "fan",
"䮴": "deng",
"䮵": "tong",
"䮷": "du",
"䮸": "hu",
"䮹": "wei",
"䮺": "ji",
"䮻": "chi",
"䮼": "lin",
"䮾": "pang",
"䮿": "jian",
"䯀": "nie",
"䯁": "luo",
"䯂": "ji",
"䯅": "nie",
"䯆": "yi",
"䯈": "wan",
"䯉": "ya",
"䯊": "qia",
"䯋": "bo",
"䯌": "kao",
"䯍": "ling",
"䯎": "gan",
"䯏": "huo",
"䯐": "hai",
"䯒": "heng",
"䯓": "kui",
"䯔": "cen",
"䯖": "lang",
"䯗": "bi",
"䯘": "huan",
"䯙": "po",
"䯚": "ou",
"䯛": "jian",
"䯜": "ti",
"䯝": "sui",
"䯞": "kua",
"䯟": "dui",
"䯠": "ao",
"䯡": "jian",
"䯢": "mo",
"䯣": "gui",
"䯤": "kuai",
"䯥": "an",
"䯦": "ma",
"䯧": "qing",
"䯨": "fen",
"䯪": "kao",
"䯫": "hao",
"䯬": "duo",
"䯮": "nai",
"䯰": "jie",
"䯱": "fu",
"䯲": "pa",
"䯴": "chang",
"䯵": "nie",
"䯶": "man",
"䯸": "ci",
"䯺": "kuo",
"䯼": "di",
"䯽": "fu",
"䯾": "tiao",
"䯿": "zu",
"䰀": "wo",
"䰁": "fei",
"䰂": "cai",
"䰃": "peng",
"䰄": "shi",
"䰆": "rou",
"䰇": "qi",
"䰈": "cha",
"䰉": "pan",
"䰊": "bo",
"䰋": "man",
"䰌": "zong",
"䰍": "ci",
"䰎": "gui",
"䰏": "ji",
"䰐": "lan",
"䰒": "meng",
"䰓": "mian",
"䰔": "pan",
"䰕": "lu",
"䰖": "cuan",
"䰗": "jiu",
"䰘": "liu",
"䰙": "yi",
"䰚": "wen",
"䰛": "li",
"䰜": "li",
"䰝": "zeng",
"䰞": "zhu",
"䰟": "hun",
"䰠": "shen",
"䰡": "chi",
"䰢": "xing",
"䰣": "wang",
"䰥": "huo",
"䰦": "pi",
"䰧": "hu",
"䰨": "mei",
"䰩": "che",
"䰪": "mei",
"䰫": "chao",
"䰬": "ju",
"䰭": "nou",
"䰯": "ni",
"䰰": "ru",
"䰱": "ling",
"䰲": "ya",
"䰴": "qi",
"䰷": "bang",
"䰹": "ze",
"䰺": "jie",
"䰻": "yu",
"䰼": "xin",
"䰽": "bei",
"䰾": "ba",
"䰿": "tuo",
"䱁": "qiao",
"䱂": "you",
"䱃": "di",
"䱄": "jie",
"䱅": "mo",
"䱆": "sheng",
"䱇": "shan",
"䱈": "qi",
"䱉": "shan",
"䱊": "mi",
"䱋": "dan",
"䱌": "yi",
"䱍": "geng",
"䱎": "geng",
"䱏": "tou",
"䱑": "xue",
"䱒": "yi",
"䱓": "ting",
"䱔": "tiao",
"䱕": "mou",
"䱖": "liu",
"䱘": "li",
"䱚": "lu",
"䱛": "huo",
"䱜": "cuo",
"䱝": "ba",
"䱞": "liu",
"䱟": "ju",
"䱠": "zhan",
"䱡": "ju",
"䱣": "zu",
"䱤": "xian",
"䱥": "zhi",
"䱨": "zhi",
"䱫": "la",
"䱬": "xu",
"䱭": "geng",
"䱮": "e",
"䱯": "mu",
"䱰": "zhong",
"䱱": "di",
"䱲": "yan",
"䱴": "geng",
"䱵": "weng",
"䱶": "lang",
"䱷": "yu",
"䱸": "qiu",
"䱹": "na",
"䱺": "hai",
"䱻": "hua",
"䱼": "zhan",
"䱾": "lou",
"䱿": "chan",
"䲀": "die",
"䲁": "wei",
"䲂": "xuan",
"䲃": "zao",
"䲄": "min",
"䲅": "gui",
"䲊": "tuo",
"䲋": "cen",
"䲌": "kuan",
"䲍": "teng",
"䲎": "nei",
"䲏": "lao",
"䲐": "lu",
"䲑": "yi",
"䲒": "xie",
"䲓": "yan",
"䲔": "qing",
"䲕": "pu",
"䲖": "chou",
"䲗": "xian",
"䲘": "guan",
"䲙": "jie",
"䲚": "lai",
"䲛": "meng",
"䲜": "ye",
"䲞": "li",
"䲟": "yin",
"䲡": "qiu",
"䲢": "teng",
"䲣": "yu",
"䲦": "cha",
"䲧": "du",
"䲨": "hong",
"䲩": "shi",
"䲪": "xi",
"䲬": "qi",
"䲮": "yuan",
"䲯": "ji",
"䲰": "yun",
"䲱": "fang",
"䲳": "hang",
"䲴": "zhen",
"䲵": "hu",
"䲸": "jie",
"䲹": "pei",
"䲺": "gan",
"䲻": "xuan",
"䲽": "dao",
"䲾": "qiao",
"䲿": "ci",
"䳀": "die",
"䳁": "ba",
"䳂": "tiao",
"䳃": "wan",
"䳄": "ci",
"䳅": "zhi",
"䳆": "bai",
"䳇": "wu",
"䳈": "bao",
"䳉": "dong",
"䳊": "ba",
"䳋": "tong",
"䳎": "jiu",
"䳏": "gui",
"䳐": "ci",
"䳑": "you",
"䳒": "yuan",
"䳓": "lao",
"䳔": "jiu",
"䳕": "fou",
"䳖": "nei",
"䳗": "e",
"䳘": "e",
"䳙": "xing",
"䳚": "he",
"䳛": "yan",
"䳜": "tu",
"䳝": "bu",
"䳞": "beng",
"䳟": "kou",
"䳠": "chui",
"䳢": "qi",
"䳣": "yuan",
"䳧": "hou",
"䳨": "huang",
"䳪": "juan",
"䳫": "kui",
"䳬": "e",
"䳭": "ji",
"䳮": "mo",
"䳯": "chong",
"䳰": "bao",
"䳱": "wu",
"䳲": "zhen",
"䳳": "xu",
"䳴": "da",
"䳵": "chi",
"䳷": "cong",
"䳸": "ma",
"䳹": "kou",
"䳺": "yan",
"䳻": "can",
"䳼": "ou",
"䳽": "he",
"䳿": "lan",
"䴀": "tong",
"䴁": "yu",
"䴂": "hang",
"䴃": "nao",
"䴄": "li",
"䴅": "fen",
"䴆": "pu",
"䴇": "ling",
"䴈": "ao",
"䴉": "xuan",
"䴊": "yi",
"䴋": "xuan",
"䴌": "meng",
"䴎": "lei",
"䴏": "yan",
"䴐": "bao",
"䴑": "die",
"䴒": "ling",
"䴓": "shi",
"䴔": "jiao",
"䴕": "lie",
"䴖": "jing",
"䴗": "ju",
"䴘": "ti",
"䴙": "pi",
"䴚": "gang",
"䴛": "jiao",
"䴜": "huai",
"䴝": "bu",
"䴞": "di",
"䴟": "huan",
"䴠": "yao",
"䴡": "li",
"䴢": "mi",
"䴥": "jia",
"䴦": "ren",
"䴩": "piao",
"䴪": "lu",
"䴫": "ling",
"䴬": "yi",
"䴭": "cai",
"䴮": "shan",
"䴰": "shu",
"䴱": "tuo",
"䴲": "mo",
"䴳": "he",
"䴴": "tie",
"䴵": "bing",
"䴶": "peng",
"䴷": "hun",
"䴹": "guo",
"䴺": "bu",
"䴻": "li",
"䴼": "chan",
"䴽": "bai",
"䴾": "cuo",
"䴿": "meng",
"䵀": "suo",
"䵁": "qiang",
"䵂": "zhi",
"䵃": "kuang",
"䵄": "bi",
"䵅": "ao",
"䵆": "meng",
"䵇": "xian",
"䵈": "ku",
"䵉": "tou",
"䵋": "wei",
"䵌": "xian",
"䵎": "tuan",
"䵏": "lao",
"䵐": "chan",
"䵑": "ni",
"䵒": "ni",
"䵓": "li",
"䵔": "dong",
"䵕": "ju",
"䵖": "jian",
"䵗": "fu",
"䵘": "sha",
"䵙": "zha",
"䵚": "tao",
"䵛": "jian",
"䵜": "nong",
"䵝": "ya",
"䵞": "jing",
"䵟": "gan",
"䵠": "di",
"䵡": "jian",
"䵢": "mei",
"䵣": "da",
"䵤": "jian",
"䵥": "she",
"䵦": "xie",
"䵧": "zai",
"䵨": "mang",
"䵩": "li",
"䵪": "gun",
"䵫": "yu",
"䵬": "ta",
"䵭": "zhe",
"䵮": "yang",
"䵯": "tuan",
"䵱": "he",
"䵲": "diao",
"䵳": "wei",
"䵴": "yun",
"䵵": "zha",
"䵶": "qu",
"䵷": "wa",
"䵹": "zhi",
"䵺": "ting",
"䵻": "gu",
"䵽": "ca",
"䵾": "fu",
"䵿": "tie",
"䶀": "ta",
"䶁": "ta",
"䶂": "zhuo",
"䶃": "han",
"䶄": "ping",
"䶅": "he",
"䶇": "zhou",
"䶈": "bo",
"䶉": "liu",
"䶌": "pao",
"䶍": "di",
"䶎": "sha",
"䶏": "ti",
"䶐": "wai",
"䶑": "ti",
"䶒": "qi",
"䶓": "ji",
"䶔": "chi",
"䶕": "pa",
"䶖": "jin",
"䶗": "qia",
"䶘": "li",
"䶙": "ju",
"䶚": "qu",
"䶛": "la",
"䶜": "gu",
"䶝": "qia",
"䶞": "qi",
"䶟": "xian",
"䶠": "jian",
"䶡": "shi",
"䶢": "xian",
"䶣": "ai",
"䶤": "hua",
"䶥": "ju",
"䶦": "ze",
"䶧": "yao",
"䶩": "ji",
"䶪": "cha",
"䶫": "kan",
"䶬": "jian",
"䶮": "yan",
"䶱": "tong",
"䶲": "nan",
"䶳": "yue",
"䶴": "chui",
"䶵": "chi",
"一": "yi",
"丁": "ding",
"丂": "kao",
"七": "qi",
"丄": "shang",
"丅": "xia",
"丆": "han",
"万": "wan",
"丈": "zhang",
"三": "san",
"上": "shang",
"下": "xia",
"丌": "qi",
"不": "bu",
"与": "yu",
"丏": "mian",
"丐": "gai",
"丑": "chou",
"丒": "chou",
"专": "zhuan",
"且": "qie",
"丕": "pi",
"世": "shi",
"丗": "shi",
"丘": "qiu",
"丙": "bing",
"业": "ye",
"丛": "cong",
"东": "dong",
"丝": "si",
"丞": "cheng",
"丟": "diu",
"丠": "qiu",
"両": "liang",
"丢": "diu",
"丣": "you",
"两": "liang",
"严": "yan",
"並": "bing",
"丧": "sang",
"丨": "gun",
"丩": "jiu",
"个": "ge",
"丫": "ya",
"丬": "pan",
"中": "zhong",
"丮": "ji",
"丯": "jie",
"丰": "feng",
"丱": "guan",
"串": "chuan",
"丳": "chan",
"临": "lin",
"丵": "zhuo",
"丶": "zhu",
"丷": "ba",
"丸": "wan",
"丹": "dan",
"为": "wei",
"主": "zhu",
"丼": "jing",
"丽": "li",
"举": "ju",
"丿": "pie",
"乀": "fu",
"乁": "yi",
"乂": "yi",
"乃": "nai",
"乄": "wu",
"久": "jiu",
"乆": "jiu",
"乇": "tuo",
"么": "me",
"义": "yi",
"乊": "yi",
"之": "zhi",
"乌": "wu",
"乍": "zha",
"乎": "hu",
"乏": "fa",
"乐": "le",
"乑": "yin",
"乒": "ping",
"乓": "pang",
"乔": "qiao",
"乕": "hu",
"乖": "guai",
"乗": "cheng",
"乘": "cheng",
"乙": "yi",
"乚": "hao",
"乛": "yi",
"乜": "mie",
"九": "jiu",
"乞": "qi",
"也": "ye",
"习": "xi",
"乡": "xiang",
"乢": "gai",
"乣": "jiu",
"乤": "xia",
"书": "shu",
"乧": "dou",
"乨": "shi",
"乩": "ji",
"乪": "nang",
"乫": "jia",
"乬": "ju",
"乭": "shi",
"乮": "mao",
"乯": "hu",
"买": "mai",
"乱": "luan",
"乳": "ru",
"乴": "xue",
"乵": "yan",
"乶": "fu",
"乷": "sha",
"乸": "na",
"乹": "qian",
"乺": "suo",
"乻": "yu",
"乼": "zhu",
"乽": "zhe",
"乾": "qian",
"乿": "zhi",
"亀": "gui",
"亁": "qian",
"亂": "luan",
"亃": "lin",
"亄": "yi",
"亅": "jue",
"了": "liao",
"亇": "ge",
"予": "yu",
"争": "zheng",
"亊": "shi",
"事": "shi",
"二": "er",
"亍": "chu",
"于": "yu",
"亏": "kui",
"亐": "yu",
"云": "yun",
"互": "hu",
"亓": "qi",
"五": "wu",
"井": "jing",
"亖": "si",
"亗": "sui",
"亘": "gen",
"亙": "gen",
"亚": "ya",
"些": "xie",
"亜": "ya",
"亝": "qi",
"亞": "ya",
"亟": "ji",
"亠": "tou",
"亡": "wang",
"亢": "kang",
"亣": "da",
"交": "jiao",
"亥": "hai",
"亦": "yi",
"产": "chan",
"亨": "heng",
"亩": "mu",
"亪": "ye",
"享": "xiang",
"京": "jing",
"亭": "ting",
"亮": "liang",
"亯": "xiang",
"亰": "jing",
"亱": "ye",
"亲": "qin",
"亳": "bo",
"亴": "you",
"亵": "xie",
"亶": "dan",
"亷": "lian",
"亸": "duo",
"亹": "wei",
"人": "ren",
"亻": "ren",
"亼": "ji",
"亽": "ji",
"亾": "wang",
"亿": "yi",
"什": "shen",
"仁": "ren",
"仂": "le",
"仃": "ding",
"仄": "ze",
"仅": "jin",
"仆": "pu",
"仇": "chou",
"仈": "ba",
"仉": "zhang",
"今": "jin",
"介": "jie",
"仌": "bing",
"仍": "reng",
"从": "cong",
"仏": "fo",
"仐": "jin",
"仑": "lun",
"仒": "bing",
"仓": "cang",
"仔": "zi",
"仕": "shi",
"他": "ta",
"仗": "zhang",
"付": "fu",
"仙": "xian",
"仚": "xian",
"仛": "tuo",
"仜": "hong",
"仝": "tong",
"仞": "ren",
"仟": "qian",
"仠": "gan",
"仡": "yi",
"仢": "bo",
"代": "dai",
"令": "ling",
"以": "yi",
"仦": "chao",
"仧": "chang",
"仨": "sa",
"仩": "chang",
"仪": "yi",
"仫": "mu",
"们": "men",
"仭": "ren",
"仮": "fan",
"仯": "chao",
"仰": "yang",
"仱": "qian",
"仲": "zhong",
"仳": "pi",
"仴": "wo",
"仵": "wu",
"件": "jian",
"价": "jia",
"仸": "yao",
"仹": "feng",
"仺": "cang",
"任": "ren",
"仼": "wang",
"份": "fen",
"仾": "di",
"仿": "fang",
"伀": "zhong",
"企": "qi",
"伂": "pei",
"伃": "yu",
"伄": "diao",
"伅": "dun",
"伆": "wen",
"伇": "yi",
"伈": "xin",
"伉": "kang",
"伊": "yi",
"伋": "ji",
"伌": "ai",
"伍": "wu",
"伎": "ji",
"伏": "fu",
"伐": "fa",
"休": "xiu",
"伒": "jin",
"伓": "pi",
"伔": "dan",
"伕": "fu",
"伖": "tang",
"众": "zhong",
"优": "you",
"伙": "huo",
"会": "hui",
"伛": "yu",
"伜": "cui",
"伝": "yun",
"伞": "san",
"伟": "wei",
"传": "chuan",
"伡": "che",
"伢": "ya",
"伣": "qian",
"伤": "shang",
"伥": "chang",
"伦": "lun",
"伧": "cang",
"伨": "xun",
"伩": "xin",
"伪": "wei",
"伫": "zhu",
"伬": "chi",
"伭": "xian",
"伮": "nu",
"伯": "bo",
"估": "gu",
"伱": "ni",
"伲": "ni",
"伳": "xie",
"伴": "ban",
"伵": "xu",
"伶": "ling",
"伷": "zhou",
"伸": "shen",
"伹": "qu",
"伺": "si",
"伻": "beng",
"似": "si",
"伽": "qie",
"伾": "pi",
"伿": "yi",
"佀": "si",
"佁": "yi",
"佂": "zheng",
"佃": "dian",
"佄": "han",
"佅": "mai",
"但": "dan",
"佇": "zhu",
"佈": "bu",
"佉": "qu",
"佊": "bi",
"佋": "zhao",
"佌": "ci",
"位": "wei",
"低": "di",
"住": "zhu",
"佐": "zuo",
"佑": "you",
"佒": "yang",
"体": "ti",
"佔": "zhan",
"何": "he",
"佖": "bi",
"佗": "tuo",
"佘": "she",
"余": "yu",
"佚": "yi",
"佛": "fo",
"作": "zuo",
"佝": "gou",
"佞": "ning",
"佟": "tong",
"你": "ni",
"佡": "xian",
"佢": "qu",
"佣": "yong",
"佤": "wa",
"佥": "qian",
"佦": "you",
"佧": "ka",
"佨": "bao",
"佩": "pei",
"佪": "hui",
"佫": "ge",
"佬": "lao",
"佭": "xiang",
"佮": "ge",
"佯": "yang",
"佰": "bai",
"佱": "fa",
"佲": "ming",
"佳": "jia",
"佴": "er",
"併": "bing",
"佶": "ji",
"佷": "hen",
"佸": "huo",
"佹": "gui",
"佺": "quan",
"佻": "tiao",
"佼": "jiao",
"佽": "ci",
"佾": "yi",
"使": "shi",
"侀": "xing",
"侁": "shen",
"侂": "tuo",
"侃": "kan",
"侄": "zhi",
"侅": "gai",
"來": "lai",
"侇": "yi",
"侈": "chi",
"侉": "kua",
"侊": "gong",
"例": "li",
"侌": "yin",
"侍": "shi",
"侎": "mi",
"侏": "zhu",
"侐": "xu",
"侑": "you",
"侒": "an",
"侓": "lu",
"侔": "mou",
"侕": "er",
"侖": "lun",
"侗": "dong",
"侘": "cha",
"侙": "chi",
"侚": "xun",
"供": "gong",
"侜": "zhou",
"依": "yi",
"侞": "ru",
"侟": "cun",
"侠": "xia",
"価": "si",
"侢": "dai",
"侣": "lv",
"侤": "ta",
"侥": "jiao",
"侦": "zhen",
"侧": "ce",
"侨": "qiao",
"侩": "kuai",
"侪": "chai",
"侫": "ning",
"侬": "nong",
"侭": "jin",
"侮": "wu",
"侯": "hou",
"侰": "jiong",
"侱": "cheng",
"侲": "zhen",
"侳": "zuo",
"侴": "hao",
"侵": "qin",
"侶": "lv",
"侷": "ju",
"侸": "shu",
"侹": "ting",
"侺": "shen",
"侻": "tuo",
"侼": "bo",
"侽": "nan",
"侾": "xiao",
"便": "bian",
"俀": "tui",
"俁": "yu",
"係": "xi",
"促": "cu",
"俄": "e",
"俅": "qiu",
"俆": "xu",
"俇": "guang",
"俈": "ku",
"俉": "wu",
"俊": "jun",
"俋": "yi",
"俌": "fu",
"俍": "liang",
"俎": "zu",
"俏": "qiao",
"俐": "li",
"俑": "yong",
"俒": "hun",
"俓": "jing",
"俔": "qian",
"俕": "san",
"俖": "pei",
"俗": "su",
"俘": "fu",
"俙": "xi",
"俚": "li",
"俛": "fu",
"俜": "ping",
"保": "bao",
"俞": "yu",
"俟": "si",
"俠": "xia",
"信": "xin",
"俢": "xiu",
"俣": "yu",
"俤": "di",
"俥": "che",
"俦": "chou",
"俧": "zhi",
"俨": "yan",
"俩": "liang",
"俪": "li",
"俫": "lai",
"俬": "si",
"俭": "jian",
"修": "xiu",
"俯": "fu",
"俰": "huo",
"俱": "ju",
"俲": "xiao",
"俳": "pai",
"俴": "jian",
"俵": "biao",
"俶": "chu",
"俷": "fei",
"俸": "feng",
"俹": "ya",
"俺": "an",
"俻": "bei",
"俼": "yu",
"俽": "xin",
"俾": "bi",
"俿": "hu",
"倀": "chang",
"倁": "zhi",
"倂": "bing",
"倃": "jiu",
"倄": "yao",
"倅": "cui",
"倆": "liang",
"倇": "wan",
"倈": "lai",
"倉": "cang",
"倊": "zong",
"個": "ge",
"倌": "guan",
"倍": "bei",
"倎": "tian",
"倏": "shu",
"倐": "shu",
"們": "men",
"倒": "dao",
"倓": "tan",
"倔": "jue",
"倕": "chui",
"倖": "xing",
"倗": "peng",
"倘": "tang",
"候": "hou",
"倚": "yi",
"倛": "qi",
"倜": "ti",
"倝": "gan",
"倞": "liang",
"借": "jie",
"倠": "sui",
"倡": "chang",
"倢": "jie",
"倣": "fang",
"値": "zhi",
"倥": "kong",
"倦": "juan",
"倧": "zong",
"倨": "ju",
"倩": "qian",
"倪": "ni",
"倫": "lun",
"倬": "zhuo",
"倭": "wo",
"倮": "luo",
"倯": "song",
"倰": "leng",
"倱": "hun",
"倲": "dong",
"倳": "zi",
"倴": "ben",
"倵": "wu",
"倶": "ju",
"倷": "nai",
"倸": "cai",
"倹": "jian",
"债": "zhai",
"倻": "ye",
"值": "zhi",
"倽": "sha",
"倾": "qing",
"倿": "ning",
"偀": "ying",
"偁": "cheng",
"偂": "qian",
"偃": "yan",
"偄": "ruan",
"偅": "zhong",
"偆": "chun",
"假": "jia",
"偈": "ji",
"偉": "wei",
"偊": "yu",
"偋": "bing",
"偌": "ruo",
"偍": "ti",
"偎": "wei",
"偏": "pian",
"偐": "yan",
"偑": "feng",
"偒": "tang",
"偓": "wo",
"偔": "e",
"偕": "xie",
"偖": "che",
"偗": "sheng",
"偘": "kan",
"偙": "di",
"做": "zuo",
"偛": "cha",
"停": "ting",
"偝": "bei",
"偞": "xie",
"偟": "huang",
"偠": "yao",
"偡": "zhan",
"偢": "chou",
"偣": "an",
"偤": "you",
"健": "jian",
"偦": "xu",
"偧": "zha",
"偨": "ci",
"偩": "fu",
"偪": "bi",
"偫": "zhi",
"偬": "zong",
"偭": "mian",
"偮": "ji",
"偯": "yi",
"偰": "xie",
"偱": "xun",
"偲": "cai",
"偳": "duan",
"側": "ce",
"偵": "zhen",
"偶": "ou",
"偷": "tou",
"偸": "tou",
"偹": "bei",
"偺": "zan",
"偻": "lv",
"偼": "jie",
"偽": "wei",
"偾": "fen",
"偿": "chang",
"傀": "kui",
"傁": "sou",
"傂": "zhi",
"傃": "su",
"傄": "xia",
"傅": "fu",
"傆": "yuan",
"傇": "rong",
"傈": "li",
"傉": "nu",
"傊": "yun",
"傋": "jiang",
"傌": "ma",
"傍": "bang",
"傎": "dian",
"傏": "tang",
"傐": "hao",
"傑": "jie",
"傒": "xi",
"傓": "shan",
"傔": "qian",
"傕": "que",
"傖": "cang",
"傗": "chu",
"傘": "san",
"備": "bei",
"傚": "xiao",
"傛": "rong",
"傜": "yao",
"傝": "ta",
"傞": "suo",
"傟": "yang",
"傠": "fa",
"傡": "bing",
"傢": "jia",
"傣": "dai",
"傤": "zai",
"傥": "tang",
"傦": "gu",
"傧": "bin",
"储": "chu",
"傩": "nuo",
"傪": "can",
"傫": "lei",
"催": "cui",
"傭": "yong",
"傮": "zao",
"傯": "zong",
"傰": "peng",
"傱": "song",
"傲": "ao",
"傳": "chuan",
"傴": "yu",
"債": "zhai",
"傶": "qi",
"傷": "shang",
"傸": "chuang",
"傹": "jing",
"傺": "chi",
"傻": "sha",
"傼": "han",
"傽": "zhang",
"傾": "qing",
"傿": "yan",
"僀": "di",
"僁": "xie",
"僂": "lv",
"僃": "bei",
"僄": "piao",
"僅": "jin",
"僆": "lian",
"僇": "lu",
"僈": "man",
"僉": "qian",
"僊": "xian",
"僋": "tan",
"僌": "ying",
"働": "dong",
"僎": "zhuan",
"像": "xiang",
"僐": "shan",
"僑": "qiao",
"僒": "jiong",
"僓": "tui",
"僔": "zun",
"僕": "pu",
"僖": "xi",
"僗": "lao",
"僘": "chang",
"僙": "guang",
"僚": "liao",
"僛": "qi",
"僜": "cheng",
"僝": "zhan",
"僞": "wei",
"僟": "ji",
"僠": "bo",
"僡": "hui",
"僢": "chuan",
"僣": "tie",
"僤": "dan",
"僥": "jiao",
"僦": "jiu",
"僧": "seng",
"僨": "fen",
"僩": "xian",
"僪": "yu",
"僫": "e",
"僬": "jiao",
"僭": "jian",
"僮": "tong",
"僯": "lin",
"僰": "bo",
"僱": "gu",
"僲": "xian",
"僳": "su",
"僴": "xian",
"僵": "jiang",
"僶": "min",
"僷": "ye",
"僸": "jin",
"價": "jia",
"僺": "qiao",
"僻": "pi",
"僼": "feng",
"僽": "zhou",
"僾": "ai",
"僿": "sai",
"儀": "yi",
"儁": "jun",
"儂": "nong",
"儃": "chan",
"億": "yi",
"儅": "dang",
"儆": "jing",
"儇": "xuan",
"儈": "kuai",
"儉": "jian",
"儊": "chu",
"儋": "dan",
"儌": "jiao",
"儍": "sha",
"儎": "zai",
"儏": "can",
"儐": "bin",
"儑": "an",
"儒": "ru",
"儓": "tai",
"儔": "chou",
"儕": "chai",
"儖": "lan",
"儗": "ni",
"儘": "jin",
"儙": "qian",
"儚": "meng",
"儛": "wu",
"儜": "ning",
"儝": "qiong",
"儞": "ni",
"償": "chang",
"儠": "lie",
"儡": "lei",
"儢": "lv",
"儣": "kuang",
"儤": "bao",
"儥": "yu",
"儦": "biao",
"儧": "zan",
"儨": "zhi",
"儩": "si",
"優": "you",
"儫": "hao",
"儬": "qing",
"儭": "chen",
"儮": "li",
"儯": "teng",
"儰": "wei",
"儱": "long",
"儲": "chu",
"儳": "chan",
"儴": "rang",
"儵": "shu",
"儶": "hui",
"儷": "li",
"儸": "luo",
"儹": "zan",
"儺": "nuo",
"儻": "tang",
"儼": "yan",
"儽": "lei",
"儾": "nang",
"儿": "er",
"兀": "wu",
"允": "yun",
"兂": "zan",
"元": "yuan",
"兄": "xiong",
"充": "chong",
"兆": "zhao",
"兇": "xiong",
"先": "xian",
"光": "guang",
"兊": "dui",
"克": "ke",
"兌": "dui",
"免": "mian",
"兎": "tu",
"兏": "chang",
"児": "er",
"兑": "dui",
"兒": "er",
"兓": "qin",
"兔": "tu",
"兕": "si",
"兖": "yan",
"兗": "yan",
"兘": "shi",
"党": "dang",
"兜": "dou",
"兟": "shen",
"兠": "dou",
"兢": "jing",
"兤": "huang",
"入": "ru",
"兦": "wang",
"內": "nei",
"全": "quan",
"兩": "liang",
"兪": "yu",
"八": "ba",
"公": "gong",
"六": "liu",
"兮": "xi",
"兯": "han",
"兰": "lan",
"共": "gong",
"兲": "tian",
"关": "guan",
"兴": "xing",
"兵": "bing",
"其": "qi",
"具": "ju",
"典": "dian",
"兹": "zi",
"养": "yang",
"兼": "jian",
"兽": "shou",
"兾": "ji",
"兿": "yi",
"冀": "ji",
"冁": "chan",
"冂": "jiong",
"冃": "mao",
"冄": "ran",
"内": "nei",
"円": "yuan",
"冇": "mao",
"冈": "gang",
"冉": "ran",
"冊": "ce",
"冋": "jiong",
"册": "ce",
"再": "zai",
"冎": "gua",
"冏": "jiong",
"冐": "mao",
"冑": "zhou",
"冒": "mao",
"冓": "gou",
"冔": "xu",
"冕": "mian",
"冖": "mi",
"冗": "rong",
"冘": "yin",
"写": "xie",
"冚": "kan",
"军": "jun",
"农": "nong",
"冝": "yi",
"冞": "mi",
"冟": "shi",
"冠": "guan",
"冡": "meng",
"冢": "zhong",
"冣": "zui",
"冤": "yuan",
"冥": "ming",
"冦": "kou",
"冧": "lin",
"冨": "fu",
"冩": "xie",
"冪": "mi",
"冫": "bing",
"冬": "dong",
"冭": "tai",
"冮": "gang",
"冯": "feng",
"冰": "bing",
"冱": "hu",
"冲": "chong",
"决": "jue",
"冴": "ya",
"况": "kuang",
"冶": "ye",
"冷": "leng",
"冸": "pan",
"冹": "fa",
"冺": "min",
"冻": "dong",
"冼": "xian",
"冽": "lie",
"冾": "qia",
"冿": "jian",
"净": "jing",
"凁": "sou",
"凂": "mei",
"凃": "tu",
"凄": "qi",
"凅": "gu",
"准": "zhun",
"凇": "song",
"凈": "jing",
"凉": "liang",
"凊": "qing",
"凋": "diao",
"凌": "ling",
"凍": "dong",
"凎": "gan",
"减": "jian",
"凐": "yin",
"凑": "cou",
"凒": "ai",
"凓": "li",
"凔": "cang",
"凕": "ming",
"凖": "zhun",
"凗": "cui",
"凘": "si",
"凙": "duo",
"凚": "jin",
"凛": "lin",
"凜": "lin",
"凝": "ning",
"凞": "xi",
"凟": "du",
"几": "ji",
"凡": "fan",
"凢": "fan",
"凣": "fan",
"凤": "feng",
"凥": "ju",
"処": "chu",
"凧": "zheng",
"凨": "feng",
"凩": "mu",
"凪": "zhi",
"凫": "fu",
"凬": "feng",
"凭": "ping",
"凮": "feng",
"凯": "kai",
"凰": "huang",
"凱": "kai",
"凲": "gan",
"凳": "deng",
"凴": "ping",
"凵": "kan",
"凶": "xiong",
"凷": "kuai",
"凸": "tu",
"凹": "ao",
"出": "chu",
"击": "ji",
"凼": "dang",
"函": "han",
"凾": "han",
"凿": "zao",
"刀": "dao",
"刁": "diao",
"刂": "dao",
"刃": "ren",
"刄": "ren",
"刅": "chuang",
"分": "fen",
"切": "qie",
"刈": "yi",
"刉": "ji",
"刊": "kan",
"刋": "qian",
"刌": "cun",
"刍": "chu",
"刎": "wen",
"刏": "ji",
"刐": "dan",
"刑": "xing",
"划": "hua",
"刓": "wan",
"刔": "jue",
"刕": "li",
"刖": "yue",
"列": "lie",
"刘": "liu",
"则": "ze",
"刚": "gang",
"创": "chuang",
"刜": "fu",
"初": "chu",
"刞": "qu",
"刟": "diao",
"删": "shan",
"刡": "min",
"刢": "ling",
"刣": "zhong",
"判": "pan",
"別": "bie",
"刦": "jie",
"刧": "jie",
"刨": "pao",
"利": "li",
"刪": "shan",
"别": "bie",
"刬": "chan",
"刭": "jing",
"刮": "gua",
"刯": "geng",
"到": "dao",
"刱": "chuang",
"刲": "kui",
"刳": "ku",
"刴": "duo",
"刵": "er",
"制": "zhi",
"刷": "shua",
"券": "quan",
"刹": "cha",
"刺": "ci",
"刻": "ke",
"刼": "jie",
"刽": "gui",
"刾": "ci",
"刿": "gui",
"剀": "kai",
"剁": "duo",
"剂": "ji",
"剃": "ti",
"剄": "jing",
"剅": "lou",
"剆": "luo",
"則": "ze",
"剈": "yuan",
"剉": "cuo",
"削": "xiao",
"剋": "kei",
"剌": "la",
"前": "qian",
"剎": "cha",
"剏": "chuang",
"剐": "gua",
"剑": "jian",
"剒": "cuo",
"剓": "li",
"剔": "ti",
"剕": "fei",
"剖": "pou",
"剗": "chan",
"剘": "qi",
"剙": "chuang",
"剚": "zi",
"剛": "gang",
"剜": "wan",
"剝": "bao",
"剞": "ji",
"剟": "duo",
"剠": "qing",
"剡": "yan",
"剢": "du",
"剣": "jian",
"剤": "ji",
"剥": "bao",
"剦": "yan",
"剧": "ju",
"剨": "huo",
"剩": "sheng",
"剪": "jian",
"剫": "duo",
"剬": "zhi",
"剭": "wu",
"剮": "gua",
"副": "fu",
"剰": "sheng",
"剱": "jian",
"割": "ge",
"剳": "da",
"剴": "kai",
"創": "chuang",
"剶": "chuan",
"剷": "chan",
"剸": "tuan",
"剹": "lu",
"剺": "li",
"剻": "peng",
"剼": "shan",
"剽": "piao",
"剾": "kou",
"剿": "jiao",
"劀": "gua",
"劁": "qiao",
"劂": "jue",
"劃": "hua",
"劄": "zha",
"劅": "zhuo",
"劆": "lian",
"劇": "ju",
"劈": "pi",
"劉": "liu",
"劊": "gui",
"劋": "jiao",
"劌": "gui",
"劍": "jian",
"劎": "jian",
"劏": "tang",
"劐": "huo",
"劑": "ji",
"劒": "jian",
"劓": "yi",
"劔": "jian",
"劕": "zhi",
"劖": "chan",
"劗": "zuan",
"劘": "mo",
"劙": "li",
"劚": "zhu",
"力": "li",
"劜": "ya",
"劝": "quan",
"办": "ban",
"功": "gong",
"加": "jia",
"务": "wu",
"劢": "mai",
"劣": "lie",
"劤": "jin",
"劥": "keng",
"劦": "xie",
"劧": "zhi",
"动": "dong",
"助": "zhu",
"努": "nu",
"劫": "jie",
"劬": "qu",
"劭": "shao",
"劮": "yi",
"劯": "zhu",
"劰": "miao",
"励": "li",
"劲": "jin",
"劳": "lao",
"労": "lao",
"劵": "juan",
"劶": "kou",
"劷": "yang",
"劸": "wa",
"効": "xiao",
"劺": "mou",
"劻": "kuang",
"劼": "jie",
"劽": "lie",
"劾": "he",
"势": "shi",
"勀": "ke",
"勁": "jin",
"勂": "gao",
"勃": "bo",
"勄": "min",
"勅": "chi",
"勆": "lang",
"勇": "yong",
"勈": "yong",
"勉": "mian",
"勊": "ke",
"勋": "xun",
"勌": "juan",
"勍": "qing",
"勎": "lu",
"勏": "bu",
"勐": "meng",
"勑": "chi",
"勒": "le",
"勓": "kai",
"勔": "mian",
"動": "dong",
"勖": "xu",
"勗": "xu",
"勘": "kan",
"務": "wu",
"勚": "yi",
"勛": "xun",
"勜": "weng",
"勝": "sheng",
"勞": "lao",
"募": "mu",
"勠": "lu",
"勡": "piao",
"勢": "shi",
"勣": "ji",
"勤": "qin",
"勥": "jiang",
"勦": "jiao",
"勧": "quan",
"勨": "xiang",
"勩": "yi",
"勪": "qiao",
"勫": "fan",
"勬": "juan",
"勭": "tong",
"勮": "ju",
"勯": "dan",
"勰": "xie",
"勱": "mai",
"勲": "xun",
"勳": "xun",
"勴": "lv",
"勵": "li",
"勶": "che",
"勷": "rang",
"勸": "quan",
"勹": "bao",
"勺": "shao",
"勻": "yun",
"勼": "jiu",
"勽": "bao",
"勾": "gou",
"勿": "wu",
"匀": "yun",
"匂": "xiong",
"匃": "gai",
"匄": "gai",
"包": "bao",
"匆": "cong",
"匇": "yi",
"匈": "xiong",
"匉": "peng",
"匊": "ju",
"匋": "tao",
"匌": "ge",
"匍": "pu",
"匎": "e",
"匏": "pao",
"匐": "fu",
"匑": "gong",
"匒": "da",
"匓": "jiu",
"匔": "gong",
"匕": "bi",
"化": "hua",
"北": "bei",
"匘": "nao",
"匙": "chi",
"匚": "fang",
"匛": "jiu",
"匜": "yi",
"匝": "za",
"匞": "jiang",
"匟": "kang",
"匠": "jiang",
"匡": "kuang",
"匢": "hu",
"匣": "xia",
"匤": "qu",
"匥": "fan",
"匦": "gui",
"匧": "qie",
"匨": "zang",
"匩": "kuang",
"匪": "fei",
"匫": "hu",
"匬": "yu",
"匭": "gui",
"匮": "kui",
"匯": "hui",
"匰": "dan",
"匱": "kui",
"匲": "lian",
"匳": "lian",
"匴": "suan",
"匵": "du",
"匶": "jiu",
"匷": "jue",
"匸": "xi",
"匹": "pi",
"区": "qu",
"医": "yi",
"匼": "ke",
"匽": "yan",
"匾": "bian",
"匿": "ni",
"區": "qu",
"十": "shi",
"卂": "xun",
"千": "qian",
"卄": "nian",
"卅": "sa",
"卆": "zu",
"升": "sheng",
"午": "wu",
"卉": "hui",
"半": "ban",
"卋": "shi",
"卌": "xi",
"卍": "wan",
"华": "hua",
"协": "xie",
"卐": "wan",
"卑": "bei",
"卒": "zu",
"卓": "zhuo",
"協": "xie",
"单": "dan",
"卖": "mai",
"南": "nan",
"単": "dan",
"卙": "ji",
"博": "bo",
"卛": "shuai",
"卜": "bu",
"卝": "guan",
"卞": "bian",
"卟": "bu",
"占": "zhan",
"卡": "qia",
"卢": "lu",
"卣": "you",
"卤": "lu",
"卥": "xi",
"卦": "gua",
"卧": "wo",
"卨": "xie",
"卩": "jie",
"卪": "jie",
"卫": "wei",
"卬": "yang",
"卭": "qiong",
"卮": "zhi",
"卯": "mao",
"印": "yin",
"危": "wei",
"卲": "shao",
"即": "ji",
"却": "que",
"卵": "luan",
"卶": "chi",
"卷": "juan",
"卸": "xie",
"卹": "xu",
"卺": "jin",
"卻": "que",
"卼": "wu",
"卽": "ji",
"卾": "e",
"卿": "qing",
"厀": "xi",
"厁": "san",
"厂": "chang",
"厃": "wei",
"厄": "e",
"厅": "ting",
"历": "li",
"厇": "zhe",
"厈": "han",
"厉": "li",
"厊": "ya",
"压": "ya",
"厌": "yan",
"厍": "she",
"厎": "di",
"厏": "zha",
"厐": "pang",
"厑": "ya",
"厒": "qie",
"厓": "ya",
"厔": "zhi",
"厕": "ce",
"厖": "mang",
"厗": "ti",
"厘": "li",
"厙": "she",
"厚": "hou",
"厛": "ting",
"厜": "zui",
"厝": "cuo",
"厞": "fei",
"原": "yuan",
"厠": "ce",
"厡": "yuan",
"厢": "xiang",
"厣": "yan",
"厤": "li",
"厥": "jue",
"厦": "sha",
"厧": "dian",
"厨": "chu",
"厩": "jiu",
"厪": "jin",
"厫": "ao",
"厬": "gui",
"厭": "yan",
"厮": "si",
"厯": "li",
"厰": "chang",
"厱": "qian",
"厲": "li",
"厳": "yan",
"厴": "yan",
"厵": "yuan",
"厶": "si",
"厷": "gong",
"厸": "lin",
"厹": "rou",
"厺": "qu",
"去": "qu",
"厽": "lei",
"厾": "du",
"县": "xian",
"叀": "zhuan",
"叁": "san",
"参": "can",
"參": "can",
"叄": "can",
"叅": "can",
"叆": "ai",
"叇": "dai",
"又": "you",
"叉": "cha",
"及": "ji",
"友": "you",
"双": "shuang",
"反": "fan",
"収": "shou",
"叏": "guai",
"叐": "ba",
"发": "fa",
"叒": "ruo",
"叓": "li",
"叔": "shu",
"叕": "zhuo",
"取": "qu",
"受": "shou",
"变": "bian",
"叙": "xu",
"叚": "jia",
"叛": "pan",
"叜": "sou",
"叝": "ji",
"叞": "wei",
"叟": "sou",
"叠": "die",
"叡": "rui",
"叢": "cong",
"口": "kou",
"古": "gu",
"句": "ju",
"另": "ling",
"叧": "gua",
"叨": "tao",
"叩": "kou",
"只": "zhi",
"叫": "jiao",
"召": "zhao",
"叭": "ba",
"叮": "ding",
"可": "ke",
"台": "tai",
"叱": "chi",
"史": "shi",
"右": "you",
"叴": "qiu",
"叵": "po",
"叶": "ye",
"号": "hao",
"司": "si",
"叹": "tan",
"叺": "chi",
"叻": "le",
"叼": "diao",
"叽": "ji",
"叿": "hong",
"吀": "mie",
"吁": "xu",
"吂": "mang",
"吃": "chi",
"各": "ge",
"吅": "xuan",
"吆": "yao",
"吇": "zi",
"合": "he",
"吉": "ji",
"吊": "diao",
"吋": "dou",
"同": "tong",
"名": "ming",
"后": "hou",
"吏": "li",
"吐": "tu",
"向": "xiang",
"吒": "zha",
"吓": "xia",
"吔": "ye",
"吕": "lv",
"吖": "ya",
"吗": "ma",
"吘": "ou",
"吙": "huo",
"吚": "yi",
"君": "jun",
"吜": "chou",
"吝": "lin",
"吞": "tun",
"吟": "yin",
"吠": "fei",
"吡": "pi",
"吢": "qin",
"吣": "qin",
"吤": "jie",
"吥": "bu",
"否": "fou",
"吧": "ba",
"吨": "dun",
"吩": "fen",
"吪": "e",
"含": "han",
"听": "ting",
"吭": "hang",
"吮": "shun",
"启": "qi",
"吰": "hong",
"吱": "zhi",
"吲": "yin",
"吳": "wu",
"吴": "wu",
"吵": "chao",
"吶": "na",
"吷": "xue",
"吸": "xi",
"吹": "chui",
"吺": "dou",
"吻": "wen",
"吼": "hou",
"吽": "hou",
"吾": "wu",
"吿": "gao",
"呀": "ya",
"呁": "jun",
"呂": "lv",
"呃": "e",
"呄": "ge",
"呅": "wen",
"呆": "dai",
"呇": "qi",
"呈": "cheng",
"呉": "wu",
"告": "gao",
"呋": "fu",
"呌": "jiao",
"呍": "hong",
"呎": "chi",
"呏": "sheng",
"呐": "na",
"呑": "tun",
"呒": "fu",
"呓": "yi",
"呔": "dai",
"呕": "ou",
"呖": "li",
"呗": "bei",
"员": "yuan",
"呙": "wai",
"呚": "hua",
"呛": "qiang",
"呜": "wu",
"呝": "e",
"呞": "shi",
"呟": "juan",
"呠": "pen",
"呡": "wen",
"呢": "ne",
"呣": "mou",
"呤": "ling",
"呥": "ran",
"呦": "you",
"呧": "di",
"周": "zhou",
"呩": "shi",
"呪": "zhou",
"呫": "tie",
"呬": "xi",
"呭": "yi",
"呮": "qi",
"呯": "ping",
"呰": "zi",
"呱": "gua",
"呲": "zi",
"味": "wei",
"呴": "xu",
"呵": "he",
"呶": "nao",
"呷": "xia",
"呸": "pei",
"呹": "yi",
"呺": "xiao",
"呻": "shen",
"呼": "hu",
"命": "ming",
"呾": "da",
"呿": "qu",
"咀": "ju",
"咁": "xian",
"咂": "za",
"咃": "tuo",
"咄": "duo",
"咅": "pou",
"咆": "pao",
"咇": "bi",
"咈": "fu",
"咉": "yang",
"咊": "he",
"咋": "za",
"和": "he",
"咍": "hai",
"咎": "jiu",
"咏": "yong",
"咐": "fu",
"咑": "da",
"咒": "zhou",
"咓": "wa",
"咔": "ka",
"咕": "gu",
"咖": "ka",
"咗": "zuo",
"咘": "bu",
"咙": "long",
"咚": "dong",
"咛": "ning",
"咜": "tuo",
"咝": "si",
"咞": "xian",
"咟": "huo",
"咠": "qi",
"咡": "er",
"咢": "e",
"咣": "guang",
"咤": "zha",
"咥": "die",
"咦": "yi",
"咧": "lie",
"咨": "zi",
"咩": "mie",
"咪": "mi",
"咫": "zhi",
"咬": "yao",
"咭": "ji",
"咮": "zhou",
"咯": "ka",
"咰": "shu",
"咱": "zan",
"咲": "xiao",
"咳": "ke",
"咴": "hui",
"咵": "kua",
"咶": "huai",
"咷": "tao",
"咸": "xian",
"咹": "e",
"咺": "xuan",
"咻": "xiu",
"咼": "wai",
"咽": "yan",
"咾": "lao",
"咿": "yi",
"哀": "ai",
"品": "pin",
"哂": "shen",
"哃": "tong",
"哄": "hong",
"哅": "xiong",
"哆": "duo",
"哇": "wa",
"哈": "ha",
"哉": "zai",
"哊": "you",
"哋": "die",
"哌": "pai",
"响": "xiang",
"哎": "ai",
"哏": "gen",
"哐": "kuang",
"哑": "ya",
"哒": "da",
"哓": "xiao",
"哔": "bi",
"哕": "yue",
"哖": "nian",
"哗": "hua",
"哘": "xing",
"哙": "kuai",
"哚": "duo",
"哜": "ji",
"哝": "nong",
"哞": "mou",
"哟": "yo",
"哠": "hao",
"員": "yuan",
"哢": "long",
"哣": "pou",
"哤": "mang",
"哥": "ge",
"哦": "o",
"哧": "chi",
"哨": "shao",
"哩": "li",
"哪": "na",
"哫": "zu",
"哬": "he",
"哭": "ku",
"哮": "xiao",
"哯": "xian",
"哰": "lao",
"哱": "po",
"哲": "zhe",
"哳": "zha",
"哴": "liang",
"哵": "ba",
"哶": "mie",
"哷": "lie",
"哸": "sui",
"哹": "fu",
"哺": "bu",
"哻": "han",
"哼": "heng",
"哽": "geng",
"哾": "chuo",
"哿": "ge",
"唀": "you",
"唁": "yan",
"唂": "gu",
"唃": "gu",
"唄": "bei",
"唅": "han",
"唆": "suo",
"唇": "chun",
"唈": "yi",
"唉": "ai",
"唊": "jia",
"唋": "tu",
"唌": "dan",
"唍": "wan",
"唎": "li",
"唏": "xi",
"唐": "tang",
"唑": "zuo",
"唒": "qiu",
"唓": "che",
"唔": "wu",
"唕": "zao",
"唖": "ya",
"唗": "dou",
"唘": "qi",
"唙": "di",
"唚": "qin",
"唛": "mai",
"唝": "gong",
"唞": "dou",
"唠": "lao",
"唡": "liang",
"唢": "suo",
"唣": "zao",
"唤": "huan",
"唥": "leng",
"唦": "sha",
"唧": "ji",
"唨": "zu",
"唩": "wo",
"唪": "feng",
"唫": "jin",
"唬": "hu",
"唭": "qi",
"售": "shou",
"唯": "wei",
"唰": "shua",
"唱": "chang",
"唲": "er",
"唳": "li",
"唴": "qiang",
"唵": "an",
"唶": "jie",
"唷": "yo",
"唸": "nian",
"唹": "yu",
"唺": "tian",
"唻": "lai",
"唼": "sha",
"唽": "xi",
"唾": "tuo",
"唿": "hu",
"啀": "ai",
"啁": "zhou",
"啂": "gou",
"啃": "ken",
"啄": "zhuo",
"啅": "zhuo",
"商": "shang",
"啇": "di",
"啈": "heng",
"啉": "lan",
"啊": "a",
"啋": "cai",
"啌": "qiang",
"啍": "zhun",
"啎": "wu",
"問": "wen",
"啐": "cui",
"啑": "sha",
"啒": "gu",
"啓": "qi",
"啔": "qi",
"啕": "tao",
"啖": "dan",
"啗": "dan",
"啘": "yue",
"啙": "zi",
"啚": "bi",
"啛": "cui",
"啜": "chuo",
"啝": "he",
"啞": "ya",
"啟": "qi",
"啠": "zhe",
"啡": "fei",
"啢": "liang",
"啣": "xian",
"啤": "pi",
"啥": "sha",
"啦": "la",
"啧": "ze",
"啨": "qing",
"啩": "gua",
"啪": "pa",
"啫": "zhe",
"啬": "se",
"啭": "zhuan",
"啮": "nie",
"啯": "guo",
"啰": "luo",
"啱": "yan",
"啲": "di",
"啳": "quan",
"啴": "tan",
"啵": "bo",
"啶": "ding",
"啷": "lang",
"啸": "xiao",
"啹": "ju",
"啺": "tang",
"啻": "chi",
"啼": "ti",
"啽": "an",
"啾": "jiu",
"啿": "dan",
"喀": "ka",
"喁": "yong",
"喂": "wei",
"喃": "nan",
"善": "shan",
"喅": "yu",
"喆": "zhe",
"喇": "la",
"喈": "jie",
"喉": "hou",
"喊": "han",
"喋": "die",
"喌": "zhou",
"喍": "chai",
"喎": "wai",
"喏": "nuo",
"喐": "huo",
"喑": "yin",
"喒": "zan",
"喓": "yao",
"喔": "o",
"喕": "mian",
"喖": "hu",
"喗": "yun",
"喘": "chuan",
"喙": "hui",
"喚": "huan",
"喛": "huan",
"喜": "xi",
"喝": "he",
"喞": "ji",
"喟": "kui",
"喠": "zhong",
"喡": "wei",
"喢": "sha",
"喣": "xu",
"喤": "huang",
"喥": "duo",
"喦": "yan",
"喧": "xuan",
"喨": "liang",
"喩": "yu",
"喪": "sang",
"喫": "chi",
"喬": "qiao",
"喭": "yan",
"單": "dan",
"喯": "pen",
"喰": "can",
"喱": "li",
"喲": "yo",
"喳": "zha",
"喴": "wei",
"喵": "miao",
"営": "ying",
"喷": "pen",
"喹": "kui",
"喺": "xi",
"喻": "yu",
"喼": "jie",
"喽": "lou",
"喾": "ku",
"喿": "zao",
"嗀": "hu",
"嗁": "ti",
"嗂": "yao",
"嗃": "he",
"嗄": "sha",
"嗅": "xiu",
"嗆": "qiang",
"嗇": "se",
"嗈": "yong",
"嗉": "su",
"嗊": "gong",
"嗋": "xie",
"嗌": "yi",
"嗍": "suo",
"嗎": "ma",
"嗏": "cha",
"嗐": "hai",
"嗑": "ke",
"嗒": "ta",
"嗓": "sang",
"嗔": "chen",
"嗕": "ru",
"嗖": "sou",
"嗗": "wa",
"嗘": "ji",
"嗙": "beng",
"嗚": "wu",
"嗛": "xian",
"嗜": "shi",
"嗝": "ge",
"嗞": "zi",
"嗟": "jie",
"嗠": "lao",
"嗡": "weng",
"嗢": "wa",
"嗣": "si",
"嗤": "chi",
"嗥": "hao",
"嗦": "suo",
"嗨": "hai",
"嗩": "suo",
"嗪": "qin",
"嗫": "nie",
"嗬": "he",
"嗭": "zi",
"嗮": "sai",
"嗯": "ňg",
"嗰": "ge",
"嗱": "na",
"嗲": "dia",
"嗳": "ai",
"嗴": "qiang",
"嗵": "tong",
"嗶": "bi",
"嗷": "ao",
"嗸": "ao",
"嗹": "lian",
"嗺": "zui",
"嗻": "zhe",
"嗼": "mo",
"嗽": "sou",
"嗾": "sou",
"嗿": "tan",
"嘀": "di",
"嘁": "qi",
"嘂": "jiao",
"嘃": "chong",
"嘄": "jiao",
"嘅": "kai",
"嘆": "tan",
"嘇": "shan",
"嘈": "cao",
"嘉": "jia",
"嘊": "ai",
"嘋": "xiao",
"嘌": "piao",
"嘍": "lou",
"嘎": "ga",
"嘏": "gu",
"嘐": "xiao",
"嘑": "hu",
"嘒": "hui",
"嘓": "guo",
"嘔": "ou",
"嘕": "xian",
"嘖": "ze",
"嘗": "chang",
"嘘": "xu",
"嘙": "po",
"嘚": "de",
"嘛": "ma",
"嘜": "ma",
"嘝": "hu",
"嘞": "lei",
"嘟": "du",
"嘠": "ga",
"嘡": "tang",
"嘢": "ye",
"嘣": "beng",
"嘤": "ying",
"嘥": "sai",
"嘦": "jiao",
"嘧": "mi",
"嘨": "xiao",
"嘩": "hua",
"嘪": "mai",
"嘫": "ran",
"嘬": "zuo",
"嘭": "peng",
"嘮": "lao",
"嘯": "xiao",
"嘰": "ji",
"嘱": "zhu",
"嘲": "chao",
"嘳": "kui",
"嘴": "zui",
"嘵": "xiao",
"嘶": "si",
"嘷": "hao",
"嘸": "fu",
"嘹": "liao",
"嘺": "qiao",
"嘻": "xi",
"嘼": "chu",
"嘽": "tan",
"嘾": "dan",
"嘿": "hei",
"噀": "xun",
"噁": "e",
"噂": "zun",
"噃": "fan",
"噄": "chi",
"噅": "hui",
"噆": "zan",
"噇": "chuang",
"噈": "cu",
"噉": "dan",
"噊": "jue",
"噋": "tun",
"噌": "ceng",
"噍": "jiao",
"噎": "ye",
"噏": "xi",
"噐": "qi",
"噑": "hao",
"噒": "lian",
"噓": "xu",
"噔": "deng",
"噕": "hui",
"噖": "yin",
"噗": "pu",
"噘": "jue",
"噙": "qin",
"噚": "xun",
"噛": "nie",
"噜": "lu",
"噝": "si",
"噞": "yan",
"噟": "ying",
"噠": "da",
"噡": "zhan",
"噢": "o",
"噣": "zhou",
"噤": "jin",
"噥": "nong",
"噦": "yue",
"噧": "xie",
"器": "qi",
"噩": "e",
"噪": "zao",
"噫": "yi",
"噬": "shi",
"噭": "jiao",
"噮": "yuan",
"噯": "ai",
"噰": "yong",
"噱": "jue",
"噲": "kuai",
"噳": "yu",
"噴": "pen",
"噵": "dao",
"噶": "ga",
"噷": "xin",
"噸": "dun",
"噹": "dang",
"噺": "xin",
"噻": "sai",
"噼": "pi",
"噽": "pi",
"噾": "yin",
"噿": "zui",
"嚀": "ning",
"嚁": "di",
"嚂": "lan",
"嚃": "ta",
"嚄": "huo",
"嚅": "ru",
"嚆": "hao",
"嚇": "he",
"嚈": "yan",
"嚉": "duo",
"嚊": "xiu",
"嚋": "zhou",
"嚌": "ji",
"嚍": "jin",
"嚎": "hao",
"嚏": "ti",
"嚐": "chang",
"嚑": "xun",
"嚒": "me",
"嚓": "ca",
"嚔": "ti",
"嚕": "lu",
"嚖": "hui",
"嚗": "bo",
"嚘": "you",
"嚙": "nie",
"嚚": "yin",
"嚛": "hu",
"嚜": "mei",
"嚝": "hong",
"嚞": "zhe",
"嚟": "li",
"嚠": "liu",
"嚡": "xie",
"嚢": "nang",
"嚣": "xiao",
"嚤": "mo",
"嚥": "yan",
"嚦": "li",
"嚧": "lu",
"嚨": "long",
"嚩": "po",
"嚪": "dan",
"嚫": "chen",
"嚬": "pin",
"嚭": "pi",
"嚮": "xiang",
"嚯": "huo",
"嚰": "me",
"嚱": "xi",
"嚲": "duo",
"嚳": "ku",
"嚴": "yan",
"嚵": "chan",
"嚶": "ying",
"嚷": "rang",
"嚸": "dian",
"嚹": "la",
"嚺": "ta",
"嚻": "xiao",
"嚼": "jiao",
"嚽": "chuo",
"嚾": "huan",
"嚿": "huo",
"囀": "zhuan",
"囁": "nie",
"囂": "xiao",
"囃": "za",
"囄": "li",
"囅": "chan",
"囆": "chai",
"囇": "li",
"囈": "yi",
"囉": "luo",
"囊": "nang",
"囋": "za",
"囌": "su",
"囍": "xi",
"囎": "zeng",
"囏": "jian",
"囐": "yan",
"囑": "zhu",
"囒": "lan",
"囓": "nie",
"囔": "nang",
"囖": "luo",
"囗": "wei",
"囘": "hui",
"囙": "yin",
"囚": "qiu",
"四": "si",
"囜": "nin",
"囝": "jian",
"回": "hui",
"囟": "xin",
"因": "yin",
"囡": "nan",
"团": "tuan",
"団": "tuan",
"囤": "dun",
"囥": "kang",
"囦": "yuan",
"囧": "jiong",
"囨": "pian",
"囩": "yun",
"囪": "cong",
"囫": "hu",
"囬": "hui",
"园": "yuan",
"囮": "e",
"囯": "guo",
"困": "kun",
"囱": "cong",
"囲": "wei",
"図": "tu",
"围": "wei",
"囵": "lun",
"囶": "guo",
"囷": "qun",
"囸": "ri",
"囹": "ling",
"固": "gu",
"囻": "guo",
"囼": "tai",
"国": "guo",
"图": "tu",
"囿": "you",
"圀": "guo",
"圁": "yin",
"圂": "hun",
"圃": "pu",
"圄": "yu",
"圅": "han",
"圆": "yuan",
"圇": "lun",
"圈": "quan",
"圉": "yu",
"圊": "qing",
"國": "guo",
"圌": "chuan",
"圍": "wei",
"圎": "yuan",
"圏": "quan",
"圐": "ku",
"圑": "pu",
"園": "yuan",
"圓": "yuan",
"圔": "ya",
"圕": "tuan",
"圖": "tu",
"圗": "tu",
"團": "tuan",
"圙": "lve",
"圚": "hui",
"圛": "yi",
"圜": "huan",
"圝": "luan",
"圞": "luan",
"土": "tu",
"圠": "ya",
"圡": "tu",
"圢": "ting",
"圣": "sheng",
"圤": "pu",
"圥": "lu",
"圦": "kuai",
"圧": "ya",
"在": "zai",
"圩": "wei",
"圪": "ge",
"圫": "yu",
"圬": "wu",
"圭": "gui",
"圮": "pi",
"圯": "yi",
"地": "di",
"圱": "qian",
"圲": "qian",
"圳": "zhen",
"圴": "zhuo",
"圵": "dang",
"圶": "qia",
"圷": "xia",
"圸": "shan",
"圹": "kuang",
"场": "chang",
"圻": "qi",
"圼": "nie",
"圽": "mo",
"圾": "ji",
"圿": "jia",
"址": "zhi",
"坁": "zhi",
"坂": "ban",
"坃": "xun",
"坄": "yi",
"坅": "qin",
"坆": "mei",
"均": "jun",
"坈": "rong",
"坉": "tun",
"坊": "fang",
"坋": "ben",
"坌": "ben",
"坍": "tan",
"坎": "kan",
"坏": "huai",
"坐": "zuo",
"坑": "keng",
"坒": "bi",
"坓": "jing",
"坔": "di",
"坕": "jing",
"坖": "ji",
"块": "kuai",
"坘": "di",
"坙": "jing",
"坚": "jian",
"坛": "tan",
"坜": "li",
"坝": "ba",
"坞": "wu",
"坟": "fen",
"坠": "zhui",
"坡": "po",
"坢": "pan",
"坣": "tang",
"坤": "kun",
"坥": "qu",
"坦": "tan",
"坧": "zhi",
"坨": "tuo",
"坩": "gan",
"坪": "ping",
"坫": "dian",
"坬": "gua",
"坭": "ni",
"坮": "tai",
"坯": "pi",
"坰": "jiong",
"坱": "yang",
"坲": "fo",
"坳": "ao",
"坴": "lu",
"坵": "qiu",
"坶": "mu",
"坷": "ke",
"坸": "gou",
"坹": "xue",
"坺": "fa",
"坻": "di",
"坼": "che",
"坽": "ling",
"坾": "zhu",
"坿": "fu",
"垀": "hu",
"垁": "zhi",
"垂": "chui",
"垃": "la",
"垄": "long",
"垅": "long",
"垆": "lu",
"垇": "ao",
"垈": "dai",
"垉": "pao",
"垊": "min",
"型": "xing",
"垌": "dong",
"垍": "ji",
"垎": "he",
"垏": "lv",
"垐": "ci",
"垑": "chi",
"垒": "lei",
"垓": "gai",
"垔": "yin",
"垕": "hou",
"垖": "dui",
"垗": "zhao",
"垘": "fu",
"垙": "guang",
"垚": "yao",
"垛": "duo",
"垜": "duo",
"垝": "gui",
"垞": "cha",
"垟": "yang",
"垠": "yin",
"垡": "fa",
"垢": "gou",
"垣": "yuan",
"垤": "die",
"垥": "xie",
"垦": "ken",
"垧": "shang",
"垨": "shou",
"垩": "e",
"垪": "bing",
"垫": "dian",
"垬": "hong",
"垭": "ya",
"垮": "kua",
"垯": "da",
"垰": "ka",
"垱": "dang",
"垲": "kai",
"垳": "hang",
"垴": "nao",
"垵": "an",
"垶": "xing",
"垷": "xian",
"垸": "yuan",
"垹": "bang",
"垺": "pou",
"垻": "ba",
"垼": "yi",
"垽": "yin",
"垾": "han",
"垿": "xu",
"埀": "chui",
"埁": "cen",
"埂": "geng",
"埃": "ai",
"埄": "beng",
"埅": "di",
"埆": "que",
"埇": "yong",
"埈": "jun",
"埉": "xia",
"埊": "di",
"埋": "mai",
"埌": "lang",
"埍": "juan",
"城": "cheng",
"埏": "yan",
"埐": "qin",
"埑": "zhe",
"埒": "lie",
"埓": "lie",
"埔": "pu",
"埕": "cheng",
"埖": "hua",
"埗": "bu",
"埘": "shi",
"埙": "xun",
"埚": "guo",
"埛": "jiong",
"埜": "ye",
"埝": "nian",
"埞": "di",
"域": "yu",
"埠": "bu",
"埡": "ya",
"埢": "quan",
"埣": "sui",
"埤": "pi",
"埥": "qing",
"埦": "wan",
"埧": "ju",
"埨": "lun",
"埩": "zheng",
"埪": "kong",
"埫": "chong",
"埬": "dong",
"埭": "dai",
"埮": "tan",
"埯": "an",
"埰": "cai",
"埱": "chu",
"埲": "beng",
"埳": "xian",
"埴": "zhi",
"埵": "duo",
"埶": "yi",
"執": "zhi",
"埸": "yi",
"培": "pei",
"基": "ji",
"埻": "zhun",
"埼": "qi",
"埽": "sao",
"埾": "ju",
"埿": "ni",
"堀": "ku",
"堁": "ke",
"堂": "tang",
"堃": "kun",
"堄": "ni",
"堅": "jian",
"堆": "dui",
"堇": "jin",
"堈": "gang",
"堉": "yu",
"堊": "e",
"堋": "peng",
"堌": "gu",
"堍": "tu",
"堎": "leng",
"堏": "fang",
"堐": "ya",
"堑": "qian",
"堒": "kun",
"堓": "an",
"堔": "shen",
"堕": "duo",
"堖": "nao",
"堗": "tu",
"堘": "cheng",
"堙": "yin",
"堚": "huan",
"堛": "bi",
"堜": "lian",
"堝": "guo",
"堞": "die",
"堟": "zhuan",
"堠": "hou",
"堡": "bao",
"堢": "bao",
"堣": "yu",
"堤": "di",
"堥": "mao",
"堦": "jie",
"堧": "ruan",
"堨": "e",
"堩": "geng",
"堪": "kan",
"堫": "zong",
"堬": "yu",
"堭": "huang",
"堮": "e",
"堯": "yao",
"堰": "yan",
"報": "bao",
"堲": "ji",
"堳": "mei",
"場": "chang",
"堵": "du",
"堶": "tuo",
"堷": "yin",
"堸": "feng",
"堹": "zhong",
"堺": "jie",
"堻": "jin",
"堼": "feng",
"堽": "gang",
"堾": "chuan",
"堿": "jian",
"塀": "ping",
"塁": "lei",
"塂": "jiang",
"塃": "huang",
"塄": "leng",
"塅": "duan",
"塆": "wan",
"塇": "xuan",
"塈": "ji",
"塉": "ji",
"塊": "kuai",
"塋": "ying",
"塌": "ta",
"塍": "cheng",
"塎": "yong",
"塏": "kai",
"塐": "su",
"塑": "su",
"塒": "shi",
"塓": "mi",
"塔": "ta",
"塕": "weng",
"塖": "cheng",
"塗": "tu",
"塘": "tang",
"塙": "que",
"塚": "zhong",
"塛": "li",
"塜": "peng",
"塝": "bang",
"塞": "sai",
"塟": "zang",
"塠": "dui",
"塡": "tian",
"塢": "wu",
"塣": "zheng",
"塤": "xun",
"塥": "ge",
"塦": "zhen",
"塧": "ai",
"塨": "gong",
"塩": "yan",
"塪": "xian",
"填": "tian",
"塬": "yuan",
"塭": "wen",
"塮": "xie",
"塯": "liu",
"塰": "hai",
"塱": "lang",
"塲": "chang",
"塳": "peng",
"塴": "beng",
"塵": "chen",
"塶": "lu",
"塷": "lu",
"塸": "ou",
"塹": "qian",
"塺": "mei",
"塻": "mo",
"塼": "zhuan",
"塽": "shuang",
"塾": "shu",
"塿": "lou",
"墀": "chi",
"墁": "man",
"墂": "biao",
"境": "jing",
"墄": "qi",
"墅": "shu",
"墆": "zhi",
"墇": "zhang",
"墈": "kan",
"墉": "yong",
"墊": "dian",
"墋": "chen",
"墌": "zhi",
"墍": "xi",
"墎": "guo",
"墏": "qiang",
"墐": "jin",
"墑": "di",
"墒": "shang",
"墓": "mu",
"墔": "cui",
"墕": "yan",
"墖": "ta",
"増": "zeng",
"墘": "qian",
"墙": "qiang",
"墚": "liang",
"墛": "wei",
"墜": "zhui",
"墝": "qiao",
"增": "zeng",
"墟": "xu",
"墠": "shan",
"墡": "shan",
"墢": "fa",
"墣": "pu",
"墤": "kuai",
"墥": "tuan",
"墦": "fan",
"墧": "qiao",
"墨": "mo",
"墩": "dun",
"墪": "dun",
"墫": "zun",
"墬": "di",
"墭": "sheng",
"墮": "duo",
"墯": "duo",
"墰": "tan",
"墱": "deng",
"墲": "wu",
"墳": "fen",
"墴": "huang",
"墵": "tan",
"墶": "da",
"墷": "ye",
"墸": "zhu",
"墹": "jian",
"墺": "ao",
"墻": "qiang",
"墼": "ji",
"墽": "qiao",
"墾": "ken",
"墿": "yi",
"壀": "pi",
"壁": "bi",
"壂": "dian",
"壃": "jiang",
"壄": "ye",
"壅": "yong",
"壆": "xue",
"壇": "tan",
"壈": "lan",
"壉": "ju",
"壊": "huai",
"壋": "dang",
"壌": "rang",
"壍": "qian",
"壎": "xun",
"壏": "xian",
"壐": "xi",
"壑": "he",
"壒": "ai",
"壓": "ya",
"壔": "dao",
"壕": "hao",
"壖": "ruan",
"壗": "jin",
"壘": "lei",
"壙": "kuang",
"壚": "lu",
"壛": "yan",
"壜": "tan",
"壝": "wei",
"壞": "huai",
"壟": "long",
"壠": "long",
"壡": "rui",
"壢": "li",
"壣": "lin",
"壤": "rang",
"壥": "chan",
"壦": "xun",
"壧": "yan",
"壨": "lei",
"壩": "ba",
"壪": "wan",
"士": "shi",
"壬": "ren",
"壭": "san",
"壮": "zhuang",
"壯": "zhuang",
"声": "sheng",
"壱": "yi",
"売": "mai",
"壳": "ke",
"壴": "zhu",
"壵": "zhuang",
"壶": "hu",
"壷": "hu",
"壸": "kun",
"壹": "yi",
"壺": "hu",
"壻": "xu",
"壼": "kun",
"壽": "shou",
"壾": "mang",
"壿": "cun",
"夀": "shou",
"夁": "yi",
"夂": "zhi",
"夃": "gu",
"处": "chu",
"夅": "jiang",
"夆": "feng",
"备": "bei",
"夈": "zhai",
"変": "bian",
"夊": "sui",
"夋": "qun",
"夌": "ling",
"复": "fu",
"夎": "cuo",
"夏": "xia",
"夐": "xiong",
"夑": "xie",
"夒": "nao",
"夓": "xia",
"夔": "kui",
"夕": "xi",
"外": "wai",
"夗": "yuan",
"夘": "mao",
"夙": "su",
"多": "duo",
"夛": "duo",
"夜": "ye",
"夝": "qing",
"够": "gou",
"夠": "gou",
"夡": "qi",
"夢": "meng",
"夣": "meng",
"夤": "yin",
"夥": "huo",
"夦": "chen",
"大": "da",
"夨": "ce",
"天": "tian",
"太": "tai",
"夫": "fu",
"夬": "guai",
"夭": "yao",
"央": "yang",
"夯": "hang",
"夰": "gao",
"失": "shi",
"夲": "tao",
"夳": "tai",
"头": "tou",
"夵": "yan",
"夶": "bi",
"夷": "yi",
"夸": "kua",
"夹": "jia",
"夺": "duo",
"夻": "hua",
"夼": "kuang",
"夽": "yun",
"夾": "jia",
"夿": "ba",
"奀": "en",
"奁": "lian",
"奂": "huan",
"奃": "di",
"奄": "yan",
"奅": "pao",
"奆": "juan",
"奇": "qi",
"奈": "nai",
"奉": "feng",
"奊": "xie",
"奋": "fen",
"奌": "dian",
"奍": "quan",
"奎": "kui",
"奏": "zou",
"奐": "huan",
"契": "qi",
"奒": "kai",
"奓": "she",
"奔": "ben",
"奕": "yi",
"奖": "jiang",
"套": "tao",
"奘": "zang",
"奙": "ben",
"奚": "xi",
"奛": "huang",
"奜": "fei",
"奝": "diao",
"奞": "xun",
"奟": "beng",
"奠": "dian",
"奡": "ao",
"奢": "she",
"奣": "weng",
"奤": "po",
"奥": "ao",
"奦": "wu",
"奧": "ao",
"奨": "jiang",
"奩": "lian",
"奪": "duo",
"奫": "yun",
"奬": "jiang",
"奭": "shi",
"奮": "fen",
"奯": "huo",
"奰": "bi",
"奱": "luan",
"奲": "duo",
"女": "nv",
"奴": "nu",
"奵": "ding",
"奶": "nai",
"奷": "qian",
"奸": "jian",
"她": "ta",
"奺": "jiu",
"奻": "nuan",
"奼": "cha",
"好": "hao",
"奾": "xian",
"奿": "fan",
"妀": "ji",
"妁": "shuo",
"如": "ru",
"妃": "fei",
"妄": "wang",
"妅": "hong",
"妆": "zhuang",
"妇": "fu",
"妈": "ma",
"妉": "dan",
"妊": "ren",
"妋": "fu",
"妌": "jing",
"妍": "yan",
"妎": "hai",
"妏": "wen",
"妐": "zhong",
"妑": "pa",
"妒": "du",
"妓": "ji",
"妔": "keng",
"妕": "zhong",
"妖": "yao",
"妗": "jin",
"妘": "yun",
"妙": "miao",
"妚": "fou",
"妛": "chi",
"妜": "yue",
"妝": "zhuang",
"妞": "niu",
"妟": "yan",
"妠": "na",
"妡": "xin",
"妢": "fen",
"妣": "bi",
"妤": "yu",
"妥": "tuo",
"妦": "feng",
"妧": "wan",
"妨": "fang",
"妩": "wu",
"妪": "yu",
"妫": "gui",
"妬": "du",
"妭": "ba",
"妮": "ni",
"妯": "zhou",
"妰": "zhuo",
"妱": "zhao",
"妲": "da",
"妳": "ni",
"妴": "yuan",
"妵": "tou",
"妶": "xian",
"妷": "zhi",
"妸": "e",
"妹": "mei",
"妺": "mo",
"妻": "qi",
"妼": "bi",
"妽": "shen",
"妾": "qie",
"妿": "e",
"姀": "he",
"姁": "xu",
"姂": "fa",
"姃": "zheng",
"姄": "min",
"姅": "ban",
"姆": "mu",
"姇": "fu",
"姈": "ling",
"姉": "zi",
"姊": "zi",
"始": "shi",
"姌": "ran",
"姍": "shan",
"姎": "yang",
"姏": "man",
"姐": "jie",
"姑": "gu",
"姒": "si",
"姓": "xing",
"委": "wei",
"姕": "zi",
"姖": "ju",
"姗": "shan",
"姘": "pin",
"姙": "ren",
"姚": "yao",
"姛": "dong",
"姜": "jiang",
"姝": "shu",
"姞": "ji",
"姟": "gai",
"姠": "xiang",
"姡": "hua",
"姢": "juan",
"姣": "jiao",
"姤": "gou",
"姥": "mu",
"姦": "jian",
"姧": "jian",
"姨": "yi",
"姩": "nian",
"姪": "zhi",
"姫": "zhen",
"姬": "ji",
"姭": "xian",
"姮": "heng",
"姯": "guang",
"姰": "jun",
"姱": "kua",
"姲": "yan",
"姳": "ming",
"姴": "lie",
"姵": "pei",
"姶": "e",
"姷": "you",
"姸": "yan",
"姹": "cha",
"姺": "shen",
"姻": "yin",
"姼": "shi",
"姽": "gui",
"姾": "quan",
"姿": "zi",
"娀": "song",
"威": "wei",
"娂": "hong",
"娃": "wa",
"娄": "lou",
"娅": "ya",
"娆": "rao",
"娇": "jiao",
"娈": "luan",
"娉": "ping",
"娊": "xian",
"娋": "shao",
"娌": "li",
"娍": "cheng",
"娎": "xie",
"娏": "mang",
"娐": "fu",
"娑": "suo",
"娒": "wu",
"娓": "wei",
"娔": "ke",
"娕": "chuo",
"娖": "chuo",
"娗": "ting",
"娘": "niang",
"娙": "xing",
"娚": "nan",
"娛": "yu",
"娜": "na",
"娝": "pou",
"娞": "nei",
"娟": "juan",
"娠": "shen",
"娡": "zhi",
"娢": "han",
"娣": "di",
"娤": "zhuang",
"娥": "e",
"娦": "pin",
"娧": "tui",
"娨": "man",
"娩": "mian",
"娪": "wu",
"娫": "yan",
"娬": "wu",
"娭": "xi",
"娮": "yan",
"娯": "yu",
"娰": "si",
"娱": "yu",
"娲": "wa",
"娳": "li",
"娴": "xian",
"娵": "ju",
"娶": "qu",
"娷": "zhui",
"娸": "qi",
"娹": "xian",
"娺": "zhuo",
"娻": "dong",
"娼": "chang",
"娽": "lu",
"娾": "ai",
"娿": "e",
"婀": "e",
"婁": "lou",
"婂": "mian",
"婃": "cong",
"婄": "pou",
"婅": "ju",
"婆": "po",
"婇": "cai",
"婈": "ling",
"婉": "wan",
"婊": "biao",
"婋": "xiao",
"婌": "shu",
"婍": "qi",
"婎": "hui",
"婏": "fu",
"婐": "wo",
"婑": "wo",
"婒": "tan",
"婓": "fei",
"婔": "fei",
"婕": "jie",
"婖": "tian",
"婗": "ni",
"婘": "quan",
"婙": "jing",
"婚": "hun",
"婛": "jing",
"婜": "qian",
"婝": "dian",
"婞": "xing",
"婟": "hu",
"婠": "wan",
"婡": "lai",
"婢": "bi",
"婣": "yin",
"婤": "zhou",
"婥": "chuo",
"婦": "fu",
"婧": "jing",
"婨": "lun",
"婩": "nve",
"婪": "lan",
"婫": "hun",
"婬": "yin",
"婭": "ya",
"婮": "ju",
"婯": "li",
"婰": "dian",
"婱": "xian",
"婲": "hua",
"婳": "hua",
"婴": "ying",
"婵": "chan",
"婶": "shen",
"婷": "ting",
"婸": "dang",
"婹": "yao",
"婺": "wu",
"婻": "nan",
"婼": "ruo",
"婽": "jia",
"婾": "tou",
"婿": "xu",
"媀": "yu",
"媁": "wei",
"媂": "di",
"媃": "rou",
"媄": "mei",
"媅": "dan",
"媆": "ruan",
"媇": "qin",
"媈": "hui",
"媉": "wo",
"媊": "qian",
"媋": "chun",
"媌": "miao",
"媍": "fu",
"媎": "jie",
"媏": "duan",
"媐": "yi",
"媑": "zhong",
"媒": "mei",
"媓": "huang",
"媔": "mian",
"媕": "an",
"媖": "ying",
"媗": "xuan",
"媘": "jie",
"媙": "wei",
"媚": "mei",
"媛": "yuan",
"媜": "zheng",
"媝": "qiu",
"媞": "ti",
"媟": "xie",
"媠": "tuo",
"媡": "lian",
"媢": "mao",
"媣": "ran",
"媤": "si",
"媥": "pian",
"媦": "wei",
"媧": "wa",
"媨": "cu",
"媩": "hu",
"媪": "ao",
"媫": "jie",
"媬": "bao",
"媭": "xu",
"媮": "tou",
"媯": "gui",
"媰": "chu",
"媱": "yao",
"媲": "pi",
"媳": "xi",
"媴": "yuan",
"媵": "ying",
"媶": "rong",
"媷": "ru",
"媸": "chi",
"媹": "liu",
"媺": "mei",
"媻": "pan",
"媼": "ao",
"媽": "ma",
"媾": "gou",
"媿": "kui",
"嫀": "qin",
"嫁": "jia",
"嫂": "sao",
"嫃": "zhen",
"嫄": "yuan",
"嫅": "jie",
"嫆": "rong",
"嫇": "ming",
"嫈": "ying",
"嫉": "ji",
"嫊": "su",
"嫋": "niao",
"嫌": "xian",
"嫍": "tao",
"嫎": "pang",
"嫏": "lang",
"嫐": "nao",
"嫑": "biao",
"嫒": "ai",
"嫓": "pi",
"嫔": "pin",
"嫕": "yi",
"嫖": "piao",
"嫗": "yu",
"嫘": "lei",
"嫙": "xuan",
"嫚": "man",
"嫛": "yi",
"嫜": "zhang",
"嫝": "kang",
"嫞": "yong",
"嫟": "ni",
"嫠": "li",
"嫡": "di",
"嫢": "gui",
"嫣": "yan",
"嫤": "jin",
"嫥": "zhuan",
"嫦": "chang",
"嫧": "ze",
"嫨": "han",
"嫩": "nen",
"嫪": "lao",
"嫫": "mo",
"嫬": "zhe",
"嫭": "hu",
"嫮": "hu",
"嫯": "ao",
"嫰": "nen",
"嫱": "qiang",
"嫲": "ma",
"嫳": "pie",
"嫴": "gu",
"嫵": "wu",
"嫶": "qiao",
"嫷": "tuo",
"嫸": "zhan",
"嫹": "miao",
"嫺": "xian",
"嫻": "xian",
"嫼": "mo",
"嫽": "liao",
"嫾": "lian",
"嫿": "hua",
"嬀": "gui",
"嬁": "deng",
"嬂": "zhi",
"嬃": "xu",
"嬄": "yi",
"嬅": "hua",
"嬆": "xi",
"嬇": "kui",
"嬈": "rao",
"嬉": "xi",
"嬊": "yan",
"嬋": "chan",
"嬌": "jiao",
"嬍": "mei",
"嬎": "fan",
"嬏": "fan",
"嬐": "xian",
"嬑": "yi",
"嬒": "hui",
"嬓": "jiao",
"嬔": "fu",
"嬕": "shi",
"嬖": "bi",
"嬗": "shan",
"嬘": "sui",
"嬙": "qiang",
"嬚": "lian",
"嬛": "huan",
"嬜": "xin",
"嬝": "niao",
"嬞": "dong",
"嬟": "yi",
"嬠": "can",
"嬡": "ai",
"嬢": "niang",
"嬣": "ning",
"嬤": "mo",
"嬥": "tiao",
"嬦": "chou",
"嬧": "jin",
"嬨": "ci",
"嬩": "yu",
"嬪": "pin",
"嬫": "rong",
"嬬": "ru",
"嬭": "nai",
"嬮": "yan",
"嬯": "tai",
"嬰": "ying",
"嬱": "qian",
"嬲": "niao",
"嬳": "yue",
"嬴": "ying",
"嬵": "mian",
"嬶": "bi",
"嬷": "mo",
"嬸": "shen",
"嬹": "xing",
"嬺": "ni",
"嬻": "du",
"嬼": "liu",
"嬽": "yuan",
"嬾": "lan",
"嬿": "yan",
"孀": "shuang",
"孁": "ling",
"孂": "jiao",
"孃": "niang",
"孄": "lan",
"孅": "xian",
"孆": "ying",
"孇": "shuang",
"孈": "xie",
"孉": "huan",
"孊": "mi",
"孋": "li",
"孌": "luan",
"孍": "yan",
"孎": "zhu",
"孏": "lan",
"子": "zi",
"孑": "jie",
"孒": "jue",
"孓": "jue",
"孔": "kong",
"孕": "yun",
"孖": "zi",
"字": "zi",
"存": "cun",
"孙": "sun",
"孚": "fu",
"孛": "bei",
"孜": "zi",
"孝": "xiao",
"孞": "xin",
"孟": "meng",
"孠": "si",
"孡": "tai",
"孢": "bao",
"季": "ji",
"孤": "gu",
"孥": "nu",
"学": "xue",
"孧": "you",
"孨": "zhuan",
"孩": "hai",
"孪": "luan",
"孫": "sun",
"孬": "nao",
"孭": "mie",
"孮": "cong",
"孯": "qian",
"孰": "shu",
"孱": "chan",
"孲": "ya",
"孳": "zi",
"孴": "ni",
"孵": "fu",
"孶": "zi",
"孷": "li",
"學": "xue",
"孹": "bo",
"孺": "ru",
"孻": "nai",
"孼": "nie",
"孽": "nie",
"孾": "ying",
"孿": "luan",
"宀": "mian",
"宁": "ning",
"宂": "rong",
"它": "ta",
"宄": "gui",
"宅": "zhai",
"宆": "qiong",
"宇": "yu",
"守": "shou",
"安": "an",
"宊": "tu",
"宋": "song",
"完": "wan",
"宍": "rou",
"宎": "yao",
"宏": "hong",
"宐": "yi",
"宑": "jing",
"宒": "zhun",
"宓": "mi",
"宔": "zhu",
"宕": "dang",
"宖": "hong",
"宗": "zong",
"官": "guan",
"宙": "zhou",
"定": "ding",
"宛": "wan",
"宜": "yi",
"宝": "bao",
"实": "shi",
"実": "shi",
"宠": "chong",
"审": "shen",
"客": "ke",
"宣": "xuan",
"室": "shi",
"宥": "you",
"宦": "huan",
"宧": "yi",
"宨": "tiao",
"宩": "shi",
"宪": "xian",
"宫": "gong",
"宬": "cheng",
"宭": "qun",
"宮": "gong",
"宯": "xiao",
"宰": "zai",
"宱": "zha",
"宲": "bao",
"害": "hai",
"宴": "yan",
"宵": "xiao",
"家": "jia",
"宷": "shen",
"宸": "chen",
"容": "rong",
"宺": "huang",
"宻": "mi",
"宼": "kou",
"宽": "kuan",
"宾": "bin",
"宿": "su",
"寀": "cai",
"寁": "zan",
"寂": "ji",
"寃": "yuan",
"寄": "ji",
"寅": "yin",
"密": "mi",
"寇": "kou",
"寈": "qing",
"寉": "he",
"寊": "zhen",
"寋": "jian",
"富": "fu",
"寍": "ning",
"寎": "bing",
"寏": "huan",
"寐": "mei",
"寑": "qin",
"寒": "han",
"寓": "yu",
"寔": "shi",
"寕": "ning",
"寖": "jin",
"寗": "ning",
"寘": "zhi",
"寙": "yu",
"寚": "bao",
"寛": "kuan",
"寜": "ning",
"寝": "qin",
"寞": "mo",
"察": "cha",
"寠": "ju",
"寡": "gua",
"寢": "qin",
"寣": "hu",
"寤": "wu",
"寥": "liao",
"實": "shi",
"寧": "ning",
"寨": "zhai",
"審": "shen",
"寪": "wei",
"寫": "xie",
"寬": "kuan",
"寭": "hui",
"寮": "liao",
"寯": "jun",
"寰": "huan",
"寱": "yi",
"寲": "yi",
"寳": "bao",
"寴": "qin",
"寵": "chong",
"寶": "bao",
"寷": "feng",
"寸": "cun",
"对": "dui",
"寺": "si",
"寻": "xun",
"导": "dao",
"寽": "lve",
"対": "dui",
"寿": "shou",
"尀": "po",
"封": "feng",
"専": "zhuan",
"尃": "fu",
"射": "she",
"尅": "kei",
"将": "jiang",
"將": "jiang",
"專": "zhuan",
"尉": "wei",
"尊": "zun",
"尋": "xun",
"尌": "shu",
"對": "dui",
"導": "dao",
"小": "xiao",
"尐": "jie",
"少": "shao",
"尒": "er",
"尓": "er",
"尔": "er",
"尕": "ga",
"尖": "jian",
"尗": "shu",
"尘": "chen",
"尙": "shang",
"尚": "shang",
"尛": "mo",
"尜": "ga",
"尝": "chang",
"尞": "liao",
"尟": "xian",
"尠": "xian",
"尡": "hun",
"尢": "you",
"尣": "wang",
"尤": "you",
"尥": "liao",
"尦": "liao",
"尧": "yao",
"尨": "long",
"尩": "wang",
"尪": "wang",
"尫": "wang",
"尬": "ga",
"尭": "yao",
"尮": "duo",
"尯": "kui",
"尰": "zhong",
"就": "jiu",
"尲": "gan",
"尳": "gu",
"尴": "gan",
"尵": "tui",
"尶": "gan",
"尷": "gan",
"尸": "shi",
"尹": "yin",
"尺": "chi",
"尻": "kao",
"尼": "ni",
"尽": "jin",
"尾": "wei",
"尿": "niao",
"局": "ju",
"屁": "pi",
"层": "ceng",
"屃": "xi",
"屄": "bi",
"居": "ju",
"屆": "jie",
"屇": "tian",
"屈": "qu",
"屉": "ti",
"届": "jie",
"屋": "wu",
"屌": "diao",
"屍": "shi",
"屎": "shi",
"屏": "ping",
"屐": "ji",
"屑": "xie",
"屒": "zhen",
"屓": "xi",
"屔": "ni",
"展": "zhan",
"屖": "xi",
"屗": "wei",
"屘": "man",
"屙": "e",
"屚": "lou",
"屛": "ping",
"屜": "ti",
"屝": "fei",
"属": "shu",
"屟": "xie",
"屠": "tu",
"屡": "lv",
"屢": "lv",
"屣": "xi",
"層": "ceng",
"履": "lv",
"屦": "ju",
"屧": "xie",
"屨": "ju",
"屩": "jue",
"屪": "liao",
"屫": "jue",
"屬": "shu",
"屭": "xi",
"屮": "che",
"屯": "tun",
"屰": "ni",
"山": "shan",
"屲": "wa",
"屳": "xian",
"屴": "li",
"屵": "an",
"屶": "hui",
"屷": "hui",
"屸": "hong",
"屹": "yi",
"屺": "qi",
"屻": "ren",
"屼": "wu",
"屽": "han",
"屾": "shen",
"屿": "yu",
"岀": "chu",
"岁": "sui",
"岂": "qi",
"岃": "ren",
"岄": "yue",
"岅": "ban",
"岆": "yao",
"岇": "ang",
"岈": "ya",
"岉": "wu",
"岊": "jie",
"岋": "e",
"岌": "ji",
"岍": "qian",
"岎": "fen",
"岏": "wan",
"岐": "qi",
"岑": "cen",
"岒": "qian",
"岓": "qi",
"岔": "cha",
"岕": "jie",
"岖": "qu",
"岗": "gang",
"岘": "xian",
"岙": "ao",
"岚": "lan",
"岛": "dao",
"岜": "ba",
"岝": "zuo",
"岞": "zuo",
"岟": "yang",
"岠": "ju",
"岡": "gang",
"岢": "ke",
"岣": "gou",
"岤": "xue",
"岥": "po",
"岦": "li",
"岧": "tiao",
"岨": "ju",
"岩": "yan",
"岪": "fu",
"岫": "xiu",
"岬": "jia",
"岭": "ling",
"岮": "tuo",
"岯": "pi",
"岰": "ao",
"岱": "dai",
"岲": "kuang",
"岳": "yue",
"岴": "qu",
"岵": "hu",
"岶": "po",
"岷": "min",
"岸": "an",
"岹": "tiao",
"岺": "ling",
"岻": "di",
"岼": "ping",
"岽": "dong",
"岾": "zhan",
"岿": "kui",
"峀": "xiu",
"峁": "mao",
"峂": "tong",
"峃": "xue",
"峄": "yi",
"峅": "bian",
"峆": "he",
"峇": "ke",
"峈": "luo",
"峉": "e",
"峊": "fu",
"峋": "xun",
"峌": "die",
"峍": "lu",
"峎": "en",
"峏": "er",
"峐": "gai",
"峑": "quan",
"峒": "tong",
"峓": "yi",
"峔": "mu",
"峕": "shi",
"峖": "an",
"峗": "wei",
"峘": "huan",
"峙": "zhi",
"峚": "mi",
"峛": "li",
"峜": "fa",
"峝": "tong",
"峞": "wei",
"峟": "you",
"峠": "qia",
"峡": "xia",
"峢": "li",
"峣": "yao",
"峤": "jiao",
"峥": "zheng",
"峦": "luan",
"峧": "jiao",
"峨": "e",
"峩": "e",
"峪": "yu",
"峫": "xie",
"峬": "bu",
"峭": "qiao",
"峮": "qun",
"峯": "feng",
"峰": "feng",
"峱": "nao",
"峲": "li",
"峳": "you",
"峴": "xian",
"峵": "rong",
"島": "dao",
"峷": "shen",
"峸": "cheng",
"峹": "tu",
"峺": "geng",
"峻": "jun",
"峼": "gao",
"峽": "xia",
"峾": "yin",
"峿": "wu",
"崀": "lang",
"崁": "kan",
"崂": "lao",
"崃": "lai",
"崄": "xian",
"崅": "que",
"崆": "kong",
"崇": "chong",
"崈": "chong",
"崉": "ta",
"崊": "lin",
"崋": "hua",
"崌": "ju",
"崍": "lai",
"崎": "qi",
"崏": "min",
"崐": "kun",
"崑": "kun",
"崒": "zu",
"崓": "gu",
"崔": "cui",
"崕": "ya",
"崖": "ya",
"崗": "gang",
"崘": "lun",
"崙": "lun",
"崚": "ling",
"崛": "jue",
"崜": "duo",
"崝": "zheng",
"崞": "guo",
"崟": "yin",
"崠": "dong",
"崡": "han",
"崢": "zheng",
"崣": "wei",
"崤": "xiao",
"崥": "pi",
"崦": "yan",
"崧": "song",
"崨": "jie",
"崩": "beng",
"崪": "zu",
"崫": "jue",
"崬": "dong",
"崭": "zhan",
"崮": "gu",
"崯": "yin",
"崰": "zi",
"崱": "ze",
"崲": "huang",
"崳": "yu",
"崴": "wai",
"崵": "yang",
"崶": "feng",
"崷": "qiu",
"崸": "yang",
"崹": "ti",
"崺": "yi",
"崻": "zhi",
"崼": "shi",
"崽": "zai",
"崾": "yao",
"崿": "e",
"嵀": "zhu",
"嵁": "kan",
"嵂": "lv",
"嵃": "yan",
"嵄": "mei",
"嵅": "han",
"嵆": "ji",
"嵇": "ji",
"嵈": "huan",
"嵉": "ting",
"嵊": "sheng",
"嵋": "mei",
"嵌": "qian",
"嵍": "wu",
"嵎": "yu",
"嵏": "zong",
"嵐": "lan",
"嵑": "ke",
"嵒": "yan",
"嵓": "yan",
"嵔": "wei",
"嵕": "zong",
"嵖": "cha",
"嵗": "sui",
"嵘": "rong",
"嵙": "ke",
"嵚": "qin",
"嵛": "yu",
"嵜": "qi",
"嵝": "lou",
"嵞": "tu",
"嵟": "cui",
"嵠": "xi",
"嵡": "weng",
"嵢": "cang",
"嵣": "dang",
"嵤": "rong",
"嵥": "jie",
"嵦": "kai",
"嵧": "liu",
"嵨": "wu",
"嵩": "song",
"嵪": "kao",
"嵫": "zi",
"嵬": "wei",
"嵭": "beng",
"嵮": "dian",
"嵯": "cuo",
"嵰": "qin",
"嵱": "yong",
"嵲": "nie",
"嵳": "cuo",
"嵴": "ji",
"嵵": "shi",
"嵶": "ruo",
"嵷": "song",
"嵸": "zong",
"嵹": "jiang",
"嵺": "liao",
"嵻": "kang",
"嵼": "chan",
"嵽": "die",
"嵾": "cen",
"嵿": "ding",
"嶀": "tu",
"嶁": "lou",
"嶂": "zhang",
"嶃": "zhan",
"嶄": "zhan",
"嶅": "ao",
"嶆": "cao",
"嶇": "qu",
"嶈": "qiang",
"嶉": "wei",
"嶊": "zui",
"嶋": "dao",
"嶌": "dao",
"嶍": "xi",
"嶎": "yu",
"嶏": "pi",
"嶐": "long",
"嶑": "xiang",
"嶒": "ceng",
"嶓": "bo",
"嶔": "qin",
"嶕": "jiao",
"嶖": "yan",
"嶗": "lao",
"嶘": "zhan",
"嶙": "lin",
"嶚": "liao",
"嶛": "liao",
"嶜": "qin",
"嶝": "deng",
"嶞": "tuo",
"嶟": "zun",
"嶠": "jiao",
"嶡": "jue",
"嶢": "yao",
"嶣": "jiao",
"嶤": "yao",
"嶥": "jue",
"嶦": "zhan",
"嶧": "yi",
"嶨": "xue",
"嶩": "nao",
"嶪": "ye",
"嶫": "ye",
"嶬": "yi",
"嶭": "nie",
"嶮": "xian",
"嶯": "ji",
"嶰": "xie",
"嶱": "ke",
"嶲": "gui",
"嶳": "di",
"嶴": "ao",
"嶵": "zui",
"嶶": "wei",
"嶷": "yi",
"嶸": "rong",
"嶹": "dao",
"嶺": "ling",
"嶻": "jie",
"嶼": "yu",
"嶽": "yue",
"嶾": "yin",
"嶿": "ru",
"巀": "jie",
"巁": "li",
"巂": "gui",
"巃": "long",
"巄": "long",
"巅": "dian",
"巆": "ying",
"巇": "xi",
"巈": "ju",
"巉": "chan",
"巊": "ying",
"巋": "kui",
"巌": "yan",
"巍": "wei",
"巎": "nao",
"巏": "quan",
"巐": "chao",
"巑": "cuan",
"巒": "luan",
"巓": "dian",
"巔": "dian",
"巕": "nie",
"巖": "yan",
"巗": "yan",
"巘": "yan",
"巙": "kui",
"巚": "yan",
"巛": "chuan",
"巜": "kuai",
"川": "chuan",
"州": "zhou",
"巟": "huang",
"巠": "jing",
"巡": "xun",
"巢": "chao",
"巣": "chao",
"巤": "lie",
"工": "gong",
"左": "zuo",
"巧": "qiao",
"巨": "ju",
"巩": "gong",
"巪": "ju",
"巫": "wu",
"巬": "gu",
"巭": "gu",
"差": "cha",
"巯": "qiu",
"巰": "qiu",
"己": "ji",
"已": "yi",
"巳": "si",
"巴": "ba",
"巵": "zhi",
"巶": "zhao",
"巷": "xiang",
"巸": "yi",
"巹": "jin",
"巺": "xun",
"巻": "juan",
"巽": "xun",
"巾": "jin",
"巿": "fu",
"帀": "za",
"币": "bi",
"市": "shi",
"布": "bu",
"帄": "ding",
"帅": "shuai",
"帆": "fan",
"帇": "nie",
"师": "shi",
"帉": "fen",
"帊": "pa",
"帋": "zhi",
"希": "xi",
"帍": "hu",
"帎": "dan",
"帏": "wei",
"帐": "zhang",
"帑": "tang",
"帒": "dai",
"帓": "mo",
"帔": "pei",
"帕": "pa",
"帖": "tie",
"帗": "fu",
"帘": "lian",
"帙": "zhi",
"帚": "zhou",
"帛": "bo",
"帜": "zhi",
"帝": "di",
"帞": "mo",
"帟": "yi",
"帠": "yi",
"帡": "ping",
"帢": "qia",
"帣": "juan",
"帤": "ru",
"帥": "shuai",
"带": "dai",
"帧": "zhen",
"帨": "shui",
"帩": "qiao",
"帪": "zhen",
"師": "shi",
"帬": "qun",
"席": "xi",
"帮": "bang",
"帯": "dai",
"帰": "gui",
"帱": "chou",
"帲": "ping",
"帳": "zhang",
"帴": "jian",
"帵": "wan",
"帶": "dai",
"帷": "wei",
"常": "chang",
"帹": "sha",
"帺": "qi",
"帻": "ze",
"帼": "guo",
"帽": "mao",
"帾": "zhu",
"帿": "hou",
"幀": "zhen",
"幁": "zheng",
"幂": "mi",
"幃": "wei",
"幄": "wo",
"幅": "fu",
"幆": "yi",
"幇": "bang",
"幈": "ping",
"幉": "die",
"幊": "gong",
"幋": "pan",
"幌": "huang",
"幍": "tao",
"幎": "mi",
"幏": "jia",
"幐": "teng",
"幑": "hui",
"幒": "zhong",
"幓": "shan",
"幔": "man",
"幕": "mu",
"幖": "biao",
"幗": "guo",
"幘": "ze",
"幙": "mu",
"幚": "bang",
"幛": "zhang",
"幜": "jing",
"幝": "chan",
"幞": "fu",
"幟": "zhi",
"幠": "hu",
"幡": "fan",
"幢": "chuang",
"幣": "bi",
"幤": "bi",
"幥": "zhang",
"幦": "mi",
"幧": "qiao",
"幨": "chan",
"幩": "fen",
"幪": "meng",
"幫": "bang",
"幬": "chou",
"幭": "mie",
"幮": "chu",
"幯": "jie",
"幰": "xian",
"幱": "lan",
"干": "gan",
"平": "ping",
"年": "nian",
"幵": "jian",
"并": "bing",
"幷": "bing",
"幸": "xing",
"幹": "gan",
"幺": "yao",
"幻": "huan",
"幼": "you",
"幽": "you",
"幾": "ji",
"广": "guang",
"庀": "pi",
"庁": "ting",
"庂": "ze",
"広": "guang",
"庄": "zhuang",
"庅": "mo",
"庆": "qing",
"庇": "bi",
"庈": "qin",
"庉": "dun",
"床": "chuang",
"庋": "gui",
"庌": "ya",
"庍": "bai",
"庎": "jie",
"序": "xu",
"庐": "lu",
"庑": "wu",
"庒": "zhuang",
"库": "ku",
"应": "ying",
"底": "di",
"庖": "pao",
"店": "dian",
"庘": "ya",
"庙": "miao",
"庚": "geng",
"庛": "ci",
"府": "fu",
"庝": "tong",
"庞": "pang",
"废": "fei",
"庠": "xiang",
"庡": "yi",
"庢": "zhi",
"庣": "tiao",
"庤": "zhi",
"庥": "xiu",
"度": "du",
"座": "zuo",
"庨": "xiao",
"庩": "tu",
"庪": "gui",
"庫": "ku",
"庬": "mang",
"庭": "ting",
"庮": "you",
"庯": "bu",
"庰": "bing",
"庱": "cheng",
"庲": "lai",
"庳": "bei",
"庴": "ji",
"庵": "an",
"庶": "shu",
"康": "kang",
"庸": "yong",
"庹": "tuo",
"庺": "song",
"庻": "shu",
"庼": "qing",
"庽": "yu",
"庾": "yu",
"庿": "miao",
"廀": "sou",
"廁": "ce",
"廂": "xiang",
"廃": "fei",
"廄": "jiu",
"廅": "e",
"廆": "gui",
"廇": "liu",
"廈": "sha",
"廉": "lian",
"廊": "lang",
"廋": "sou",
"廌": "zhi",
"廍": "bu",
"廎": "qing",
"廏": "jiu",
"廐": "jiu",
"廑": "jin",
"廒": "ao",
"廓": "kuo",
"廔": "lou",
"廕": "yin",
"廖": "liao",
"廗": "dai",
"廘": "lu",
"廙": "yi",
"廚": "chu",
"廛": "chan",
"廜": "tu",
"廝": "si",
"廞": "xin",
"廟": "miao",
"廠": "chang",
"廡": "wu",
"廢": "fei",
"廣": "guang",
"廤": "ku",
"廥": "kuai",
"廦": "bi",
"廧": "qiang",
"廨": "xie",
"廩": "lin",
"廪": "lin",
"廫": "liao",
"廬": "lu",
"廭": "ji",
"廮": "ying",
"廯": "xian",
"廰": "ting",
"廱": "yong",
"廲": "li",
"廳": "ting",
"廴": "yin",
"廵": "xun",
"延": "yan",
"廷": "ting",
"廸": "di",
"廹": "po",
"建": "jian",
"廻": "hui",
"廼": "nai",
"廽": "hui",
"廾": "gong",
"廿": "nian",
"开": "kai",
"弁": "bian",
"异": "yi",
"弃": "qi",
"弄": "nong",
"弅": "fen",
"弆": "ju",
"弇": "yan",
"弈": "yi",
"弉": "zang",
"弊": "bi",
"弋": "yi",
"弌": "yi",
"弍": "er",
"弎": "san",
"式": "shi",
"弐": "er",
"弑": "shi",
"弒": "shi",
"弓": "gong",
"弔": "diao",
"引": "yin",
"弖": "hu",
"弗": "fu",
"弘": "hong",
"弙": "wu",
"弚": "tui",
"弛": "chi",
"弜": "jiang",
"弝": "ba",
"弞": "shen",
"弟": "di",
"张": "zhang",
"弡": "jue",
"弢": "tao",
"弣": "fu",
"弤": "di",
"弥": "mi",
"弦": "xian",
"弧": "hu",
"弨": "chao",
"弩": "nu",
"弪": "jing",
"弫": "zhen",
"弬": "yi",
"弭": "mi",
"弮": "juan",
"弯": "wan",
"弰": "shao",
"弱": "ruo",
"弲": "xuan",
"弳": "jing",
"弴": "diao",
"張": "zhang",
"弶": "jiang",
"強": "qiang",
"弸": "peng",
"弹": "dan",
"强": "qiang",
"弻": "bi",
"弼": "bi",
"弽": "she",
"弾": "dan",
"弿": "jian",
"彀": "gou",
"彁": "ge",
"彂": "fa",
"彃": "bi",
"彄": "kou",
"彅": "jian",
"彆": "bie",
"彇": "xiao",
"彈": "dan",
"彉": "guo",
"彊": "qiang",
"彋": "hong",
"彌": "mi",
"彍": "guo",
"彎": "wan",
"彏": "jue",
"彐": "ji",
"彑": "ji",
"归": "gui",
"当": "dang",
"彔": "lu",
"录": "lu",
"彖": "tuan",
"彗": "hui",
"彘": "zhi",
"彙": "hui",
"彚": "hui",
"彛": "yi",
"彜": "yi",
"彝": "yi",
"彞": "yi",
"彟": "huo",
"彠": "huo",
"彡": "shan",
"形": "xing",
"彣": "wen",
"彤": "tong",
"彥": "yan",
"彦": "yan",
"彧": "yu",
"彨": "chi",
"彩": "cai",
"彪": "biao",
"彫": "diao",
"彬": "bin",
"彭": "peng",
"彮": "yong",
"彯": "piao",
"彰": "zhang",
"影": "ying",
"彲": "chi",
"彳": "chi",
"彴": "zhuo",
"彵": "tuo",
"彶": "ji",
"彷": "pang",
"彸": "zhong",
"役": "yi",
"彺": "wang",
"彻": "che",
"彼": "bi",
"彽": "di",
"彾": "ling",
"彿": "fu",
"往": "wang",
"征": "zheng",
"徂": "cu",
"徃": "wang",
"径": "jing",
"待": "dai",
"徆": "xi",
"徇": "xun",
"很": "hen",
"徉": "yang",
"徊": "huai",
"律": "lv",
"後": "hou",
"徍": "wang",
"徎": "cheng",
"徏": "zhi",
"徐": "xu",
"徑": "jing",
"徒": "tu",
"従": "cong",
"徔": "cong",
"徕": "lai",
"徖": "cong",
"得": "de",
"徘": "pai",
"徙": "xi",
"徚": "dong",
"徛": "ji",
"徜": "chang",
"徝": "zhi",
"從": "cong",
"徟": "zhou",
"徠": "lai",
"御": "yu",
"徢": "xie",
"徣": "jie",
"徤": "jian",
"徥": "shi",
"徦": "jia",
"徧": "bian",
"徨": "huang",
"復": "fu",
"循": "xun",
"徫": "wei",
"徬": "pang",
"徭": "yao",
"微": "wei",
"徯": "xi",
"徰": "zheng",
"徱": "piao",
"徲": "ti",
"徳": "de",
"徴": "zhi",
"徵": "zhi",
"徶": "bie",
"德": "de",
"徸": "zhong",
"徹": "che",
"徺": "jiao",
"徻": "hui",
"徼": "jiao",
"徽": "hui",
"徾": "mei",
"徿": "long",
"忀": "xiang",
"忁": "bao",
"忂": "qu",
"心": "xin",
"忄": "xin",
"必": "bi",
"忆": "yi",
"忇": "le",
"忈": "ren",
"忉": "dao",
"忊": "ding",
"忋": "gai",
"忌": "ji",
"忍": "ren",
"忎": "ren",
"忏": "chan",
"忐": "tan",
"忑": "te",
"忒": "te",
"忓": "gan",
"忔": "yi",
"忕": "shi",
"忖": "cun",
"志": "zhi",
"忘": "wang",
"忙": "mang",
"忚": "xi",
"忛": "fan",
"応": "ying",
"忝": "tian",
"忞": "min",
"忟": "min",
"忠": "zhong",
"忡": "chong",
"忢": "wu",
"忣": "ji",
"忤": "wu",
"忥": "xi",
"忦": "jia",
"忧": "you",
"忨": "wan",
"忩": "cong",
"忪": "song",
"快": "kuai",
"忬": "yu",
"忭": "bian",
"忮": "zhi",
"忯": "qi",
"忰": "cui",
"忱": "chen",
"忲": "tai",
"忳": "tun",
"忴": "qian",
"念": "nian",
"忶": "hun",
"忷": "xiong",
"忸": "niu",
"忹": "kuang",
"忺": "xian",
"忻": "xin",
"忼": "kang",
"忽": "hu",
"忾": "kai",
"忿": "fen",
"怀": "huai",
"态": "tai",
"怂": "song",
"怃": "wu",
"怄": "ou",
"怅": "chang",
"怆": "chuang",
"怇": "ju",
"怈": "yi",
"怉": "bao",
"怊": "chao",
"怋": "min",
"怌": "pei",
"怍": "zuo",
"怎": "zen",
"怏": "yang",
"怐": "kou",
"怑": "ban",
"怒": "nu",
"怓": "nao",
"怔": "zheng",
"怕": "pa",
"怖": "bu",
"怗": "tie",
"怘": "hu",
"怙": "hu",
"怚": "cu",
"怛": "da",
"怜": "lian",
"思": "si",
"怞": "you",
"怟": "di",
"怠": "dai",
"怡": "yi",
"怢": "tu",
"怣": "you",
"怤": "fu",
"急": "ji",
"怦": "peng",
"性": "xing",
"怨": "yuan",
"怩": "ni",
"怪": "guai",
"怫": "fu",
"怬": "xi",
"怭": "bi",
"怮": "you",
"怯": "qie",
"怰": "xuan",
"怱": "cong",
"怲": "bing",
"怳": "huang",
"怴": "xu",
"怵": "chu",
"怶": "bi",
"怷": "shu",
"怸": "xi",
"怹": "tan",
"怺": "yong",
"总": "zong",
"怼": "dui",
"怽": "mi",
"怿": "yi",
"恀": "shi",
"恁": "nen",
"恂": "xun",
"恃": "shi",
"恄": "xi",
"恅": "lao",
"恆": "heng",
"恇": "kuang",
"恈": "mou",
"恉": "zhi",
"恊": "xie",
"恋": "lian",
"恌": "tiao",
"恍": "huang",
"恎": "die",
"恏": "hao",
"恐": "kong",
"恑": "gui",
"恒": "heng",
"恓": "xi",
"恔": "xiao",
"恕": "shu",
"恖": "si",
"恗": "hu",
"恘": "qiu",
"恙": "yang",
"恚": "hui",
"恛": "hui",
"恜": "chi",
"恝": "jia",
"恞": "yi",
"恟": "xiong",
"恠": "guai",
"恡": "lin",
"恢": "hui",
"恣": "zi",
"恤": "xu",
"恥": "chi",
"恦": "shang",
"恧": "nv",
"恨": "hen",
"恩": "en",
"恪": "ke",
"恫": "dong",
"恬": "tian",
"恭": "gong",
"恮": "quan",
"息": "xi",
"恰": "qia",
"恱": "yue",
"恲": "peng",
"恳": "ken",
"恴": "de",
"恵": "hui",
"恶": "e",
"恷": "qiu",
"恸": "tong",
"恹": "yan",
"恺": "kai",
"恻": "ce",
"恼": "nao",
"恽": "yun",
"恾": "mang",
"恿": "yong",
"悀": "yong",
"悁": "yuan",
"悂": "pi",
"悃": "kun",
"悄": "qiao",
"悅": "yue",
"悆": "yu",
"悇": "tu",
"悈": "jie",
"悉": "xi",
"悊": "zhe",
"悋": "lin",
"悌": "ti",
"悍": "han",
"悎": "hao",
"悏": "qie",
"悐": "ti",
"悑": "bu",
"悒": "yi",
"悓": "qian",
"悔": "hui",
"悕": "xi",
"悖": "bei",
"悗": "man",
"悘": "yi",
"悙": "heng",
"悚": "song",
"悛": "quan",
"悜": "cheng",
"悝": "kui",
"悞": "wu",
"悟": "wu",
"悠": "you",
"悡": "li",
"悢": "liang",
"患": "huan",
"悤": "cong",
"悥": "yi",
"悦": "yue",
"悧": "li",
"您": "nin",
"悩": "nao",
"悪": "e",
"悫": "que",
"悬": "xuan",
"悭": "qian",
"悮": "wu",
"悯": "min",
"悰": "cong",
"悱": "fei",
"悲": "bei",
"悳": "de",
"悴": "cui",
"悵": "chang",
"悶": "men",
"悷": "li",
"悸": "ji",
"悹": "guan",
"悺": "guan",
"悻": "xing",
"悼": "dao",
"悽": "qi",
"悾": "kong",
"悿": "tian",
"惀": "lun",
"惁": "xi",
"惂": "kan",
"惃": "gun",
"惄": "ni",
"情": "qing",
"惆": "chou",
"惇": "dun",
"惈": "guo",
"惉": "zhan",
"惊": "jing",
"惋": "wan",
"惌": "yuan",
"惍": "jin",
"惎": "ji",
"惏": "lan",
"惐": "yu",
"惑": "huo",
"惒": "he",
"惓": "juan",
"惔": "tan",
"惕": "ti",
"惖": "ti",
"惗": "nian",
"惘": "wang",
"惙": "chuo",
"惚": "hu",
"惛": "hun",
"惜": "xi",
"惝": "chang",
"惞": "xin",
"惟": "wei",
"惠": "hui",
"惡": "e",
"惢": "suo",
"惣": "zong",
"惤": "jian",
"惥": "yong",
"惦": "dian",
"惧": "ju",
"惨": "can",
"惩": "cheng",
"惪": "de",
"惫": "bei",
"惬": "qie",
"惭": "can",
"惮": "dan",
"惯": "guan",
"惰": "duo",
"惱": "nao",
"惲": "yun",
"想": "xiang",
"惴": "zhui",
"惵": "die",
"惶": "huang",
"惷": "chun",
"惸": "qiong",
"惹": "re",
"惺": "xing",
"惻": "ce",
"惼": "bian",
"惽": "min",
"惾": "zong",
"惿": "ti",
"愀": "qiao",
"愁": "chou",
"愂": "bei",
"愃": "xuan",
"愄": "wei",
"愅": "ge",
"愆": "qian",
"愇": "wei",
"愈": "yu",
"愉": "yu",
"愊": "bi",
"愋": "xuan",
"愌": "huan",
"愍": "min",
"愎": "bi",
"意": "yi",
"愐": "mian",
"愑": "yong",
"愒": "qi",
"愓": "dang",
"愔": "yin",
"愕": "e",
"愖": "chen",
"愗": "mao",
"愘": "ke",
"愙": "ke",
"愚": "yu",
"愛": "ai",
"愜": "qie",
"愝": "yan",
"愞": "nuo",
"感": "gan",
"愠": "yun",
"愡": "cong",
"愢": "sai",
"愣": "leng",
"愤": "fen",
"愥": "ying",
"愦": "kui",
"愧": "kui",
"愨": "que",
"愩": "gong",
"愪": "yun",
"愫": "su",
"愬": "su",
"愭": "qi",
"愮": "yao",
"愯": "song",
"愰": "huang",
"愱": "ji",
"愲": "gu",
"愳": "ju",
"愴": "chuang",
"愵": "ni",
"愶": "xie",
"愷": "kai",
"愸": "zheng",
"愹": "yong",
"愺": "cao",
"愻": "xun",
"愼": "shen",
"愽": "bo",
"愾": "kai",
"愿": "yuan",
"慀": "xi",
"慁": "hun",
"慂": "yong",
"慃": "yang",
"慄": "li",
"慅": "cao",
"慆": "tao",
"慇": "yin",
"慈": "ci",
"慉": "xu",
"慊": "qian",
"態": "tai",
"慌": "huang",
"慍": "yun",
"慎": "shen",
"慏": "ming",
"慐": "gong",
"慑": "she",
"慒": "cao",
"慓": "piao",
"慔": "mu",
"慕": "mu",
"慖": "guo",
"慗": "chi",
"慘": "can",
"慙": "can",
"慚": "can",
"慛": "cui",
"慜": "min",
"慝": "te",
"慞": "zhang",
"慟": "tong",
"慠": "ao",
"慡": "shuang",
"慢": "man",
"慣": "guan",
"慤": "que",
"慥": "zao",
"慦": "jiu",
"慧": "hui",
"慨": "kai",
"慩": "lian",
"慪": "ou",
"慫": "song",
"慬": "qin",
"慭": "yin",
"慮": "lv",
"慯": "shang",
"慰": "wei",
"慱": "tuan",
"慲": "man",
"慳": "qian",
"慴": "she",
"慵": "yong",
"慶": "qing",
"慷": "kang",
"慸": "di",
"慹": "zhi",
"慺": "lou",
"慻": "juan",
"慼": "qi",
"慽": "qi",
"慾": "yu",
"慿": "ping",
"憀": "liao",
"憁": "cong",
"憂": "you",
"憃": "chong",
"憄": "zhi",
"憅": "tong",
"憆": "cheng",
"憇": "qi",
"憈": "qu",
"憉": "peng",
"憊": "bei",
"憋": "bie",
"憌": "qiong",
"憍": "jiao",
"憎": "zeng",
"憏": "chi",
"憐": "lian",
"憑": "ping",
"憒": "kui",
"憓": "hui",
"憔": "qiao",
"憕": "cheng",
"憖": "yin",
"憗": "yin",
"憘": "xi",
"憙": "xi",
"憚": "dan",
"憛": "tan",
"憜": "duo",
"憝": "dui",
"憞": "dui",
"憟": "su",
"憠": "jue",
"憡": "ce",
"憢": "xiao",
"憣": "fan",
"憤": "fen",
"憥": "lao",
"憦": "lao",
"憧": "chong",
"憨": "han",
"憩": "qi",
"憪": "xian",
"憫": "min",
"憬": "jing",
"憭": "liao",
"憮": "wu",
"憯": "can",
"憰": "jue",
"憱": "cu",
"憲": "xian",
"憳": "tan",
"憴": "sheng",
"憵": "pi",
"憶": "yi",
"憷": "chu",
"憸": "xian",
"憹": "nao",
"憺": "dan",
"憻": "tan",
"憼": "jing",
"憽": "song",
"憾": "han",
"憿": "jiao",
"懀": "wei",
"懁": "xuan",
"懂": "dong",
"懃": "qin",
"懄": "qin",
"懅": "ju",
"懆": "cao",
"懇": "ken",
"懈": "xie",
"應": "ying",
"懊": "ao",
"懋": "mao",
"懌": "yi",
"懍": "lin",
"懎": "se",
"懏": "jun",
"懐": "huai",
"懑": "men",
"懒": "lan",
"懓": "ai",
"懔": "lin",
"懕": "yan",
"懖": "guo",
"懗": "xia",
"懘": "chi",
"懙": "yu",
"懚": "yin",
"懛": "dai",
"懜": "meng",
"懝": "ai",
"懞": "meng",
"懟": "dui",
"懠": "qi",
"懡": "mo",
"懢": "lan",
"懣": "men",
"懤": "chou",
"懥": "zhi",
"懦": "nuo",
"懧": "nuo",
"懨": "yan",
"懩": "yang",
"懪": "bo",
"懫": "zhi",
"懬": "kuang",
"懭": "kuang",
"懮": "you",
"懯": "fu",
"懰": "liu",
"懱": "mie",
"懲": "cheng",
"懳": "hui",
"懴": "chan",
"懵": "meng",
"懶": "lan",
"懷": "huai",
"懸": "xuan",
"懹": "rang",
"懺": "chan",
"懻": "ji",
"懼": "ju",
"懽": "huan",
"懾": "she",
"懿": "yi",
"戀": "lian",
"戁": "nan",
"戂": "mi",
"戃": "tang",
"戄": "jue",
"戅": "gang",
"戆": "gang",
"戇": "gang",
"戈": "ge",
"戉": "yue",
"戊": "wu",
"戋": "jian",
"戌": "xu",
"戍": "shu",
"戎": "rong",
"戏": "xi",
"成": "cheng",
"我": "wo",
"戒": "jie",
"戓": "ge",
"戔": "jian",
"戕": "qiang",
"或": "huo",
"戗": "qiang",
"战": "zhan",
"戙": "dong",
"戚": "qi",
"戛": "jia",
"戜": "die",
"戝": "zei",
"戞": "jia",
"戟": "ji",
"戠": "zhi",
"戡": "kan",
"戢": "ji",
"戣": "kui",
"戤": "gai",
"戥": "deng",
"戦": "zhan",
"戧": "qiang",
"戨": "ge",
"戩": "jian",
"截": "jie",
"戫": "yu",
"戬": "jian",
"戭": "yan",
"戮": "lu",
"戯": "xi",
"戰": "zhan",
"戱": "xi",
"戲": "xi",
"戳": "chuo",
"戴": "dai",
"戵": "qu",
"戶": "hu",
"户": "hu",
"戸": "hu",
"戹": "e",
"戺": "shi",
"戻": "ti",
"戼": "mao",
"戽": "hu",
"戾": "li",
"房": "fang",
"所": "suo",
"扁": "bian",
"扂": "dian",
"扃": "jiong",
"扄": "shang",
"扅": "yi",
"扆": "yi",
"扇": "shan",
"扈": "hu",
"扉": "fei",
"扊": "yan",
"手": "shou",
"扌": "shou",
"才": "cai",
"扎": "za",
"扏": "qiu",
"扐": "le",
"扑": "pu",
"扒": "ba",
"打": "da",
"扔": "reng",
"払": "fan",
"扖": "ru",
"扗": "zai",
"托": "tuo",
"扙": "zhang",
"扚": "diao",
"扛": "kang",
"扜": "yu",
"扝": "yu",
"扞": "han",
"扟": "shen",
"扠": "cha",
"扡": "tuo",
"扢": "gu",
"扣": "kou",
"扤": "wu",
"扥": "den",
"扦": "qian",
"执": "zhi",
"扨": "ren",
"扩": "kuo",
"扪": "men",
"扫": "sao",
"扬": "yang",
"扭": "niu",
"扮": "ban",
"扯": "che",
"扰": "rao",
"扱": "xi",
"扲": "qian",
"扳": "ban",
"扴": "jia",
"扵": "yu",
"扶": "fu",
"扷": "ba",
"扸": "xi",
"批": "pi",
"扺": "zhi",
"扻": "zhi",
"扼": "e",
"扽": "den",
"找": "zhao",
"承": "cheng",
"技": "ji",
"抁": "yan",
"抂": "kuang",
"抃": "bian",
"抄": "chao",
"抅": "ju",
"抆": "wen",
"抇": "hu",
"抈": "yue",
"抉": "jue",
"把": "ba",
"抋": "qin",
"抌": "dan",
"抍": "zheng",
"抎": "yun",
"抏": "wan",
"抐": "ne",
"抑": "yi",
"抒": "shu",
"抓": "zhua",
"抔": "pou",
"投": "tou",
"抖": "dou",
"抗": "kang",
"折": "zhe",
"抙": "pou",
"抚": "fu",
"抛": "pao",
"抜": "ba",
"抝": "ao",
"択": "ze",
"抟": "tuan",
"抠": "kou",
"抡": "lun",
"抢": "qiang",
"抣": "yun",
"护": "hu",
"报": "bao",
"抦": "bing",
"抧": "zhi",
"抨": "peng",
"抩": "nan",
"抪": "bu",
"披": "pi",
"抬": "tai",
"抭": "yao",
"抮": "zhen",
"抯": "zha",
"抰": "yang",
"抱": "bao",
"抲": "he",
"抳": "ni",
"抴": "ye",
"抵": "di",
"抶": "chi",
"抷": "pi",
"抸": "jia",
"抹": "mo",
"抺": "mei",
"抻": "chen",
"押": "ya",
"抽": "chou",
"抾": "qu",
"抿": "min",
"拀": "zhu",
"拁": "jia",
"拂": "fu",
"拃": "zha",
"拄": "zhu",
"担": "dan",
"拆": "chai",
"拇": "mu",
"拈": "nian",
"拉": "la",
"拊": "fu",
"拋": "pao",
"拌": "ban",
"拍": "pai",
"拎": "lin",
"拏": "na",
"拐": "guai",
"拑": "qian",
"拒": "ju",
"拓": "tuo",
"拔": "ba",
"拕": "tuo",
"拖": "tuo",
"拗": "ao",
"拘": "ju",
"拙": "zhuo",
"拚": "pan",
"招": "zhao",
"拜": "bai",
"拝": "bai",
"拞": "di",
"拟": "ni",
"拠": "ju",
"拡": "kuo",
"拢": "long",
"拣": "jian",
"拤": "qia",
"拥": "yong",
"拦": "lan",
"拧": "ning",
"拨": "bo",
"择": "ze",
"拪": "qian",
"拫": "hen",
"括": "kuo",
"拭": "shi",
"拮": "jie",
"拯": "zheng",
"拰": "nin",
"拱": "gong",
"拲": "gong",
"拳": "quan",
"拴": "shuan",
"拵": "cun",
"拶": "za",
"拷": "kao",
"拸": "yi",
"拹": "xie",
"拺": "ce",
"拻": "hui",
"拼": "pin",
"拽": "zhuai",
"拾": "shi",
"拿": "na",
"挀": "bai",
"持": "chi",
"挂": "gua",
"挃": "zhi",
"挄": "kuo",
"挅": "duo",
"挆": "duo",
"指": "zhi",
"挈": "qie",
"按": "an",
"挊": "nong",
"挋": "zhen",
"挌": "ge",
"挍": "jiao",
"挎": "kua",
"挏": "dong",
"挐": "ru",
"挑": "tiao",
"挒": "lie",
"挓": "zha",
"挔": "lv",
"挕": "die",
"挖": "wa",
"挗": "jue",
"挘": "lie",
"挙": "ju",
"挚": "zhi",
"挛": "luan",
"挜": "ya",
"挝": "zhua",
"挞": "ta",
"挟": "xie",
"挠": "nao",
"挡": "dang",
"挢": "jiao",
"挣": "zheng",
"挤": "ji",
"挥": "hui",
"挦": "xian",
"挧": "yu",
"挨": "ai",
"挩": "tuo",
"挪": "nuo",
"挫": "cuo",
"挬": "bo",
"挭": "geng",
"挮": "ti",
"振": "zhen",
"挰": "cheng",
"挱": "suo",
"挲": "suo",
"挳": "keng",
"挴": "mei",
"挵": "nong",
"挶": "ju",
"挷": "bang",
"挸": "jian",
"挹": "yi",
"挺": "ting",
"挻": "shan",
"挼": "ruo",
"挽": "wan",
"挾": "xie",
"挿": "cha",
"捀": "peng",
"捁": "jiao",
"捂": "wu",
"捃": "jun",
"捄": "jiu",
"捅": "tong",
"捆": "kun",
"捇": "huo",
"捈": "tu",
"捉": "zhuo",
"捊": "pou",
"捋": "luo",
"捌": "ba",
"捍": "han",
"捎": "shao",
"捏": "nie",
"捐": "juan",
"捑": "ze",
"捒": "shu",
"捓": "ye",
"捔": "jue",
"捕": "bu",
"捖": "wan",
"捗": "bu",
"捘": "zun",
"捙": "ye",
"捚": "zhai",
"捛": "lv",
"捜": "sou",
"捝": "tuo",
"捞": "lao",
"损": "sun",
"捠": "bang",
"捡": "jian",
"换": "huan",
"捣": "dao",
"捤": "wei",
"捥": "wan",
"捦": "qin",
"捧": "peng",
"捨": "she",
"捩": "lie",
"捪": "min",
"捫": "men",
"捬": "fu",
"捭": "bai",
"据": "ju",
"捯": "dao",
"捰": "wo",
"捱": "ai",
"捲": "juan",
"捳": "yue",
"捴": "zong",
"捵": "chen",
"捶": "chui",
"捷": "jie",
"捸": "tu",
"捹": "ben",
"捺": "na",
"捻": "nian",
"捼": "ruo",
"捽": "zuo",
"捾": "wo",
"捿": "qi",
"掀": "xian",
"掁": "cheng",
"掂": "dian",
"掃": "sao",
"掄": "lun",
"掅": "qing",
"掆": "gang",
"掇": "duo",
"授": "shou",
"掉": "diao",
"掊": "pou",
"掋": "di",
"掌": "zhang",
"掍": "hun",
"掎": "ji",
"掏": "tao",
"掐": "qia",
"掑": "qi",
"排": "pai",
"掓": "shu",
"掔": "qian",
"掕": "ling",
"掖": "ye",
"掗": "ya",
"掘": "jue",
"掙": "zheng",
"掚": "liang",
"掛": "gua",
"掜": "ni",
"掝": "huo",
"掞": "shan",
"掟": "zheng",
"掠": "lve",
"採": "cai",
"探": "tan",
"掣": "che",
"掤": "bing",
"接": "jie",
"掦": "ti",
"控": "kong",
"推": "tui",
"掩": "yan",
"措": "cuo",
"掫": "zou",
"掬": "ju",
"掭": "tian",
"掮": "qian",
"掯": "ken",
"掰": "bai",
"掱": "pa",
"掲": "jie",
"掳": "lu",
"掴": "guo",
"掵": "ming",
"掶": "jie",
"掷": "zhi",
"掸": "dan",
"掹": "meng",
"掺": "chan",
"掻": "sao",
"掼": "guan",
"掽": "peng",
"掾": "yuan",
"掿": "nuo",
"揀": "jian",
"揁": "zheng",
"揂": "jiu",
"揃": "jian",
"揄": "yu",
"揅": "yan",
"揆": "kui",
"揇": "nan",
"揈": "hong",
"揉": "rou",
"揊": "pi",
"揋": "wei",
"揌": "sai",
"揍": "zou",
"揎": "xuan",
"描": "miao",
"提": "ti",
"揑": "nie",
"插": "cha",
"揓": "shi",
"揔": "zong",
"揕": "zhen",
"揖": "yi",
"揗": "xun",
"揘": "huang",
"揙": "bian",
"揚": "yang",
"換": "huan",
"揜": "yan",
"揝": "zan",
"揞": "an",
"揟": "xu",
"揠": "ya",
"握": "wo",
"揢": "ke",
"揣": "chuai",
"揤": "ji",
"揥": "ti",
"揦": "la",
"揧": "la",
"揨": "cheng",
"揩": "kai",
"揪": "jiu",
"揫": "jiu",
"揬": "tu",
"揭": "jie",
"揮": "hui",
"揯": "gen",
"揰": "chong",
"揱": "xiao",
"揲": "she",
"揳": "xie",
"援": "yuan",
"揵": "qian",
"揶": "ye",
"揷": "cha",
"揸": "zha",
"揹": "bei",
"揺": "yao",
"揻": "wei",
"揼": "beng",
"揽": "lan",
"揾": "wen",
"揿": "qin",
"搀": "chan",
"搁": "ge",
"搂": "lou",
"搃": "zong",
"搄": "gen",
"搅": "jiao",
"搆": "gou",
"搇": "qin",
"搈": "rong",
"搉": "que",
"搊": "chou",
"搋": "chuai",
"搌": "zhan",
"損": "sun",
"搎": "sun",
"搏": "bo",
"搐": "chu",
"搑": "rong",
"搒": "bang",
"搓": "cuo",
"搔": "sao",
"搕": "ke",
"搖": "yao",
"搗": "dao",
"搘": "zhi",
"搙": "nu",
"搚": "la",
"搛": "jian",
"搜": "sou",
"搝": "qiu",
"搞": "gao",
"搟": "xian",
"搠": "shuo",
"搡": "sang",
"搢": "jin",
"搣": "mie",
"搤": "e",
"搥": "chui",
"搦": "nuo",
"搧": "shan",
"搨": "ta",
"搩": "jie",
"搪": "tang",
"搫": "pan",
"搬": "ban",
"搭": "da",
"搮": "li",
"搯": "tao",
"搰": "hu",
"搱": "zhi",
"搲": "wa",
"搳": "hua",
"搴": "qian",
"搵": "wen",
"搶": "qiang",
"搷": "tian",
"搸": "zhen",
"搹": "e",
"携": "xie",
"搻": "na",
"搼": "quan",
"搽": "cha",
"搾": "zha",
"搿": "ge",
"摀": "wu",
"摁": "en",
"摂": "she",
"摃": "gang",
"摄": "she",
"摅": "shu",
"摆": "bai",
"摇": "yao",
"摈": "bin",
"摉": "sou",
"摊": "tan",
"摋": "sa",
"摌": "chan",
"摍": "suo",
"摎": "jiu",
"摏": "chong",
"摐": "chuang",
"摑": "guo",
"摒": "bing",
"摓": "feng",
"摔": "shuai",
"摕": "di",
"摖": "qi",
"摗": "sou",
"摘": "zhai",
"摙": "lian",
"摚": "cheng",
"摛": "chi",
"摜": "guan",
"摝": "lu",
"摞": "luo",
"摟": "lou",
"摠": "zong",
"摡": "gai",
"摢": "hu",
"摣": "zha",
"摤": "qiang",
"摥": "tang",
"摦": "hua",
"摧": "cui",
"摨": "zhi",
"摩": "mo",
"摪": "jiang",
"摫": "gui",
"摬": "ying",
"摭": "zhi",
"摮": "ao",
"摯": "zhi",
"摰": "nie",
"摱": "man",
"摲": "chan",
"摳": "kou",
"摴": "chu",
"摵": "se",
"摶": "tuan",
"摷": "jiao",
"摸": "mo",
"摹": "mo",
"摺": "zhe",
"摻": "chan",
"摼": "keng",
"摽": "biao",
"摾": "jiang",
"摿": "yao",
"撀": "gou",
"撁": "qian",
"撂": "liao",
"撃": "ji",
"撄": "ying",
"撅": "jue",
"撆": "pie",
"撇": "pie",
"撈": "lao",
"撉": "dun",
"撊": "xian",
"撋": "ruan",
"撌": "gui",
"撍": "zan",
"撎": "yi",
"撏": "xian",
"撐": "cheng",
"撑": "cheng",
"撒": "sa",
"撓": "nao",
"撔": "hong",
"撕": "si",
"撖": "han",
"撗": "heng",
"撘": "da",
"撙": "zun",
"撚": "nian",
"撛": "lin",
"撜": "zheng",
"撝": "hui",
"撞": "zhuang",
"撟": "jiao",
"撠": "ji",
"撡": "cao",
"撢": "dan",
"撣": "dan",
"撤": "che",
"撥": "bo",
"撦": "che",
"撧": "jue",
"撨": "xiao",
"撩": "liao",
"撪": "ben",
"撫": "fu",
"撬": "qiao",
"播": "bo",
"撮": "cuo",
"撯": "zhuo",
"撰": "zhuan",
"撱": "wei",
"撲": "pu",
"撳": "qin",
"撴": "dun",
"撵": "nian",
"撶": "hua",
"撷": "xie",
"撸": "lu",
"撹": "jiao",
"撺": "cuan",
"撻": "ta",
"撼": "han",
"撽": "qiao",
"撾": "zhua",
"撿": "jian",
"擀": "gan",
"擁": "yong",
"擂": "lei",
"擃": "nang",
"擄": "lu",
"擅": "shan",
"擆": "zhuo",
"擇": "ze",
"擈": "pu",
"擉": "chuo",
"擊": "ji",
"擋": "dang",
"擌": "se",
"操": "cao",
"擎": "qing",
"擏": "qing",
"擐": "huan",
"擑": "jie",
"擒": "qin",
"擓": "kuai",
"擔": "dan",
"擕": "xie",
"擖": "qia",
"擗": "pi",
"擘": "bo",
"擙": "ao",
"據": "ju",
"擛": "ye",
"擜": "e",
"擝": "meng",
"擞": "sou",
"擟": "mi",
"擠": "ji",
"擡": "tai",
"擢": "zhuo",
"擣": "dao",
"擤": "xing",
"擥": "lan",
"擦": "ca",
"擧": "ju",
"擨": "ye",
"擩": "ru",
"擪": "ye",
"擫": "ye",
"擬": "ni",
"擭": "huo",
"擮": "jie",
"擯": "bin",
"擰": "ning",
"擱": "ge",
"擲": "zhi",
"擳": "zhi",
"擴": "kuo",
"擵": "mo",
"擶": "jian",
"擷": "xie",
"擸": "lie",
"擹": "tan",
"擺": "bai",
"擻": "sou",
"擼": "lu",
"擽": "li",
"擾": "rao",
"擿": "ti",
"攀": "pan",
"攁": "yang",
"攂": "lei",
"攃": "ca",
"攄": "shu",
"攅": "zan",
"攆": "nian",
"攇": "xian",
"攈": "jun",
"攉": "huo",
"攊": "li",
"攋": "la",
"攌": "huan",
"攍": "ying",
"攎": "lu",
"攏": "long",
"攐": "qian",
"攑": "qian",
"攒": "zan",
"攓": "qian",
"攔": "lan",
"攕": "xian",
"攖": "ying",
"攗": "mei",
"攘": "rang",
"攙": "chan",
"攚": "weng",
"攛": "cuan",
"攜": "xie",
"攝": "she",
"攞": "luo",
"攟": "jun",
"攠": "mi",
"攡": "chi",
"攢": "zan",
"攣": "luan",
"攤": "tan",
"攥": "zuan",
"攦": "li",
"攧": "dian",
"攨": "wa",
"攩": "dang",
"攪": "jiao",
"攫": "jue",
"攬": "lan",
"攭": "li",
"攮": "nang",
"支": "zhi",
"攰": "gui",
"攱": "gui",
"攲": "qi",
"攳": "xun",
"攴": "pu",
"攵": "pu",
"收": "shou",
"攷": "kao",
"攸": "you",
"改": "gai",
"攺": "yi",
"攻": "gong",
"攼": "gan",
"攽": "ban",
"放": "fang",
"政": "zheng",
"敀": "po",
"敁": "dian",
"敂": "kou",
"敃": "min",
"敄": "wu",
"故": "gu",
"敆": "he",
"敇": "ce",
"效": "xiao",
"敉": "mi",
"敊": "chu",
"敋": "ge",
"敌": "di",
"敍": "xu",
"敎": "jiao",
"敏": "min",
"敐": "chen",
"救": "jiu",
"敒": "shen",
"敓": "duo",
"敔": "yu",
"敕": "chi",
"敖": "ao",
"敗": "bai",
"敘": "xu",
"教": "jiao",
"敚": "duo",
"敛": "lian",
"敜": "nie",
"敝": "bi",
"敞": "chang",
"敟": "dian",
"敠": "duo",
"敡": "yi",
"敢": "gan",
"散": "san",
"敤": "ke",
"敥": "yan",
"敦": "dun",
"敧": "qi",
"敨": "tou",
"敩": "xiao",
"敪": "duo",
"敫": "jiao",
"敬": "jing",
"敭": "yang",
"敮": "xia",
"敯": "min",
"数": "shu",
"敱": "ai",
"敲": "qiao",
"敳": "ai",
"整": "zheng",
"敵": "di",
"敶": "chen",
"敷": "fu",
"數": "shu",
"敹": "liao",
"敺": "qu",
"敻": "xiong",
"敼": "yi",
"敽": "jiao",
"敾": "shan",
"敿": "jiao",
"斀": "zhuo",
"斁": "yi",
"斂": "lian",
"斃": "bi",
"斄": "li",
"斅": "xiao",
"斆": "xiao",
"文": "wen",
"斈": "xue",
"斉": "qi",
"斊": "qi",
"斋": "zhai",
"斌": "bin",
"斍": "jue",
"斎": "zhai",
"斏": "lang",
"斐": "fei",
"斑": "ban",
"斒": "ban",
"斓": "lan",
"斔": "yu",
"斕": "lan",
"斖": "wei",
"斗": "dou",
"斘": "sheng",
"料": "liao",
"斚": "jia",
"斛": "hu",
"斜": "xie",
"斝": "jia",
"斞": "yu",
"斟": "zhen",
"斠": "jiao",
"斡": "wo",
"斢": "tou",
"斣": "dou",
"斤": "jin",
"斥": "chi",
"斦": "yin",
"斧": "fu",
"斨": "qiang",
"斩": "zhan",
"斪": "qu",
"斫": "zhuo",
"斬": "zhan",
"断": "duan",
"斮": "zhuo",
"斯": "si",
"新": "xin",
"斱": "zhuo",
"斲": "zhuo",
"斳": "qin",
"斴": "lin",
"斵": "zhuo",
"斶": "chu",
"斷": "duan",
"斸": "zhu",
"方": "fang",
"斺": "chan",
"斻": "hang",
"於": "yu",
"施": "shi",
"斾": "pei",
"斿": "liu",
"旀": "mei",
"旁": "pang",
"旂": "qi",
"旃": "zhan",
"旄": "mao",
"旅": "lv",
"旆": "pei",
"旇": "pi",
"旈": "liu",
"旉": "fu",
"旊": "fang",
"旋": "xuan",
"旌": "jing",
"旍": "jing",
"旎": "ni",
"族": "zu",
"旐": "zhao",
"旑": "yi",
"旒": "liu",
"旓": "shao",
"旔": "jian",
"旖": "yi",
"旗": "qi",
"旘": "zhi",
"旙": "fan",
"旚": "piao",
"旛": "fan",
"旜": "zhan",
"旝": "kuai",
"旞": "sui",
"旟": "yu",
"无": "wu",
"旡": "ji",
"既": "ji",
"旣": "ji",
"旤": "huo",
"日": "ri",
"旦": "dan",
"旧": "jiu",
"旨": "zhi",
"早": "zao",
"旪": "xie",
"旫": "tiao",
"旬": "xun",
"旭": "xu",
"旮": "ga",
"旯": "la",
"旰": "gan",
"旱": "han",
"旲": "tai",
"旳": "di",
"旴": "xu",
"旵": "chan",
"时": "shi",
"旷": "kuang",
"旸": "yang",
"旹": "shi",
"旺": "wang",
"旻": "min",
"旼": "min",
"旽": "tun",
"旾": "chun",
"旿": "wu",
"昀": "yun",
"昁": "bei",
"昂": "ang",
"昃": "ze",
"昄": "ban",
"昅": "jie",
"昆": "kun",
"昇": "sheng",
"昈": "hu",
"昉": "fang",
"昊": "hao",
"昋": "gui",
"昌": "chang",
"昍": "xuan",
"明": "ming",
"昏": "hun",
"昐": "fen",
"昑": "qin",
"昒": "hu",
"易": "yi",
"昔": "xi",
"昕": "xin",
"昖": "yan",
"昗": "ze",
"昘": "fang",
"昙": "tan",
"昚": "shen",
"昛": "ju",
"昜": "yang",
"昝": "zan",
"昞": "bing",
"星": "xing",
"映": "ying",
"昡": "xuan",
"昢": "po",
"昣": "zhen",
"昤": "ling",
"春": "chun",
"昦": "hao",
"昧": "mei",
"昨": "zuo",
"昩": "mo",
"昪": "bian",
"昫": "xu",
"昬": "hun",
"昭": "zhao",
"昮": "zong",
"是": "shi",
"昰": "shi",
"昱": "yu",
"昲": "fei",
"昳": "die",
"昴": "mao",
"昵": "ni",
"昶": "chang",
"昷": "wen",
"昸": "dong",
"昹": "ai",
"昺": "bing",
"昻": "ang",
"昼": "zhou",
"昽": "long",
"显": "xian",
"昿": "kuang",
"晀": "tiao",
"晁": "chao",
"時": "shi",
"晃": "huang",
"晄": "huang",
"晅": "xuan",
"晆": "kui",
"晇": "xu",
"晈": "jiao",
"晉": "jin",
"晊": "zhi",
"晋": "jin",
"晌": "shang",
"晍": "tong",
"晎": "hong",
"晏": "yan",
"晐": "gai",
"晑": "xiang",
"晒": "shai",
"晓": "xiao",
"晔": "ye",
"晕": "yun",
"晖": "hui",
"晗": "han",
"晘": "han",
"晙": "jun",
"晚": "wan",
"晛": "xian",
"晜": "kun",
"晝": "zhou",
"晞": "xi",
"晟": "sheng",
"晠": "sheng",
"晡": "bu",
"晢": "zhe",
"晣": "zhe",
"晤": "wu",
"晥": "wan",
"晦": "hui",
"晧": "hao",
"晨": "chen",
"晩": "wan",
"晪": "tian",
"晫": "zhuo",
"晬": "zui",
"晭": "zhou",
"普": "pu",
"景": "jing",
"晰": "xi",
"晱": "shan",
"晲": "ni",
"晳": "xi",
"晴": "qing",
"晵": "qi",
"晶": "jing",
"晷": "gui",
"晸": "zheng",
"晹": "yi",
"智": "zhi",
"晻": "an",
"晼": "wan",
"晽": "lin",
"晾": "liang",
"晿": "cheng",
"暀": "wang",
"暁": "xiao",
"暂": "zan",
"暃": "fei",
"暄": "xuan",
"暅": "xuan",
"暆": "yi",
"暇": "xia",
"暈": "yun",
"暉": "hui",
"暊": "xu",
"暋": "min",
"暌": "kui",
"暍": "ye",
"暎": "ying",
"暏": "shu",
"暐": "wei",
"暑": "shu",
"暒": "qing",
"暓": "mao",
"暔": "nan",
"暕": "jian",
"暖": "nuan",
"暗": "an",
"暘": "yang",
"暙": "chun",
"暚": "yao",
"暛": "suo",
"暜": "pu",
"暝": "ming",
"暞": "jiao",
"暟": "kai",
"暠": "hao",
"暡": "weng",
"暢": "chang",
"暣": "qi",
"暤": "hao",
"暥": "yan",
"暦": "li",
"暧": "ai",
"暨": "ji",
"暩": "ji",
"暪": "men",
"暫": "zan",
"暬": "xie",
"暭": "hao",
"暮": "mu",
"暯": "mu",
"暰": "cong",
"暱": "ni",
"暲": "zhang",
"暳": "hui",
"暴": "bao",
"暵": "han",
"暶": "xuan",
"暷": "chuan",
"暸": "liao",
"暹": "xian",
"暺": "tan",
"暻": "jing",
"暼": "pie",
"暽": "lin",
"暾": "tun",
"暿": "xi",
"曀": "yi",
"曁": "ji",
"曂": "huang",
"曃": "dai",
"曄": "ye",
"曅": "ye",
"曆": "li",
"曇": "tan",
"曈": "tong",
"曉": "xiao",
"曊": "fei",
"曋": "shen",
"曌": "zhao",
"曍": "hao",
"曎": "yi",
"曏": "xiang",
"曐": "xing",
"曑": "shen",
"曒": "jiao",
"曓": "bao",
"曔": "jing",
"曕": "yan",
"曖": "ai",
"曗": "ye",
"曘": "ru",
"曙": "shu",
"曚": "meng",
"曛": "xun",
"曜": "yao",
"曝": "pu",
"曞": "li",
"曟": "chen",
"曠": "kuang",
"曡": "die",
"曢": "liao",
"曣": "yan",
"曤": "huo",
"曥": "lu",
"曦": "xi",
"曧": "rong",
"曨": "long",
"曩": "nang",
"曪": "luo",
"曫": "luan",
"曬": "shai",
"曭": "tang",
"曮": "yan",
"曯": "zhu",
"曰": "yue",
"曱": "yue",
"曲": "qu",
"曳": "ye",
"更": "geng",
"曵": "ye",
"曶": "hu",
"曷": "he",
"書": "shu",
"曹": "cao",
"曺": "cao",
"曻": "sheng",
"曼": "man",
"曽": "zeng",
"曾": "zeng",
"替": "ti",
"最": "zui",
"朁": "can",
"朂": "xu",
"會": "hui",
"朄": "yin",
"朅": "qie",
"朆": "fen",
"朇": "bi",
"月": "yue",
"有": "you",
"朊": "ruan",
"朋": "peng",
"朌": "fen",
"服": "fu",
"朎": "ling",
"朏": "fei",
"朐": "qu",
"朑": "ti",
"朒": "nv",
"朓": "tiao",
"朔": "shuo",
"朕": "zhen",
"朖": "lang",
"朗": "lang",
"朘": "juan",
"朙": "ming",
"朚": "huang",
"望": "wang",
"朜": "tun",
"朝": "zhao",
"朞": "ji",
"期": "qi",
"朠": "ying",
"朡": "zong",
"朢": "wang",
"朣": "tong",
"朤": "lang",
"朥": "lao",
"朦": "meng",
"朧": "long",
"木": "mu",
"未": "wei",
"末": "mo",
"本": "ben",
"札": "zha",
"朮": "shu",
"术": "shu",
"朱": "zhu",
"朲": "ren",
"朳": "ba",
"朴": "pu",
"朵": "duo",
"朶": "duo",
"朷": "dao",
"朸": "li",
"朹": "qiu",
"机": "ji",
"朻": "jiu",
"朼": "bi",
"朽": "xiu",
"朾": "cheng",
"朿": "ci",
"杀": "sha",
"杁": "ru",
"杂": "za",
"权": "quan",
"杄": "qian",
"杅": "yu",
"杆": "gan",
"杇": "wu",
"杈": "cha",
"杉": "shan",
"杊": "xun",
"杋": "fan",
"杌": "wu",
"杍": "zi",
"李": "li",
"杏": "xing",
"材": "cai",
"村": "cun",
"杒": "ren",
"杓": "shao",
"杔": "tuo",
"杕": "di",
"杖": "zhang",
"杗": "mang",
"杘": "chi",
"杙": "yi",
"杚": "gu",
"杛": "gong",
"杜": "du",
"杝": "yi",
"杞": "qi",
"束": "shu",
"杠": "gang",
"条": "tiao",
"杢": "jie",
"杣": "mian",
"杤": "wan",
"来": "lai",
"杦": "jiu",
"杧": "mang",
"杨": "yang",
"杩": "ma",
"杪": "miao",
"杫": "si",
"杬": "yuan",
"杭": "hang",
"杮": "fei",
"杯": "bei",
"杰": "jie",
"東": "dong",
"杲": "gao",
"杳": "yao",
"杴": "xian",
"杵": "chu",
"杶": "chun",
"杷": "pa",
"杸": "shu",
"杹": "hua",
"杺": "xin",
"杻": "niu",
"杼": "zhu",
"杽": "chou",
"松": "song",
"板": "ban",
"枀": "song",
"极": "ji",
"枂": "wo",
"枃": "jin",
"构": "gou",
"枅": "ji",
"枆": "mao",
"枇": "pi",
"枈": "pi",
"枉": "wang",
"枊": "ang",
"枋": "fang",
"枌": "fen",
"枍": "yi",
"枎": "fu",
"枏": "nan",
"析": "xi",
"枑": "hu",
"枒": "ya",
"枓": "dou",
"枔": "xin",
"枕": "zhen",
"枖": "yao",
"林": "lin",
"枘": "rui",
"枙": "e",
"枚": "mei",
"枛": "zhao",
"果": "guo",
"枝": "zhi",
"枞": "cong",
"枟": "yun",
"枠": "hua",
"枡": "sheng",
"枢": "shu",
"枣": "zao",
"枤": "di",
"枥": "li",
"枦": "lu",
"枧": "jian",
"枨": "cheng",
"枩": "song",
"枪": "qiang",
"枫": "feng",
"枬": "zhan",
"枭": "xiao",
"枮": "xian",
"枯": "ku",
"枰": "ping",
"枱": "si",
"枲": "xi",
"枳": "zhi",
"枴": "guai",
"枵": "xiao",
"架": "jia",
"枷": "jia",
"枸": "ju",
"枹": "bao",
"枺": "mo",
"枻": "yi",
"枼": "ye",
"枽": "ye",
"枾": "shi",
"枿": "nie",
"柀": "bi",
"柁": "tuo",
"柂": "yi",
"柃": "ling",
"柄": "bing",
"柅": "ni",
"柆": "la",
"柇": "he",
"柈": "pan",
"柉": "fan",
"柊": "zhong",
"柋": "dai",
"柌": "ci",
"柍": "yang",
"柎": "fu",
"柏": "bai",
"某": "mou",
"柑": "gan",
"柒": "qi",
"染": "ran",
"柔": "rou",
"柕": "mao",
"柖": "shao",
"柗": "song",
"柘": "zhe",
"柙": "xia",
"柚": "you",
"柛": "shen",
"柜": "gui",
"柝": "tuo",
"柞": "zuo",
"柟": "nan",
"柠": "ning",
"柡": "yong",
"柢": "di",
"柣": "zhi",
"柤": "zha",
"查": "cha",
"柦": "dan",
"柧": "gu",
"柨": "bu",
"柩": "jiu",
"柪": "ao",
"柫": "fu",
"柬": "jian",
"柭": "ba",
"柮": "duo",
"柯": "ke",
"柰": "nai",
"柱": "zhu",
"柲": "bi",
"柳": "liu",
"柴": "chai",
"柵": "shan",
"柶": "si",
"柷": "zhu",
"柸": "bei",
"柹": "shi",
"柺": "guai",
"査": "cha",
"柼": "yao",
"柽": "cheng",
"柾": "jiu",
"柿": "shi",
"栀": "zhi",
"栁": "liu",
"栂": "mei",
"栃": "li",
"栄": "rong",
"栅": "zha",
"栆": "zao",
"标": "biao",
"栈": "zhan",
"栉": "zhi",
"栊": "long",
"栋": "dong",
"栌": "lu",
"栎": "li",
"栏": "lan",
"栐": "yong",
"树": "shu",
"栒": "xun",
"栓": "shuan",
"栔": "qi",
"栕": "chen",
"栖": "qi",
"栗": "li",
"栘": "yi",
"栙": "xiang",
"栚": "zhen",
"栛": "li",
"栜": "se",
"栝": "gua",
"栞": "kan",
"栟": "ben",
"栠": "ren",
"校": "xiao",
"栢": "bai",
"栣": "ren",
"栤": "bing",
"栥": "zi",
"栦": "chou",
"栧": "yi",
"栨": "ci",
"栩": "xu",
"株": "zhu",
"栫": "jian",
"栬": "zui",
"栭": "er",
"栮": "er",
"栯": "you",
"栰": "fa",
"栱": "gong",
"栲": "kao",
"栳": "lao",
"栴": "zhan",
"栵": "lie",
"栶": "yin",
"样": "yang",
"核": "he",
"根": "gen",
"栺": "zhi",
"栻": "shi",
"格": "ge",
"栽": "zai",
"栾": "luan",
"栿": "fu",
"桀": "jie",
"桁": "heng",
"桂": "gui",
"桃": "tao",
"桄": "guang",
"桅": "wei",
"框": "kuang",
"桇": "ru",
"案": "an",
"桉": "an",
"桊": "juan",
"桋": "yi",
"桌": "zhuo",
"桍": "ku",
"桎": "zhi",
"桏": "qiong",
"桐": "tong",
"桑": "sang",
"桒": "sang",
"桓": "huan",
"桔": "jie",
"桕": "jiu",
"桖": "xue",
"桗": "duo",
"桘": "chui",
"桙": "yu",
"桚": "za",
"桜": "ying",
"桝": "jie",
"桞": "liu",
"桟": "zhan",
"桠": "ya",
"桡": "rao",
"桢": "zhen",
"档": "dang",
"桤": "qi",
"桥": "qiao",
"桦": "hua",
"桧": "gui",
"桨": "jiang",
"桩": "zhuang",
"桪": "xun",
"桫": "suo",
"桬": "sha",
"桭": "chen",
"桮": "bei",
"桯": "ting",
"桰": "gua",
"桱": "jing",
"桲": "bo",
"桳": "ben",
"桴": "fu",
"桵": "rui",
"桶": "tong",
"桷": "jue",
"桸": "xi",
"桹": "lang",
"桺": "liu",
"桻": "feng",
"桼": "qi",
"桽": "wen",
"桾": "jun",
"桿": "gan",
"梀": "su",
"梁": "liang",
"梂": "qiu",
"梃": "ting",
"梄": "you",
"梅": "mei",
"梆": "bang",
"梇": "long",
"梈": "peng",
"梉": "zhuang",
"梊": "di",
"梋": "xuan",
"梌": "tu",
"梍": "zao",
"梎": "ao",
"梏": "gu",
"梐": "bi",
"梑": "di",
"梒": "han",
"梓": "zi",
"梔": "zhi",
"梕": "ren",
"梖": "bei",
"梗": "geng",
"梘": "jian",
"梙": "huan",
"梚": "wan",
"梛": "nuo",
"梜": "jia",
"條": "tiao",
"梞": "ji",
"梟": "xiao",
"梠": "lv",
"梡": "kuan",
"梢": "shao",
"梣": "chen",
"梤": "fen",
"梥": "song",
"梦": "meng",
"梧": "wu",
"梨": "li",
"梩": "si",
"梪": "dou",
"梫": "qin",
"梬": "ying",
"梭": "suo",
"梮": "ju",
"梯": "ti",
"械": "xie",
"梱": "kun",
"梲": "zhuo",
"梳": "shu",
"梴": "chan",
"梵": "fan",
"梶": "wei",
"梷": "jing",
"梸": "li",
"梹": "bin",
"梺": "xia",
"梻": "fo",
"梼": "chou",
"梽": "zhi",
"梾": "lai",
"梿": "lian",
"检": "jian",
"棁": "zhuo",
"棂": "ling",
"棃": "li",
"棄": "qi",
"棅": "bing",
"棆": "lun",
"棇": "cong",
"棈": "qian",
"棉": "mian",
"棊": "qi",
"棋": "qi",
"棌": "cai",
"棍": "gun",
"棎": "chan",
"棏": "de",
"棐": "fei",
"棑": "pai",
"棒": "bang",
"棓": "bang",
"棔": "hun",
"棕": "zong",
"棖": "cheng",
"棗": "zao",
"棘": "ji",
"棙": "li",
"棚": "peng",
"棛": "yu",
"棜": "yu",
"棝": "gu",
"棞": "jun",
"棟": "dong",
"棠": "tang",
"棡": "gang",
"棢": "wang",
"棣": "di",
"棤": "que",
"棥": "fan",
"棦": "cheng",
"棧": "zhan",
"棨": "qi",
"棩": "yuan",
"棪": "yan",
"棫": "yu",
"棬": "quan",
"棭": "yi",
"森": "sen",
"棯": "ren",
"棰": "chui",
"棱": "leng",
"棲": "qi",
"棳": "zhuo",
"棴": "fu",
"棵": "ke",
"棶": "lai",
"棷": "zou",
"棸": "zou",
"棹": "zhao",
"棺": "guan",
"棻": "fen",
"棼": "fen",
"棽": "chen",
"棾": "qing",
"棿": "ni",
"椀": "wan",
"椁": "guo",
"椂": "lu",
"椃": "hao",
"椄": "jie",
"椅": "yi",
"椆": "chou",
"椇": "ju",
"椈": "ju",
"椉": "cheng",
"椊": "zu",
"椋": "liang",
"椌": "qiang",
"植": "zhi",
"椎": "zhui",
"椏": "ya",
"椐": "ju",
"椑": "bei",
"椒": "jiao",
"椓": "zhuo",
"椔": "zi",
"椕": "bin",
"椖": "peng",
"椗": "ding",
"椘": "chu",
"椙": "chang",
"椚": "men",
"椛": "hua",
"検": "jian",
"椝": "gui",
"椞": "xi",
"椟": "du",
"椠": "qian",
"椡": "dao",
"椢": "gui",
"椣": "dian",
"椤": "luo",
"椥": "zhi",
"椦": "quan",
"椨": "fu",
"椩": "geng",
"椪": "peng",
"椫": "shan",
"椬": "yi",
"椭": "tuo",
"椮": "sen",
"椯": "duo",
"椰": "ye",
"椱": "fu",
"椲": "wei",
"椳": "wei",
"椴": "duan",
"椵": "jia",
"椶": "zong",
"椷": "jian",
"椸": "yi",
"椹": "zhen",
"椺": "xi",
"椻": "yan",
"椼": "yan",
"椽": "chuan",
"椾": "jian",
"椿": "chun",
"楀": "yu",
"楁": "he",
"楂": "zha",
"楃": "wo",
"楄": "pian",
"楅": "bi",
"楆": "yao",
"楇": "guo",
"楈": "xu",
"楉": "ruo",
"楊": "yang",
"楋": "la",
"楌": "yan",
"楍": "ben",
"楎": "hui",
"楏": "kui",
"楐": "jie",
"楑": "kui",
"楒": "si",
"楓": "feng",
"楔": "xie",
"楕": "tuo",
"楖": "ji",
"楗": "jian",
"楘": "mu",
"楙": "mao",
"楚": "chu",
"楛": "ku",
"楜": "hu",
"楝": "lian",
"楞": "leng",
"楟": "ting",
"楠": "nan",
"楡": "yu",
"楢": "you",
"楣": "mei",
"楤": "song",
"楥": "xuan",
"楦": "xuan",
"楧": "yang",
"楨": "zhen",
"楩": "pian",
"楪": "die",
"楫": "ji",
"楬": "jie",
"業": "ye",
"楮": "chu",
"楯": "shun",
"楰": "yu",
"楱": "cou",
"楲": "wei",
"楳": "mei",
"楴": "di",
"極": "ji",
"楶": "jie",
"楷": "kai",
"楸": "qiu",
"楹": "ying",
"楺": "rou",
"楻": "huang",
"楼": "lou",
"楽": "le",
"楾": "quan",
"楿": "xiang",
"榀": "pin",
"榁": "shi",
"概": "gai",
"榃": "tan",
"榄": "lan",
"榅": "wen",
"榆": "yu",
"榇": "chen",
"榈": "lv",
"榉": "ju",
"榊": "shen",
"榋": "chu",
"榌": "bi",
"榍": "xie",
"榎": "jia",
"榏": "yi",
"榐": "zhan",
"榑": "fu",
"榒": "nuo",
"榓": "mi",
"榔": "lang",
"榕": "rong",
"榖": "gu",
"榗": "jian",
"榘": "ju",
"榙": "ta",
"榚": "yao",
"榛": "zhen",
"榜": "bang",
"榝": "sha",
"榞": "yuan",
"榟": "zi",
"榠": "ming",
"榡": "su",
"榢": "jia",
"榣": "yao",
"榤": "jie",
"榥": "huang",
"榦": "gan",
"榧": "fei",
"榨": "zha",
"榩": "qian",
"榪": "ma",
"榫": "sun",
"榬": "yuan",
"榭": "xie",
"榮": "rong",
"榯": "shi",
"榰": "zhi",
"榱": "cui",
"榲": "wen",
"榳": "ting",
"榴": "liu",
"榵": "rong",
"榶": "tang",
"榷": "que",
"榸": "zhai",
"榹": "si",
"榺": "sheng",
"榻": "ta",
"榼": "ke",
"榽": "xi",
"榾": "gu",
"榿": "qi",
"槀": "gao",
"槁": "gao",
"槂": "sun",
"槃": "pan",
"槄": "tao",
"槅": "ge",
"槆": "chun",
"槇": "dian",
"槈": "nou",
"槉": "ji",
"槊": "shuo",
"構": "gou",
"槌": "chui",
"槍": "qiang",
"槎": "cha",
"槏": "qian",
"槐": "huai",
"槑": "mei",
"槒": "xu",
"槓": "gang",
"槔": "gao",
"槕": "zhuo",
"槖": "tuo",
"槗": "qiao",
"様": "yang",
"槙": "dian",
"槚": "jia",
"槛": "jian",
"槜": "zui",
"槝": "dao",
"槞": "long",
"槟": "bin",
"槠": "zhu",
"槡": "sang",
"槢": "xi",
"槣": "ji",
"槤": "lian",
"槥": "hui",
"槦": "rong",
"槧": "qian",
"槨": "guo",
"槩": "gai",
"槪": "gai",
"槫": "tuan",
"槬": "hua",
"槭": "qi",
"槮": "sen",
"槯": "cui",
"槰": "peng",
"槱": "you",
"槲": "hu",
"槳": "jiang",
"槴": "hu",
"槵": "huan",
"槶": "gui",
"槷": "nie",
"槸": "yi",
"槹": "gao",
"槺": "kang",
"槻": "gui",
"槼": "gui",
"槽": "cao",
"槾": "man",
"槿": "jin",
"樀": "di",
"樁": "zhuang",
"樂": "le",
"樃": "lang",
"樄": "chen",
"樅": "cong",
"樆": "li",
"樇": "xiu",
"樈": "qing",
"樉": "shang",
"樊": "fan",
"樋": "tong",
"樌": "guan",
"樍": "ze",
"樎": "su",
"樏": "lei",
"樐": "lu",
"樑": "liang",
"樒": "mi",
"樓": "lou",
"樔": "chao",
"樕": "su",
"樖": "ke",
"樗": "chu",
"樘": "tang",
"標": "biao",
"樚": "lu",
"樛": "jiu",
"樜": "zhe",
"樝": "zha",
"樞": "shu",
"樟": "zhang",
"樠": "man",
"模": "mo",
"樢": "niao",
"樣": "yang",
"樤": "tiao",
"樥": "peng",
"樦": "zhu",
"樧": "sha",
"樨": "xi",
"権": "quan",
"横": "heng",
"樫": "jian",
"樬": "cong",
"樭": "ji",
"樮": "yan",
"樯": "qiang",
"樰": "xue",
"樱": "ying",
"樲": "er",
"樳": "xun",
"樴": "zhi",
"樵": "qiao",
"樶": "zui",
"樷": "cong",
"樸": "pu",
"樹": "shu",
"樺": "hua",
"樻": "gui",
"樼": "zhen",
"樽": "zun",
"樾": "yue",
"樿": "shan",
"橀": "xi",
"橁": "chun",
"橂": "dian",
"橃": "fa",
"橄": "gan",
"橅": "mo",
"橆": "wu",
"橇": "qiao",
"橈": "rao",
"橉": "lin",
"橊": "liu",
"橋": "qiao",
"橌": "xian",
"橍": "run",
"橎": "fan",
"橏": "zhan",
"橐": "tuo",
"橑": "liao",
"橒": "yun",
"橓": "shun",
"橔": "tui",
"橕": "cheng",
"橖": "tang",
"橗": "meng",
"橘": "ju",
"橙": "cheng",
"橚": "su",
"橛": "jue",
"橜": "jue",
"橝": "tan",
"橞": "hui",
"機": "ji",
"橠": "nuo",
"橡": "xiang",
"橢": "tuo",
"橣": "ning",
"橤": "rui",
"橥": "zhu",
"橦": "tong",
"橧": "zeng",
"橨": "fen",
"橩": "qiong",
"橪": "ran",
"橫": "heng",
"橬": "qian",
"橭": "gu",
"橮": "liu",
"橯": "lao",
"橰": "gao",
"橱": "chu",
"橲": "xi",
"橳": "sheng",
"橴": "zi",
"橵": "zan",
"橶": "ji",
"橷": "dou",
"橸": "jing",
"橹": "lu",
"橺": "xian",
"橻": "cu",
"橼": "yuan",
"橽": "ta",
"橾": "shu",
"橿": "jiang",
"檀": "tan",
"檁": "lin",
"檂": "nong",
"檃": "yin",
"檄": "xi",
"檅": "hui",
"檆": "shan",
"檇": "zui",
"檈": "xuan",
"檉": "cheng",
"檊": "gan",
"檋": "ju",
"檌": "zui",
"檍": "yi",
"檎": "qin",
"檏": "pu",
"檐": "yan",
"檑": "lei",
"檒": "feng",
"檓": "hui",
"檔": "dang",
"檕": "ji",
"檖": "sui",
"檗": "bo",
"檘": "ping",
"檙": "cheng",
"檚": "chu",
"檛": "zhua",
"檜": "gui",
"檝": "ji",
"檞": "jie",
"檟": "jia",
"檠": "qing",
"檡": "zhai",
"檢": "jian",
"檣": "qiang",
"檤": "dao",
"檥": "yi",
"檦": "biao",
"檧": "song",
"檨": "she",
"檩": "lin",
"檪": "li",
"檫": "cha",
"檬": "meng",
"檭": "yin",
"檮": "chou",
"檯": "tai",
"檰": "mian",
"檱": "qi",
"檲": "tuan",
"檳": "bin",
"檴": "huo",
"檵": "ji",
"檶": "qian",
"檷": "ni",
"檸": "ning",
"檹": "yi",
"檺": "gao",
"檻": "jian",
"檼": "yin",
"檽": "nou",
"檾": "qing",
"檿": "yan",
"櫀": "qi",
"櫁": "mi",
"櫂": "zhao",
"櫃": "gui",
"櫄": "chun",
"櫅": "ji",
"櫆": "kui",
"櫇": "po",
"櫈": "deng",
"櫉": "chu",
"櫊": "ge",
"櫋": "mian",
"櫌": "you",
"櫍": "zhi",
"櫎": "huang",
"櫏": "qian",
"櫐": "lei",
"櫑": "lei",
"櫒": "sa",
"櫓": "lu",
"櫔": "li",
"櫕": "cuan",
"櫖": "lv",
"櫗": "mie",
"櫘": "hui",
"櫙": "ou",
"櫚": "lv",
"櫛": "zhi",
"櫜": "gao",
"櫝": "du",
"櫞": "yuan",
"櫟": "li",
"櫠": "fei",
"櫡": "zhuo",
"櫢": "sou",
"櫣": "lian",
"櫤": "jiang",
"櫥": "chu",
"櫦": "qing",
"櫧": "zhu",
"櫨": "lu",
"櫩": "yan",
"櫪": "li",
"櫫": "zhu",
"櫬": "chen",
"櫭": "jue",
"櫮": "e",
"櫯": "su",
"櫰": "huai",
"櫱": "nie",
"櫲": "yu",
"櫳": "long",
"櫴": "la",
"櫵": "qiao",
"櫶": "xian",
"櫷": "gui",
"櫸": "ju",
"櫹": "xiao",
"櫺": "ling",
"櫻": "ying",
"櫼": "jian",
"櫽": "yin",
"櫾": "you",
"櫿": "ying",
"欀": "xiang",
"欁": "nong",
"欂": "bo",
"欃": "chan",
"欄": "lan",
"欅": "ju",
"欆": "shuang",
"欇": "she",
"欈": "wei",
"欉": "cong",
"權": "quan",
"欋": "qu",
"欌": "cang",
"欍": "jiu",
"欎": "yu",
"欏": "luo",
"欐": "li",
"欑": "cuan",
"欒": "luan",
"欓": "dang",
"欔": "qu",
"欕": "yan",
"欖": "lan",
"欗": "lan",
"欘": "zhu",
"欙": "lei",
"欚": "li",
"欛": "ba",
"欜": "nang",
"欝": "yu",
"欞": "ling",
"欟": "guan",
"欠": "qian",
"次": "ci",
"欢": "huan",
"欣": "xin",
"欤": "yu",
"欥": "yu",
"欦": "qian",
"欧": "ou",
"欨": "xu",
"欩": "chao",
"欪": "chu",
"欫": "qi",
"欬": "kai",
"欭": "yi",
"欮": "jue",
"欯": "xi",
"欰": "xu",
"欱": "he",
"欲": "yu",
"欳": "kuai",
"欴": "lang",
"欵": "kuan",
"欶": "shuo",
"欷": "xi",
"欸": "ei",
"欹": "qi",
"欺": "qi",
"欻": "xu",
"欼": "chi",
"欽": "qin",
"款": "kuan",
"欿": "kan",
"歀": "kuan",
"歁": "kan",
"歂": "chuan",
"歃": "sha",
"歄": "gua",
"歅": "yan",
"歆": "xin",
"歇": "xie",
"歈": "yu",
"歉": "qian",
"歊": "xiao",
"歋": "ye",
"歌": "ge",
"歍": "wu",
"歎": "tan",
"歏": "jin",
"歐": "ou",
"歑": "hu",
"歒": "ti",
"歓": "huan",
"歔": "xu",
"歕": "pen",
"歖": "xi",
"歗": "xiao",
"歘": "xu",
"歙": "xi",
"歚": "shan",
"歛": "lian",
"歜": "chu",
"歝": "yi",
"歞": "e",
"歟": "yu",
"歠": "chuo",
"歡": "huan",
"止": "zhi",
"正": "zheng",
"此": "ci",
"步": "bu",
"武": "wu",
"歧": "qi",
"歨": "bu",
"歩": "bu",
"歪": "wai",
"歫": "ju",
"歬": "qian",
"歭": "zhi",
"歮": "se",
"歯": "chi",
"歰": "se",
"歱": "zhong",
"歲": "sui",
"歳": "sui",
"歴": "li",
"歵": "ze",
"歶": "yu",
"歷": "li",
"歸": "gui",
"歹": "dai",
"歺": "e",
"死": "si",
"歼": "jian",
"歽": "zhe",
"歾": "mo",
"歿": "mo",
"殀": "yao",
"殁": "mo",
"殂": "cu",
"殃": "yang",
"殄": "tian",
"殅": "sheng",
"殆": "dai",
"殇": "shang",
"殈": "xu",
"殉": "xun",
"殊": "shu",
"残": "can",
"殌": "jing",
"殍": "piao",
"殎": "qia",
"殏": "qiu",
"殐": "su",
"殑": "qing",
"殒": "yun",
"殓": "lian",
"殔": "yi",
"殕": "fou",
"殖": "zhi",
"殗": "ye",
"殘": "can",
"殙": "hun",
"殚": "dan",
"殛": "ji",
"殜": "die",
"殝": "zhen",
"殞": "yun",
"殟": "wen",
"殠": "chou",
"殡": "bin",
"殢": "ti",
"殣": "jin",
"殤": "shang",
"殥": "yin",
"殦": "chi",
"殧": "jiu",
"殨": "kui",
"殩": "cuan",
"殪": "yi",
"殫": "dan",
"殬": "du",
"殭": "jiang",
"殮": "lian",
"殯": "bin",
"殰": "du",
"殱": "jian",
"殲": "jian",
"殳": "shu",
"殴": "ou",
"段": "duan",
"殶": "zhu",
"殷": "yin",
"殸": "qing",
"殹": "yi",
"殺": "sha",
"殻": "ke",
"殼": "ke",
"殽": "xiao",
"殾": "xun",
"殿": "dian",
"毀": "hui",
"毁": "hui",
"毂": "gu",
"毃": "qiao",
"毄": "ji",
"毅": "yi",
"毆": "ou",
"毇": "hui",
"毈": "duan",
"毉": "yi",
"毊": "xiao",
"毋": "wu",
"毌": "guan",
"母": "mu",
"毎": "mei",
"每": "mei",
"毐": "ai",
"毑": "jie",
"毒": "du",
"毓": "yu",
"比": "bi",
"毕": "bi",
"毖": "bi",
"毗": "pi",
"毘": "pi",
"毙": "bi",
"毚": "chan",
"毛": "mao",
"毜": "hao",
"毝": "cai",
"毞": "bi",
"毟": "lie",
"毠": "jia",
"毡": "zhan",
"毢": "sai",
"毣": "mu",
"毤": "tuo",
"毥": "xun",
"毦": "er",
"毧": "rong",
"毨": "xian",
"毩": "ju",
"毪": "mu",
"毫": "hao",
"毬": "qiu",
"毭": "dou",
"毮": "sha",
"毯": "tan",
"毰": "pei",
"毱": "ju",
"毲": "duo",
"毳": "cui",
"毴": "bi",
"毵": "san",
"毶": "san",
"毷": "mao",
"毸": "sai",
"毹": "shu",
"毺": "shu",
"毻": "tuo",
"毼": "he",
"毽": "jian",
"毾": "ta",
"毿": "san",
"氀": "lv",
"氁": "mu",
"氂": "mao",
"氃": "tong",
"氄": "rong",
"氅": "chang",
"氆": "pu",
"氇": "lu",
"氈": "zhan",
"氉": "sao",
"氊": "zhan",
"氋": "meng",
"氌": "lu",
"氍": "qu",
"氎": "die",
"氏": "shi",
"氐": "di",
"民": "min",
"氒": "jue",
"氓": "meng",
"气": "qi",
"氕": "pie",
"氖": "nai",
"気": "qi",
"氘": "dao",
"氙": "xian",
"氚": "chuan",
"氛": "fen",
"氜": "yang",
"氝": "nei",
"氞": "nei",
"氟": "fu",
"氠": "shen",
"氡": "dong",
"氢": "qing",
"氣": "qi",
"氤": "yin",
"氥": "xi",
"氦": "hai",
"氧": "yang",
"氨": "an",
"氩": "ya",
"氪": "ke",
"氫": "qing",
"氬": "ya",
"氭": "dong",
"氮": "dan",
"氯": "lv",
"氰": "qing",
"氱": "yang",
"氲": "yun",
"氳": "yun",
"水": "shui",
"氵": "shui",
"氶": "zheng",
"氷": "bing",
"永": "yong",
"氹": "dang",
"氺": "shui",
"氻": "le",
"氼": "ni",
"氽": "tun",
"氾": "fan",
"氿": "gui",
"汀": "ting",
"汁": "zhi",
"求": "qiu",
"汃": "bin",
"汄": "ze",
"汅": "mian",
"汆": "cuan",
"汇": "hui",
"汈": "diao",
"汉": "han",
"汊": "cha",
"汋": "zhuo",
"汌": "chuan",
"汍": "wan",
"汎": "fan",
"汏": "tai",
"汐": "xi",
"汑": "tuo",
"汒": "mang",
"汓": "qiu",
"汔": "qi",
"汕": "shan",
"汖": "pin",
"汗": "han",
"汘": "qian",
"汙": "wu",
"汚": "wu",
"汛": "xun",
"汜": "si",
"汝": "ru",
"汞": "gong",
"江": "jiang",
"池": "chi",
"污": "wu",
"汢": "tu",
"汣": "jiu",
"汤": "tang",
"汥": "zhi",
"汦": "zhi",
"汧": "qian",
"汨": "mi",
"汩": "gu",
"汪": "wang",
"汫": "jing",
"汬": "jing",
"汭": "rui",
"汮": "jun",
"汯": "hong",
"汰": "tai",
"汱": "tai",
"汲": "ji",
"汳": "bian",
"汴": "bian",
"汵": "gan",
"汶": "wen",
"汷": "zhong",
"汸": "fang",
"汹": "xiong",
"決": "jue",
"汻": "hu",
"汼": "niu",
"汽": "qi",
"汾": "fen",
"汿": "xu",
"沀": "xu",
"沁": "qin",
"沂": "yi",
"沃": "wo",
"沄": "yun",
"沅": "yuan",
"沆": "hang",
"沇": "yan",
"沈": "shen",
"沉": "chen",
"沊": "dan",
"沋": "you",
"沌": "dun",
"沍": "hu",
"沎": "huo",
"沏": "qi",
"沐": "mu",
"沑": "nv",
"沒": "mei",
"沓": "ta",
"沔": "mian",
"沕": "mi",
"沖": "chong",
"沗": "hong",
"沘": "bi",
"沙": "sha",
"沚": "zhi",
"沛": "pei",
"沜": "pan",
"沝": "zhui",
"沞": "za",
"沟": "gou",
"沠": "pai",
"没": "mei",
"沢": "ze",
"沣": "feng",
"沤": "ou",
"沥": "li",
"沦": "lun",
"沧": "cang",
"沨": "feng",
"沩": "wei",
"沪": "hu",
"沫": "mo",
"沬": "mei",
"沭": "shu",
"沮": "ju",
"沯": "za",
"沰": "tuo",
"沱": "tuo",
"沲": "tuo",
"河": "he",
"沴": "li",
"沵": "mi",
"沶": "yi",
"沷": "fa",
"沸": "fei",
"油": "you",
"沺": "tian",
"治": "zhi",
"沼": "zhao",
"沽": "gu",
"沾": "zhan",
"沿": "yan",
"泀": "si",
"況": "kuang",
"泂": "jiong",
"泃": "ju",
"泄": "xie",
"泅": "qiu",
"泆": "yi",
"泇": "jia",
"泈": "zhong",
"泉": "quan",
"泊": "bo",
"泋": "hui",
"泌": "mi",
"泍": "ben",
"泎": "ze",
"泏": "chu",
"泐": "le",
"泑": "you",
"泒": "gu",
"泓": "hong",
"泔": "gan",
"法": "fa",
"泖": "mao",
"泗": "si",
"泘": "hu",
"泙": "peng",
"泚": "ci",
"泛": "fan",
"泜": "zhi",
"泝": "su",
"泞": "ning",
"泟": "cheng",
"泠": "ling",
"泡": "pao",
"波": "bo",
"泣": "qi",
"泤": "si",
"泥": "ni",
"泦": "ju",
"泧": "yue",
"注": "zhu",
"泩": "sheng",
"泪": "lei",
"泫": "xuan",
"泬": "jue",
"泭": "fu",
"泮": "pan",
"泯": "min",
"泰": "tai",
"泱": "yang",
"泲": "ji",
"泳": "yong",
"泴": "guan",
"泵": "beng",
"泶": "xue",
"泷": "long",
"泸": "lu",
"泹": "dan",
"泺": "luo",
"泻": "xie",
"泼": "po",
"泽": "ze",
"泾": "jing",
"泿": "yin",
"洀": "pan",
"洁": "jie",
"洂": "ye",
"洃": "hui",
"洄": "hui",
"洅": "zai",
"洆": "cheng",
"洇": "yin",
"洈": "wei",
"洉": "hou",
"洊": "jian",
"洋": "yang",
"洌": "lie",
"洍": "si",
"洎": "ji",
"洏": "er",
"洐": "xing",
"洑": "fu",
"洒": "sa",
"洓": "se",
"洔": "zhi",
"洕": "yin",
"洖": "wu",
"洗": "xi",
"洘": "kao",
"洙": "zhu",
"洚": "jiang",
"洛": "luo",
"洜": "luo",
"洝": "an",
"洞": "dong",
"洟": "yi",
"洠": "si",
"洡": "lei",
"洢": "yi",
"洣": "mi",
"洤": "quan",
"津": "jin",
"洦": "po",
"洧": "wei",
"洨": "xiao",
"洩": "xie",
"洪": "hong",
"洫": "xu",
"洬": "su",
"洭": "kuang",
"洮": "tao",
"洯": "qie",
"洰": "ju",
"洱": "er",
"洲": "zhou",
"洳": "ru",
"洴": "ping",
"洵": "xun",
"洶": "xiong",
"洷": "zhi",
"洸": "guang",
"洹": "huan",
"洺": "ming",
"活": "huo",
"洼": "wa",
"洽": "qia",
"派": "pai",
"洿": "wu",
"浀": "qu",
"流": "liu",
"浂": "yi",
"浃": "jia",
"浄": "jing",
"浅": "qian",
"浆": "jiang",
"浇": "jiao",
"浈": "zhen",
"浉": "shi",
"浊": "zhuo",
"测": "ce",
"浌": "fa",
"浍": "kuai",
"济": "ji",
"浏": "liu",
"浐": "chan",
"浑": "hun",
"浒": "hu",
"浓": "nong",
"浔": "xun",
"浕": "jin",
"浖": "lie",
"浗": "qiu",
"浘": "wei",
"浙": "zhe",
"浚": "jun",
"浛": "han",
"浜": "bang",
"浝": "mang",
"浞": "zhuo",
"浟": "you",
"浠": "xi",
"浡": "bo",
"浢": "dou",
"浣": "huan",
"浤": "hong",
"浥": "yi",
"浦": "pu",
"浧": "ying",
"浨": "lan",
"浩": "hao",
"浪": "lang",
"浫": "han",
"浬": "li",
"浭": "geng",
"浮": "fu",
"浯": "wu",
"浰": "li",
"浱": "chun",
"浲": "feng",
"浳": "yi",
"浴": "yu",
"浵": "tong",
"浶": "lao",
"海": "hai",
"浸": "jin",
"浹": "jia",
"浺": "chong",
"浻": "jiong",
"浼": "mei",
"浽": "sui",
"浾": "cheng",
"浿": "pei",
"涀": "xian",
"涁": "shen",
"涂": "tu",
"涃": "kun",
"涄": "ping",
"涅": "nie",
"涆": "han",
"涇": "jing",
"消": "xiao",
"涉": "she",
"涊": "nian",
"涋": "tu",
"涌": "yong",
"涍": "xiao",
"涎": "xian",
"涏": "ting",
"涐": "e",
"涑": "su",
"涒": "tun",
"涓": "juan",
"涔": "cen",
"涕": "ti",
"涖": "li",
"涗": "shui",
"涘": "si",
"涙": "lei",
"涚": "shui",
"涛": "tao",
"涜": "du",
"涝": "lao",
"涞": "lai",
"涟": "lian",
"涠": "wei",
"涡": "wo",
"涢": "yun",
"涣": "huan",
"涤": "di",
"涥": "heng",
"润": "run",
"涧": "jian",
"涨": "zhang",
"涩": "se",
"涪": "fu",
"涫": "guan",
"涬": "xing",
"涭": "shou",
"涮": "shuan",
"涯": "ya",
"涰": "chuo",
"涱": "zhang",
"液": "ye",
"涳": "kong",
"涴": "wan",
"涵": "han",
"涶": "tuo",
"涷": "dong",
"涸": "he",
"涹": "wo",
"涺": "ju",
"涻": "she",
"涼": "liang",
"涽": "hun",
"涾": "ta",
"涿": "zhuo",
"淀": "dian",
"淁": "qie",
"淂": "de",
"淃": "juan",
"淄": "zi",
"淅": "xi",
"淆": "xiao",
"淇": "qi",
"淈": "gu",
"淉": "guo",
"淊": "yan",
"淋": "lin",
"淌": "tang",
"淍": "zhou",
"淎": "peng",
"淏": "hao",
"淐": "chang",
"淑": "shu",
"淒": "qi",
"淓": "fang",
"淔": "zhi",
"淕": "lu",
"淖": "nao",
"淗": "ju",
"淘": "tao",
"淙": "cong",
"淚": "lei",
"淛": "zhe",
"淜": "ping",
"淝": "fei",
"淞": "song",
"淟": "tian",
"淠": "pi",
"淡": "dan",
"淢": "yu",
"淣": "ni",
"淤": "yu",
"淥": "lu",
"淦": "gan",
"淧": "mi",
"淨": "jing",
"淩": "ling",
"淪": "lun",
"淫": "yin",
"淬": "cui",
"淭": "qu",
"淮": "huai",
"淯": "yu",
"淰": "nian",
"深": "shen",
"淲": "biao",
"淳": "chun",
"淴": "hu",
"淵": "yuan",
"淶": "lai",
"混": "hun",
"淸": "qing",
"淹": "yan",
"淺": "qian",
"添": "tian",
"淼": "miao",
"淽": "zhi",
"淾": "yin",
"淿": "bo",
"渀": "ben",
"渁": "yuan",
"渂": "wen",
"渃": "ruo",
"渄": "fei",
"清": "qing",
"渆": "yuan",
"渇": "ke",
"済": "ji",
"渉": "she",
"渊": "yuan",
"渋": "se",
"渌": "lu",
"渍": "zi",
"渎": "du",
"渏": "yi",
"渐": "jian",
"渑": "mian",
"渒": "pai",
"渓": "xi",
"渔": "yu",
"渕": "yuan",
"渖": "shen",
"渗": "shen",
"渘": "rou",
"渙": "huan",
"渚": "zhu",
"減": "jian",
"渜": "nuan",
"渝": "yu",
"渞": "qiu",
"渟": "ting",
"渠": "qu",
"渡": "du",
"渢": "feng",
"渣": "zha",
"渤": "bo",
"渥": "wo",
"渦": "wo",
"渧": "ti",
"渨": "wei",
"温": "wen",
"渪": "ru",
"渫": "xie",
"測": "ce",
"渭": "wei",
"渮": "he",
"港": "gang",
"渰": "yan",
"渱": "hong",
"渲": "xuan",
"渳": "mi",
"渴": "ke",
"渵": "mao",
"渶": "ying",
"渷": "yan",
"游": "you",
"渹": "hong",
"渺": "miao",
"渻": "sheng",
"渼": "mei",
"渽": "zai",
"渾": "hun",
"渿": "nai",
"湀": "gui",
"湁": "chi",
"湂": "e",
"湃": "pai",
"湄": "mei",
"湅": "lian",
"湆": "qi",
"湇": "qi",
"湈": "mei",
"湉": "tian",
"湊": "cou",
"湋": "wei",
"湌": "can",
"湍": "tuan",
"湎": "mian",
"湏": "hui",
"湐": "po",
"湑": "xu",
"湒": "ji",
"湓": "pen",
"湔": "jian",
"湕": "jian",
"湖": "hu",
"湗": "feng",
"湘": "xiang",
"湙": "yi",
"湚": "yin",
"湛": "zhan",
"湜": "shi",
"湝": "jie",
"湞": "zhen",
"湟": "huang",
"湠": "tan",
"湡": "yu",
"湢": "bi",
"湣": "min",
"湤": "shi",
"湥": "tu",
"湦": "sheng",
"湧": "yong",
"湨": "ju",
"湩": "dong",
"湪": "tuan",
"湫": "qiu",
"湬": "qiu",
"湭": "qiu",
"湮": "yan",
"湯": "tang",
"湰": "long",
"湱": "huo",
"湲": "yuan",
"湳": "nan",
"湴": "ban",
"湵": "you",
"湶": "quan",
"湷": "zhuang",
"湸": "liang",
"湹": "chan",
"湺": "xian",
"湻": "chun",
"湼": "nie",
"湽": "zi",
"湾": "wan",
"湿": "shi",
"満": "man",
"溁": "ying",
"溂": "la",
"溃": "kui",
"溄": "feng",
"溅": "jian",
"溆": "xu",
"溇": "lou",
"溈": "wei",
"溉": "gai",
"溊": "bo",
"溋": "ying",
"溌": "po",
"溍": "jin",
"溎": "yan",
"溏": "tang",
"源": "yuan",
"溑": "suo",
"溒": "yuan",
"溓": "lian",
"溔": "yao",
"溕": "meng",
"準": "zhun",
"溗": "cheng",
"溘": "ke",
"溙": "tai",
"溚": "da",
"溛": "wa",
"溜": "liu",
"溝": "gou",
"溞": "sao",
"溟": "ming",
"溠": "zha",
"溡": "shi",
"溢": "yi",
"溣": "lun",
"溤": "ma",
"溥": "pu",
"溦": "wei",
"溧": "li",
"溨": "zai",
"溩": "wu",
"溪": "xi",
"溫": "wen",
"溬": "qiang",
"溭": "ze",
"溮": "shi",
"溯": "su",
"溰": "ai",
"溱": "zhen",
"溲": "sou",
"溳": "yun",
"溴": "xiu",
"溵": "yin",
"溶": "rong",
"溷": "hun",
"溸": "su",
"溹": "suo",
"溺": "ni",
"溻": "ta",
"溼": "shi",
"溽": "ru",
"溾": "ai",
"溿": "pan",
"滀": "chu",
"滁": "chu",
"滂": "pang",
"滃": "weng",
"滄": "cang",
"滅": "mie",
"滆": "ge",
"滇": "dian",
"滈": "hao",
"滉": "huang",
"滊": "qi",
"滋": "zi",
"滌": "di",
"滍": "zhi",
"滎": "xing",
"滏": "fu",
"滐": "jie",
"滑": "hua",
"滒": "ge",
"滓": "zi",
"滔": "tao",
"滕": "teng",
"滖": "sui",
"滗": "bi",
"滘": "jiao",
"滙": "hui",
"滚": "gun",
"滛": "yin",
"滜": "ze",
"滝": "long",
"滞": "zhi",
"滟": "yan",
"滠": "she",
"满": "man",
"滢": "ying",
"滣": "chun",
"滤": "lv",
"滥": "lan",
"滦": "luan",
"滧": "yao",
"滨": "bin",
"滩": "tan",
"滪": "yu",
"滫": "xiu",
"滬": "hu",
"滭": "bi",
"滮": "biao",
"滯": "zhi",
"滰": "jiang",
"滱": "kou",
"滲": "shen",
"滳": "shang",
"滴": "di",
"滵": "mi",
"滶": "ao",
"滷": "lu",
"滸": "hu",
"滹": "hu",
"滺": "you",
"滻": "chan",
"滼": "fan",
"滽": "yong",
"滾": "gun",
"滿": "man",
"漀": "qing",
"漁": "yu",
"漂": "piao",
"漃": "ji",
"漄": "ya",
"漅": "chao",
"漆": "qi",
"漇": "xi",
"漈": "ji",
"漉": "lu",
"漊": "lou",
"漋": "long",
"漌": "jin",
"漍": "guo",
"漎": "cong",
"漏": "lou",
"漐": "zhi",
"漑": "gai",
"漒": "qiang",
"漓": "li",
"演": "yan",
"漕": "cao",
"漖": "jiao",
"漗": "cong",
"漘": "chun",
"漙": "tuan",
"漚": "ou",
"漛": "teng",
"漜": "ye",
"漝": "xi",
"漞": "mi",
"漟": "tang",
"漠": "mo",
"漡": "shang",
"漢": "han",
"漣": "lian",
"漤": "lan",
"漥": "wa",
"漦": "chi",
"漧": "gan",
"漨": "feng",
"漩": "xuan",
"漪": "yi",
"漫": "man",
"漬": "zi",
"漭": "mang",
"漮": "kang",
"漯": "luo",
"漰": "ben",
"漱": "shu",
"漲": "zhang",
"漳": "zhang",
"漴": "chong",
"漵": "xu",
"漶": "huan",
"漷": "huo",
"漸": "jian",
"漹": "yan",
"漺": "shuang",
"漻": "liao",
"漼": "cui",
"漽": "ti",
"漾": "yang",
"漿": "jiang",
"潀": "cong",
"潁": "ying",
"潂": "hong",
"潃": "xiu",
"潄": "shu",
"潅": "guan",
"潆": "ying",
"潇": "xiao",
"潈": "cong",
"潉": "kun",
"潊": "xu",
"潋": "lian",
"潌": "zhi",
"潍": "wei",
"潎": "pi",
"潏": "yu",
"潐": "jiao",
"潑": "po",
"潒": "dang",
"潓": "hui",
"潔": "jie",
"潕": "wu",
"潖": "pa",
"潗": "ji",
"潘": "pan",
"潙": "wei",
"潚": "su",
"潛": "qian",
"潜": "qian",
"潝": "xi",
"潞": "lu",
"潟": "xi",
"潠": "xun",
"潡": "dun",
"潢": "huang",
"潣": "min",
"潤": "run",
"潥": "su",
"潦": "lao",
"潧": "zhen",
"潨": "cong",
"潩": "yi",
"潪": "zhi",
"潫": "wan",
"潬": "tan",
"潭": "tan",
"潮": "chao",
"潯": "xun",
"潰": "kui",
"潱": "ye",
"潲": "shao",
"潳": "tu",
"潴": "zhu",
"潵": "san",
"潶": "hei",
"潷": "bi",
"潸": "shan",
"潹": "chan",
"潺": "chan",
"潻": "shu",
"潼": "tong",
"潽": "pu",
"潾": "lin",
"潿": "wei",
"澀": "se",
"澁": "se",
"澂": "cheng",
"澃": "jiong",
"澄": "cheng",
"澅": "hua",
"澆": "jiao",
"澇": "lao",
"澈": "che",
"澉": "gan",
"澊": "cun",
"澋": "jing",
"澌": "si",
"澍": "shu",
"澎": "peng",
"澏": "han",
"澐": "yun",
"澑": "liu",
"澒": "hong",
"澓": "fu",
"澔": "hao",
"澕": "he",
"澖": "xian",
"澗": "jian",
"澘": "shan",
"澙": "xi",
"澚": "ao",
"澛": "lu",
"澜": "lan",
"澝": "ning",
"澞": "yu",
"澟": "lin",
"澠": "mian",
"澡": "zao",
"澢": "dang",
"澣": "huan",
"澤": "ze",
"澥": "xie",
"澦": "yu",
"澧": "li",
"澨": "shi",
"澩": "xue",
"澪": "ling",
"澫": "wan",
"澬": "zi",
"澭": "yong",
"澮": "kuai",
"澯": "can",
"澰": "lian",
"澱": "dian",
"澲": "ye",
"澳": "ao",
"澴": "huan",
"澵": "zhen",
"澶": "chan",
"澷": "man",
"澸": "gan",
"澹": "dan",
"澺": "yi",
"澻": "sui",
"澼": "pi",
"澽": "ju",
"澾": "ta",
"澿": "qin",
"激": "ji",
"濁": "zhuo",
"濂": "lian",
"濃": "nong",
"濄": "guo",
"濅": "jin",
"濆": "fen",
"濇": "se",
"濈": "ji",
"濉": "sui",
"濊": "hui",
"濋": "chu",
"濌": "ta",
"濍": "song",
"濎": "ding",
"濏": "se",
"濐": "zhu",
"濑": "lai",
"濒": "bin",
"濓": "lian",
"濔": "mi",
"濕": "shi",
"濖": "shu",
"濗": "mi",
"濘": "ning",
"濙": "ying",
"濚": "ying",
"濛": "meng",
"濜": "jin",
"濝": "qi",
"濞": "bi",
"濟": "ji",
"濠": "hao",
"濡": "ru",
"濢": "cui",
"濣": "wo",
"濤": "tao",
"濥": "yin",
"濦": "yin",
"濧": "dui",
"濨": "ci",
"濩": "huo",
"濪": "qing",
"濫": "lan",
"濬": "jun",
"濭": "ai",
"濮": "pu",
"濯": "zhuo",
"濰": "wei",
"濱": "bin",
"濲": "gu",
"濳": "qian",
"濴": "ying",
"濵": "bin",
"濶": "kuo",
"濷": "fei",
"濸": "cang",
"濹": "me",
"濺": "jian",
"濻": "wei",
"濼": "luo",
"濽": "zan",
"濾": "lv",
"濿": "li",
"瀀": "you",
"瀁": "yang",
"瀂": "lu",
"瀃": "si",
"瀄": "zhi",
"瀅": "ying",
"瀆": "du",
"瀇": "wang",
"瀈": "hui",
"瀉": "xie",
"瀊": "pan",
"瀋": "shen",
"瀌": "biao",
"瀍": "chan",
"瀎": "mie",
"瀏": "liu",
"瀐": "jian",
"瀑": "pu",
"瀒": "se",
"瀓": "cheng",
"瀔": "gu",
"瀕": "bin",
"瀖": "huo",
"瀗": "xian",
"瀘": "lu",
"瀙": "qin",
"瀚": "han",
"瀛": "ying",
"瀜": "rong",
"瀝": "li",
"瀞": "jing",
"瀟": "xiao",
"瀠": "ying",
"瀡": "sui",
"瀢": "wei",
"瀣": "xie",
"瀤": "huai",
"瀥": "xue",
"瀦": "zhu",
"瀧": "long",
"瀨": "lai",
"瀩": "dui",
"瀪": "fan",
"瀫": "hu",
"瀬": "lai",
"瀭": "shu",
"瀮": "lian",
"瀯": "ying",
"瀰": "mi",
"瀱": "ji",
"瀲": "lian",
"瀳": "jian",
"瀴": "ying",
"瀵": "fen",
"瀶": "lin",
"瀷": "yi",
"瀸": "jian",
"瀹": "yue",
"瀺": "chan",
"瀻": "dai",
"瀼": "rang",
"瀽": "jian",
"瀾": "lan",
"瀿": "fan",
"灀": "shuang",
"灁": "yuan",
"灂": "zhuo",
"灃": "feng",
"灄": "she",
"灅": "lei",
"灆": "lan",
"灇": "cong",
"灈": "qu",
"灉": "yong",
"灊": "qian",
"灋": "fa",
"灌": "guan",
"灍": "jue",
"灎": "yan",
"灏": "hao",
"灐": "ying",
"灑": "sa",
"灒": "zan",
"灓": "luan",
"灔": "yan",
"灕": "li",
"灖": "mi",
"灗": "shan",
"灘": "tan",
"灙": "dang",
"灚": "jiao",
"灛": "chan",
"灜": "ying",
"灝": "hao",
"灞": "ba",
"灟": "zhu",
"灠": "lan",
"灡": "lan",
"灢": "nang",
"灣": "wan",
"灤": "luan",
"灥": "xun",
"灦": "xian",
"灧": "yan",
"灨": "gan",
"灩": "yan",
"灪": "yu",
"火": "huo",
"灬": "huo",
"灭": "mie",
"灮": "guang",
"灯": "deng",
"灰": "hui",
"灱": "xiao",
"灲": "xiao",
"灳": "hui",
"灴": "hong",
"灵": "ling",
"灶": "zao",
"灷": "zhuan",
"灸": "jiu",
"灹": "zha",
"灺": "xie",
"灻": "chi",
"灼": "zhuo",
"災": "zai",
"灾": "zai",
"灿": "can",
"炀": "yang",
"炁": "qi",
"炂": "zhong",
"炃": "fen",
"炄": "niu",
"炅": "jiong",
"炆": "wen",
"炇": "pu",
"炈": "yi",
"炉": "lu",
"炊": "chui",
"炋": "pi",
"炌": "kai",
"炍": "pan",
"炎": "yan",
"炏": "yan",
"炐": "pang",
"炑": "mu",
"炒": "chao",
"炓": "liao",
"炔": "que",
"炕": "kang",
"炖": "dun",
"炗": "guang",
"炘": "xin",
"炙": "zhi",
"炚": "guang",
"炛": "guang",
"炜": "wei",
"炝": "qiang",
"炞": "bian",
"炟": "da",
"炠": "xia",
"炡": "zheng",
"炢": "zhu",
"炣": "ke",
"炤": "zhao",
"炥": "fu",
"炦": "ba",
"炧": "xie",
"炨": "xie",
"炩": "ling",
"炪": "zhuo",
"炫": "xuan",
"炬": "ju",
"炭": "tan",
"炮": "pao",
"炯": "jiong",
"炰": "pao",
"炱": "tai",
"炲": "tai",
"炳": "bing",
"炴": "yang",
"炵": "tong",
"炶": "shan",
"炷": "zhu",
"炸": "zha",
"点": "dian",
"為": "wei",
"炻": "shi",
"炼": "lian",
"炽": "chi",
"炾": "huang",
"炿": "zhou",
"烀": "hu",
"烁": "shuo",
"烂": "lan",
"烃": "ting",
"烄": "jiao",
"烅": "xu",
"烆": "heng",
"烇": "quan",
"烈": "lie",
"烉": "huan",
"烊": "yang",
"烋": "xiao",
"烌": "xiu",
"烍": "xian",
"烎": "yin",
"烏": "wu",
"烐": "zhou",
"烑": "yao",
"烒": "shi",
"烓": "wei",
"烔": "tong",
"烕": "mie",
"烖": "zai",
"烗": "kai",
"烘": "hong",
"烙": "lao",
"烚": "xia",
"烛": "zhu",
"烜": "xuan",
"烝": "zheng",
"烞": "po",
"烟": "yan",
"烠": "hui",
"烡": "guang",
"烢": "che",
"烣": "hui",
"烤": "kao",
"烥": "ju",
"烦": "fan",
"烧": "shao",
"烨": "ye",
"烩": "hui",
"烫": "tang",
"烬": "jin",
"热": "re",
"烮": "lie",
"烯": "xi",
"烰": "fu",
"烱": "jiong",
"烲": "xie",
"烳": "pu",
"烴": "ting",
"烵": "zhuo",
"烶": "ting",
"烷": "wan",
"烸": "hai",
"烹": "peng",
"烺": "lang",
"烻": "yan",
"烼": "xu",
"烽": "feng",
"烾": "chi",
"烿": "rong",
"焀": "hu",
"焁": "xi",
"焂": "shu",
"焃": "he",
"焄": "xun",
"焅": "ku",
"焆": "juan",
"焇": "xiao",
"焈": "xi",
"焉": "yan",
"焊": "han",
"焋": "zhuang",
"焌": "qu",
"焍": "di",
"焎": "xie",
"焏": "ji",
"焐": "wu",
"焑": "yan",
"焒": "lv",
"焓": "han",
"焔": "yan",
"焕": "huan",
"焖": "men",
"焗": "ju",
"焘": "dao",
"焙": "bei",
"焚": "fen",
"焛": "lin",
"焜": "kun",
"焝": "hun",
"焞": "tun",
"焟": "xi",
"焠": "cui",
"無": "wu",
"焢": "hong",
"焣": "chao",
"焤": "fu",
"焥": "wo",
"焦": "jiao",
"焧": "zong",
"焨": "feng",
"焩": "ping",
"焪": "qiong",
"焫": "ruo",
"焬": "xi",
"焭": "qiong",
"焮": "xin",
"焯": "zhuo",
"焰": "yan",
"焱": "yan",
"焲": "yi",
"焳": "jue",
"焴": "yu",
"焵": "gang",
"然": "ran",
"焷": "pi",
"焸": "xiong",
"焹": "gang",
"焺": "sheng",
"焻": "chang",
"焼": "shao",
"焽": "xiong",
"焾": "nian",
"焿": "geng",
"煀": "qu",
"煁": "chen",
"煂": "he",
"煃": "kui",
"煄": "zhong",
"煅": "duan",
"煆": "xia",
"煇": "hui",
"煈": "feng",
"煉": "lian",
"煊": "xuan",
"煋": "xing",
"煌": "huang",
"煍": "jiao",
"煎": "jian",
"煏": "bi",
"煐": "ying",
"煑": "zhu",
"煒": "wei",
"煓": "tuan",
"煔": "shan",
"煕": "xi",
"煖": "nuan",
"煗": "nuan",
"煘": "chan",
"煙": "yan",
"煚": "jiong",
"煛": "jiong",
"煜": "yu",
"煝": "mei",
"煞": "sha",
"煟": "wei",
"煠": "ye",
"煡": "jin",
"煢": "qiong",
"煣": "rou",
"煤": "mei",
"煥": "huan",
"煦": "xu",
"照": "zhao",
"煨": "wei",
"煩": "fan",
"煪": "qiu",
"煫": "sui",
"煬": "yang",
"煭": "lie",
"煮": "zhu",
"煯": "jie",
"煰": "zao",
"煱": "gua",
"煲": "bao",
"煳": "hu",
"煴": "yun",
"煵": "nan",
"煶": "shi",
"煷": "huo",
"煸": "bian",
"煹": "gou",
"煺": "tui",
"煻": "tang",
"煼": "chao",
"煽": "shan",
"煾": "en",
"煿": "bo",
"熀": "huang",
"熁": "xie",
"熂": "xi",
"熃": "wu",
"熄": "xi",
"熅": "yun",
"熆": "he",
"熇": "he",
"熈": "xi",
"熉": "yun",
"熊": "xiong",
"熋": "xiong",
"熌": "shan",
"熍": "qiong",
"熎": "yao",
"熏": "xun",
"熐": "mi",
"熑": "lian",
"熒": "ying",
"熓": "wu",
"熔": "rong",
"熕": "gong",
"熖": "yan",
"熗": "qiang",
"熘": "liu",
"熙": "xi",
"熚": "bi",
"熛": "biao",
"熜": "cong",
"熝": "lu",
"熞": "jian",
"熟": "shu",
"熠": "yi",
"熡": "lou",
"熢": "peng",
"熣": "sui",
"熤": "yi",
"熥": "teng",
"熦": "jue",
"熧": "zong",
"熨": "yun",
"熩": "hu",
"熪": "yi",
"熫": "zhi",
"熬": "ao",
"熭": "wei",
"熮": "liu",
"熯": "han",
"熰": "ou",
"熱": "re",
"熲": "jiong",
"熳": "man",
"熴": "kun",
"熵": "shang",
"熶": "cuan",
"熷": "zeng",
"熸": "jian",
"熹": "xi",
"熺": "xi",
"熻": "xi",
"熼": "yi",
"熽": "xiao",
"熾": "chi",
"熿": "huang",
"燀": "chan",
"燁": "ye",
"燂": "tan",
"燃": "ran",
"燄": "yan",
"燅": "xun",
"燆": "qiao",
"燇": "jun",
"燈": "deng",
"燉": "dun",
"燊": "shen",
"燋": "jiao",
"燌": "fen",
"燍": "si",
"燎": "liao",
"燏": "yu",
"燐": "lin",
"燑": "tong",
"燒": "shao",
"燓": "fen",
"燔": "fan",
"燕": "yan",
"燖": "xun",
"燗": "lan",
"燘": "mei",
"燙": "tang",
"燚": "yi",
"燛": "jiong",
"燜": "men",
"燝": "zhu",
"燞": "jiao",
"營": "ying",
"燠": "yu",
"燡": "yi",
"燢": "xue",
"燣": "lan",
"燤": "tai",
"燥": "zao",
"燦": "can",
"燧": "sui",
"燨": "xi",
"燩": "que",
"燪": "zong",
"燫": "lian",
"燬": "hui",
"燭": "zhu",
"燮": "xie",
"燯": "ling",
"燰": "wei",
"燱": "yi",
"燲": "xie",
"燳": "zhao",
"燴": "hui",
"燵": "da",
"燶": "nong",
"燷": "lan",
"燸": "xu",
"燹": "xian",
"燺": "he",
"燻": "xun",
"燼": "jin",
"燽": "chou",
"燾": "dao",
"燿": "yao",
"爀": "he",
"爁": "lan",
"爂": "biao",
"爃": "rong",
"爄": "li",
"爅": "mo",
"爆": "bao",
"爇": "ruo",
"爈": "lv",
"爉": "la",
"爊": "ao",
"爋": "xun",
"爌": "kuang",
"爍": "shuo",
"爎": "liao",
"爏": "li",
"爐": "lu",
"爑": "jue",
"爒": "liao",
"爓": "yan",
"爔": "xi",
"爕": "xie",
"爖": "long",
"爗": "ye",
"爘": "can",
"爙": "rang",
"爚": "yue",
"爛": "lan",
"爜": "cong",
"爝": "jue",
"爞": "chong",
"爟": "guan",
"爠": "qu",
"爡": "che",
"爢": "mi",
"爣": "tang",
"爤": "lan",
"爥": "zhu",
"爦": "lan",
"爧": "ling",
"爨": "cuan",
"爩": "yu",
"爪": "zhao",
"爫": "zhao",
"爬": "pa",
"爭": "zheng",
"爮": "pao",
"爯": "cheng",
"爰": "yuan",
"爱": "ai",
"爲": "wei",
"爳": "han",
"爴": "jue",
"爵": "jue",
"父": "fu",
"爷": "ye",
"爸": "ba",
"爹": "die",
"爺": "ye",
"爻": "yao",
"爼": "zu",
"爽": "shuang",
"爾": "er",
"爿": "pan",
"牀": "chuang",
"牁": "ke",
"牂": "zang",
"牃": "die",
"牄": "qiang",
"牅": "yong",
"牆": "qiang",
"片": "pian",
"版": "ban",
"牉": "pan",
"牊": "chao",
"牋": "jian",
"牌": "pai",
"牍": "du",
"牎": "chuang",
"牏": "yu",
"牐": "zha",
"牑": "bian",
"牒": "die",
"牓": "bang",
"牔": "bo",
"牕": "chuang",
"牖": "you",
"牗": "you",
"牘": "du",
"牙": "ya",
"牚": "cheng",
"牛": "niu",
"牜": "niu",
"牝": "pin",
"牞": "jiu",
"牟": "mou",
"牠": "ta",
"牡": "mu",
"牢": "lao",
"牣": "ren",
"牤": "mang",
"牥": "fang",
"牦": "mao",
"牧": "mu",
"牨": "gang",
"物": "wu",
"牪": "yan",
"牫": "ge",
"牬": "bei",
"牭": "si",
"牮": "jian",
"牯": "gu",
"牰": "you",
"牱": "ke",
"牲": "sheng",
"牳": "mu",
"牴": "di",
"牵": "qian",
"牶": "quan",
"牷": "quan",
"牸": "zi",
"特": "te",
"牺": "xi",
"牻": "mang",
"牼": "keng",
"牽": "qian",
"牾": "wu",
"牿": "gu",
"犀": "xi",
"犁": "li",
"犂": "li",
"犃": "pou",
"犄": "ji",
"犅": "gang",
"犆": "zhi",
"犇": "ben",
"犈": "quan",
"犉": "chun",
"犊": "du",
"犋": "ju",
"犌": "jia",
"犍": "jian",
"犎": "feng",
"犏": "pian",
"犐": "ke",
"犑": "ju",
"犒": "kao",
"犓": "chu",
"犔": "xi",
"犕": "bei",
"犖": "luo",
"犗": "jie",
"犘": "ma",
"犙": "san",
"犚": "wei",
"犛": "mao",
"犜": "dun",
"犝": "tong",
"犞": "qiao",
"犟": "jiang",
"犠": "xi",
"犡": "li",
"犢": "du",
"犣": "lie",
"犤": "pai",
"犥": "piao",
"犦": "bao",
"犧": "xi",
"犨": "chou",
"犩": "wei",
"犪": "kui",
"犫": "chou",
"犬": "quan",
"犭": "quan",
"犮": "quan",
"犯": "fan",
"犰": "qiu",
"犱": "ji",
"犲": "chai",
"犳": "zhuo",
"犴": "han",
"犵": "ge",
"状": "zhuang",
"犷": "guang",
"犸": "ma",
"犹": "you",
"犺": "kang",
"犻": "pei",
"犼": "hou",
"犽": "ya",
"犾": "yin",
"犿": "huan",
"狀": "zhuang",
"狁": "yun",
"狂": "kuang",
"狃": "niu",
"狄": "di",
"狅": "kuang",
"狆": "zhong",
"狇": "mu",
"狈": "bei",
"狉": "pi",
"狊": "ju",
"狋": "yi",
"狌": "sheng",
"狍": "pao",
"狎": "xia",
"狏": "tuo",
"狐": "hu",
"狑": "ling",
"狒": "fei",
"狓": "pi",
"狔": "ni",
"狕": "yao",
"狖": "you",
"狗": "gou",
"狘": "xue",
"狙": "ju",
"狚": "dan",
"狛": "bo",
"狜": "ku",
"狝": "xian",
"狞": "ning",
"狟": "huan",
"狠": "hen",
"狡": "jiao",
"狢": "he",
"狣": "zhao",
"狤": "jie",
"狥": "xun",
"狦": "shan",
"狧": "ta",
"狨": "rong",
"狩": "shou",
"狪": "tong",
"狫": "lao",
"独": "du",
"狭": "xia",
"狮": "shi",
"狯": "kuai",
"狰": "zheng",
"狱": "yu",
"狲": "sun",
"狳": "yu",
"狴": "bi",
"狵": "mang",
"狶": "xi",
"狷": "juan",
"狸": "li",
"狹": "xia",
"狺": "yin",
"狻": "suan",
"狼": "lang",
"狽": "bei",
"狾": "zhi",
"狿": "yan",
"猀": "sha",
"猁": "li",
"猂": "han",
"猃": "xian",
"猄": "jing",
"猅": "pai",
"猆": "fei",
"猇": "xiao",
"猈": "bai",
"猉": "qi",
"猊": "ni",
"猋": "biao",
"猌": "yin",
"猍": "lai",
"猎": "lie",
"猏": "jian",
"猐": "qiang",
"猑": "kun",
"猒": "yan",
"猓": "guo",
"猔": "zong",
"猕": "mi",
"猖": "chang",
"猗": "yi",
"猘": "zhi",
"猙": "zheng",
"猚": "ya",
"猛": "meng",
"猜": "cai",
"猝": "cu",
"猞": "she",
"猟": "lie",
"猡": "luo",
"猢": "hu",
"猣": "zong",
"猤": "gui",
"猥": "wei",
"猦": "feng",
"猧": "wo",
"猨": "yuan",
"猩": "xing",
"猪": "zhu",
"猫": "mao",
"猬": "wei",
"猭": "chuan",
"献": "xian",
"猯": "tuan",
"猰": "ya",
"猱": "nao",
"猲": "xie",
"猳": "jia",
"猴": "hou",
"猵": "bian",
"猶": "you",
"猷": "you",
"猸": "mei",
"猹": "cha",
"猺": "yao",
"猻": "sun",
"猼": "bo",
"猽": "ming",
"猾": "hua",
"猿": "yuan",
"獀": "sou",
"獁": "ma",
"獂": "huan",
"獃": "dai",
"獄": "yu",
"獅": "shi",
"獆": "hao",
"獇": "qiang",
"獈": "yi",
"獉": "zhen",
"獊": "cang",
"獋": "hao",
"獌": "man",
"獍": "jing",
"獎": "jiang",
"獏": "mo",
"獐": "zhang",
"獑": "chan",
"獒": "ao",
"獓": "ao",
"獔": "hao",
"獕": "suo",
"獖": "fen",
"獗": "jue",
"獘": "bi",
"獙": "bi",
"獚": "huang",
"獛": "pu",
"獜": "lin",
"獝": "xu",
"獞": "tong",
"獟": "yao",
"獠": "liao",
"獡": "shuo",
"獢": "xiao",
"獣": "shou",
"獤": "dun",
"獥": "jiao",
"獦": "ge",
"獧": "juan",
"獨": "du",
"獩": "hui",
"獪": "kuai",
"獫": "xian",
"獬": "xie",
"獭": "ta",
"獮": "xian",
"獯": "xun",
"獰": "ning",
"獱": "bian",
"獲": "huo",
"獳": "nou",
"獴": "meng",
"獵": "lie",
"獶": "nao",
"獷": "guang",
"獸": "shou",
"獹": "lu",
"獺": "ta",
"獻": "xian",
"獼": "mi",
"獽": "rang",
"獾": "huan",
"獿": "nao",
"玀": "luo",
"玁": "xian",
"玂": "qi",
"玃": "jue",
"玄": "xuan",
"玅": "miao",
"玆": "zi",
"率": "shuai",
"玈": "lu",
"玉": "yu",
"玊": "su",
"王": "wang",
"玌": "qiu",
"玍": "ga",
"玎": "ding",
"玏": "le",
"玐": "ba",
"玑": "ji",
"玒": "hong",
"玓": "di",
"玔": "chuan",
"玕": "gan",
"玖": "jiu",
"玗": "yu",
"玘": "qi",
"玙": "yu",
"玚": "chang",
"玛": "ma",
"玜": "hong",
"玝": "wu",
"玞": "fu",
"玟": "min",
"玠": "jie",
"玡": "ya",
"玢": "bin",
"玣": "bian",
"玤": "bang",
"玥": "yue",
"玦": "jue",
"玧": "men",
"玨": "jue",
"玩": "wan",
"玪": "jian",
"玫": "mei",
"玬": "dan",
"玭": "pin",
"玮": "wei",
"环": "huan",
"现": "xian",
"玱": "qiang",
"玲": "ling",
"玳": "dai",
"玴": "yi",
"玵": "an",
"玶": "ping",
"玷": "dian",
"玸": "fu",
"玹": "xuan",
"玺": "xi",
"玻": "bo",
"玼": "ci",
"玽": "gou",
"玾": "jia",
"玿": "shao",
"珀": "po",
"珁": "ci",
"珂": "ke",
"珃": "ran",
"珄": "sheng",
"珅": "shen",
"珆": "yi",
"珇": "zu",
"珈": "jia",
"珉": "min",
"珊": "shan",
"珋": "liu",
"珌": "bi",
"珍": "zhen",
"珎": "zhen",
"珏": "jue",
"珐": "fa",
"珑": "long",
"珒": "jin",
"珓": "jiao",
"珔": "jian",
"珕": "li",
"珖": "guang",
"珗": "xian",
"珘": "zhou",
"珙": "gong",
"珚": "yan",
"珛": "xiu",
"珜": "yang",
"珝": "xu",
"珞": "luo",
"珟": "su",
"珠": "zhu",
"珡": "qin",
"珢": "yin",
"珣": "xun",
"珤": "bao",
"珥": "er",
"珦": "xiang",
"珧": "yao",
"珨": "xia",
"珩": "heng",
"珪": "gui",
"珫": "chong",
"珬": "xu",
"班": "ban",
"珮": "pei",
"珯": "lao",
"珰": "dang",
"珱": "ying",
"珲": "hun",
"珳": "wen",
"珴": "e",
"珵": "cheng",
"珶": "di",
"珷": "wu",
"珸": "wu",
"珹": "cheng",
"珺": "jun",
"珻": "mei",
"珼": "bei",
"珽": "ting",
"現": "xian",
"珿": "chu",
"琀": "han",
"琁": "xuan",
"琂": "yan",
"球": "qiu",
"琄": "xuan",
"琅": "lang",
"理": "li",
"琇": "xiu",
"琈": "fu",
"琉": "liu",
"琊": "ya",
"琋": "xi",
"琌": "ling",
"琍": "li",
"琎": "jin",
"琏": "lian",
"琐": "suo",
"琑": "suo",
"琒": "feng",
"琓": "wan",
"琔": "dian",
"琕": "pin",
"琖": "zhan",
"琗": "cui",
"琘": "min",
"琙": "yu",
"琚": "ju",
"琛": "chen",
"琜": "lai",
"琝": "min",
"琞": "sheng",
"琟": "wei",
"琠": "tian",
"琡": "shu",
"琢": "zhuo",
"琣": "beng",
"琤": "cheng",
"琥": "hu",
"琦": "qi",
"琧": "e",
"琨": "kun",
"琩": "chang",
"琪": "qi",
"琫": "beng",
"琬": "wan",
"琭": "lu",
"琮": "cong",
"琯": "guan",
"琰": "yan",
"琱": "diao",
"琲": "bei",
"琳": "lin",
"琴": "qin",
"琵": "pi",
"琶": "pa",
"琷": "que",
"琸": "zhuo",
"琹": "qin",
"琺": "fa",
"琻": "jin",
"琼": "qiong",
"琽": "du",
"琾": "jie",
"琿": "hun",
"瑀": "yu",
"瑁": "mao",
"瑂": "mei",
"瑃": "chun",
"瑄": "xuan",
"瑅": "ti",
"瑆": "xing",
"瑇": "dai",
"瑈": "rou",
"瑉": "min",
"瑊": "jian",
"瑋": "wei",
"瑌": "ruan",
"瑍": "huan",
"瑎": "xie",
"瑏": "chuan",
"瑐": "jian",
"瑑": "zhuan",
"瑒": "chang",
"瑓": "lian",
"瑔": "quan",
"瑕": "xia",
"瑖": "duan",
"瑗": "yuan",
"瑘": "ye",
"瑙": "nao",
"瑚": "hu",
"瑛": "ying",
"瑜": "yu",
"瑝": "huang",
"瑞": "rui",
"瑟": "se",
"瑠": "liu",
"瑡": "shi",
"瑢": "rong",
"瑣": "suo",
"瑤": "yao",
"瑥": "wen",
"瑦": "wu",
"瑧": "zhen",
"瑨": "jin",
"瑩": "ying",
"瑪": "ma",
"瑫": "tao",
"瑬": "liu",
"瑭": "tang",
"瑮": "li",
"瑯": "lang",
"瑰": "gui",
"瑱": "tian",
"瑲": "qiang",
"瑳": "cuo",
"瑴": "jue",
"瑵": "zhao",
"瑶": "yao",
"瑷": "ai",
"瑸": "bin",
"瑹": "tu",
"瑺": "chang",
"瑻": "kun",
"瑼": "zhuan",
"瑽": "cong",
"瑾": "jin",
"瑿": "yi",
"璀": "cui",
"璁": "cong",
"璂": "qi",
"璃": "li",
"璄": "jing",
"璅": "zao",
"璆": "qiu",
"璇": "xuan",
"璈": "ao",
"璉": "lian",
"璊": "men",
"璋": "zhang",
"璌": "yin",
"璍": "ye",
"璎": "ying",
"璏": "zhi",
"璐": "lu",
"璑": "wu",
"璒": "deng",
"璓": "xiu",
"璔": "zeng",
"璕": "xun",
"璖": "qu",
"璗": "dang",
"璘": "lin",
"璙": "liao",
"璚": "qiong",
"璛": "su",
"璜": "huang",
"璝": "gui",
"璞": "pu",
"璟": "jing",
"璠": "fan",
"璡": "jin",
"璢": "liu",
"璣": "ji",
"璤": "hui",
"璥": "jing",
"璦": "ai",
"璧": "bi",
"璨": "can",
"璩": "qu",
"璪": "zao",
"璫": "dang",
"璬": "jiao",
"璭": "guan",
"璮": "tan",
"璯": "hui",
"環": "huan",
"璱": "se",
"璲": "sui",
"璳": "tian",
"璴": "chu",
"璵": "yu",
"璶": "jin",
"璷": "lu",
"璸": "bin",
"璹": "shu",
"璺": "wen",
"璻": "zui",
"璼": "lan",
"璽": "xi",
"璾": "ji",
"璿": "xuan",
"瓀": "ruan",
"瓁": "wo",
"瓂": "gai",
"瓃": "lei",
"瓄": "du",
"瓅": "li",
"瓆": "zhi",
"瓇": "rou",
"瓈": "li",
"瓉": "zan",
"瓊": "qiong",
"瓋": "ti",
"瓌": "gui",
"瓍": "sui",
"瓎": "la",
"瓏": "long",
"瓐": "lu",
"瓑": "li",
"瓒": "zan",
"瓓": "lan",
"瓔": "ying",
"瓕": "mi",
"瓖": "xiang",
"瓗": "qiong",
"瓘": "guan",
"瓙": "dao",
"瓚": "zan",
"瓛": "huan",
"瓜": "gua",
"瓝": "bo",
"瓞": "die",
"瓟": "bo",
"瓠": "hu",
"瓡": "zhi",
"瓢": "piao",
"瓣": "ban",
"瓤": "rang",
"瓥": "li",
"瓦": "wa",
"瓨": "xiang",
"瓪": "ban",
"瓫": "pen",
"瓬": "fang",
"瓭": "dan",
"瓮": "weng",
"瓯": "ou",
"瓳": "hu",
"瓴": "ling",
"瓵": "yi",
"瓶": "ping",
"瓷": "ci",
"瓸": "bai",
"瓹": "juan",
"瓺": "chang",
"瓻": "chi",
"瓽": "dang",
"瓾": "wa",
"瓿": "bu",
"甀": "zhui",
"甁": "ping",
"甂": "bian",
"甃": "zhou",
"甄": "zhen",
"甆": "ci",
"甇": "ying",
"甈": "qi",
"甉": "xian",
"甊": "lou",
"甋": "di",
"甌": "ou",
"甍": "meng",
"甎": "zhuan",
"甏": "beng",
"甐": "lin",
"甑": "zeng",
"甒": "wu",
"甓": "pi",
"甔": "dan",
"甕": "weng",
"甖": "ying",
"甗": "yan",
"甘": "gan",
"甙": "dai",
"甚": "shen",
"甛": "tian",
"甜": "tian",
"甝": "han",
"甞": "chang",
"生": "sheng",
"甠": "qing",
"甡": "shen",
"產": "chan",
"産": "chan",
"甤": "rui",
"甥": "sheng",
"甦": "su",
"甧": "shen",
"用": "yong",
"甩": "shuai",
"甪": "lu",
"甫": "fu",
"甬": "yong",
"甭": "beng",
"甮": "beng",
"甯": "ning",
"田": "tian",
"由": "you",
"甲": "jia",
"申": "shen",
"甴": "you",
"电": "dian",
"甶": "fu",
"男": "nan",
"甸": "dian",
"甹": "ping",
"町": "ting",
"画": "hua",
"甼": "ting",
"甽": "zhen",
"甾": "zai",
"甿": "meng",
"畀": "bi",
"畁": "bi",
"畂": "mu",
"畃": "xun",
"畄": "liu",
"畅": "chang",
"畆": "mu",
"畇": "yun",
"畈": "fan",
"畉": "fu",
"畊": "geng",
"畋": "tian",
"界": "jie",
"畍": "jie",
"畎": "quan",
"畏": "wei",
"畐": "fu",
"畑": "tian",
"畒": "mu",
"畓": "duo",
"畔": "pan",
"畕": "jiang",
"畖": "wa",
"畗": "da",
"畘": "nan",
"留": "liu",
"畚": "ben",
"畛": "zhen",
"畜": "xu",
"畝": "mu",
"畞": "mu",
"畟": "ce",
"畠": "zai",
"畡": "gai",
"畢": "bi",
"畣": "da",
"畤": "zhi",
"略": "lve",
"畦": "qi",
"畧": "lve",
"畨": "fan",
"畩": "yi",
"番": "fan",
"畫": "hua",
"畬": "she",
"畭": "she",
"畮": "mu",
"畯": "jun",
"異": "yi",
"畱": "liu",
"畲": "she",
"畳": "die",
"畴": "chou",
"畵": "hua",
"當": "dang",
"畷": "zhui",
"畸": "ji",
"畹": "wan",
"畺": "jiang",
"畻": "cheng",
"畼": "chang",
"畽": "tuan",
"畾": "lei",
"畿": "ji",
"疀": "cha",
"疁": "liu",
"疂": "die",
"疃": "tuan",
"疄": "lin",
"疅": "jiang",
"疆": "jiang",
"疇": "chou",
"疈": "pi",
"疉": "die",
"疊": "die",
"疋": "pi",
"疌": "jie",
"疍": "dan",
"疎": "shu",
"疏": "shu",
"疐": "zhi",
"疑": "yi",
"疒": "ne",
"疓": "nai",
"疔": "ding",
"疕": "bi",
"疖": "jie",
"疗": "liao",
"疘": "gang",
"疙": "ge",
"疚": "jiu",
"疛": "zhou",
"疜": "xia",
"疝": "shan",
"疞": "xu",
"疟": "nve",
"疠": "li",
"疡": "yang",
"疢": "chen",
"疣": "you",
"疤": "ba",
"疥": "jie",
"疦": "jue",
"疧": "qi",
"疨": "ya",
"疩": "cui",
"疪": "bi",
"疫": "yi",
"疬": "li",
"疭": "zong",
"疮": "chuang",
"疯": "feng",
"疰": "zhu",
"疱": "pao",
"疲": "pi",
"疳": "gan",
"疴": "ke",
"疵": "ci",
"疶": "xue",
"疷": "zhi",
"疸": "dan",
"疹": "zhen",
"疺": "fa",
"疻": "zhi",
"疼": "teng",
"疽": "ju",
"疾": "ji",
"疿": "fei",
"痀": "gou",
"痁": "shan",
"痂": "jia",
"痃": "xuan",
"痄": "zha",
"病": "bing",
"痆": "nie",
"症": "zheng",
"痈": "yong",
"痉": "jing",
"痊": "quan",
"痋": "teng",
"痌": "tong",
"痍": "yi",
"痎": "jie",
"痏": "wei",
"痐": "hui",
"痑": "tan",
"痒": "yang",
"痓": "zhi",
"痔": "zhi",
"痕": "hen",
"痖": "ya",
"痗": "mei",
"痘": "dou",
"痙": "jing",
"痚": "xiao",
"痛": "tong",
"痜": "tu",
"痝": "mang",
"痞": "pi",
"痟": "xiao",
"痠": "suan",
"痡": "pu",
"痢": "li",
"痣": "zhi",
"痤": "cuo",
"痥": "duo",
"痦": "wu",
"痧": "sha",
"痨": "lao",
"痩": "shou",
"痪": "huan",
"痫": "xian",
"痬": "yi",
"痭": "beng",
"痮": "zhang",
"痯": "guan",
"痰": "tan",
"痱": "fei",
"痲": "ma",
"痳": "ma",
"痴": "chi",
"痵": "ji",
"痶": "tian",
"痷": "an",
"痸": "chi",
"痹": "bi",
"痺": "bi",
"痻": "min",
"痼": "gu",
"痽": "dui",
"痾": "ke",
"痿": "wei",
"瘀": "yu",
"瘁": "cui",
"瘂": "ya",
"瘃": "zhu",
"瘄": "cu",
"瘅": "dan",
"瘆": "shen",
"瘇": "zhong",
"瘈": "zhi",
"瘉": "yu",
"瘊": "hou",
"瘋": "feng",
"瘌": "la",
"瘍": "yang",
"瘎": "chen",
"瘏": "tu",
"瘐": "yu",
"瘑": "guo",
"瘒": "wen",
"瘓": "huan",
"瘔": "ku",
"瘕": "jia",
"瘖": "yin",
"瘗": "yi",
"瘘": "lou",
"瘙": "sao",
"瘚": "jue",
"瘛": "chi",
"瘜": "xi",
"瘝": "guan",
"瘞": "yi",
"瘟": "wen",
"瘠": "ji",
"瘡": "chuang",
"瘢": "ban",
"瘣": "hui",
"瘤": "liu",
"瘥": "chai",
"瘦": "shou",
"瘧": "nve",
"瘨": "dian",
"瘩": "da",
"瘪": "bie",
"瘫": "tan",
"瘬": "zhang",
"瘭": "biao",
"瘮": "shen",
"瘯": "cu",
"瘰": "luo",
"瘱": "yi",
"瘲": "zong",
"瘳": "chou",
"瘴": "zhang",
"瘵": "zhai",
"瘶": "sou",
"瘷": "se",
"瘸": "que",
"瘹": "diao",
"瘺": "lou",
"瘻": "lou",
"瘼": "mo",
"瘽": "qin",
"瘾": "yin",
"瘿": "ying",
"癀": "huang",
"癁": "fu",
"療": "liao",
"癃": "long",
"癄": "qiao",
"癅": "liu",
"癆": "lao",
"癇": "xian",
"癈": "fei",
"癉": "dan",
"癊": "yin",
"癋": "he",
"癌": "ai",
"癍": "ban",
"癎": "xian",
"癏": "guan",
"癐": "gui",
"癑": "nong",
"癒": "yu",
"癓": "wei",
"癔": "yi",
"癕": "yong",
"癖": "pi",
"癗": "lei",
"癘": "li",
"癙": "shu",
"癚": "dan",
"癛": "lin",
"癜": "dian",
"癝": "lin",
"癞": "lai",
"癟": "bie",
"癠": "ji",
"癡": "chi",
"癢": "yang",
"癣": "xuan",
"癤": "jie",
"癥": "zheng",
"癦": "meng",
"癧": "li",
"癨": "huo",
"癩": "lai",
"癪": "ji",
"癫": "dian",
"癬": "xuan",
"癭": "ying",
"癮": "yin",
"癯": "qu",
"癰": "yong",
"癱": "tan",
"癲": "dian",
"癳": "luo",
"癴": "luan",
"癵": "luan",
"癶": "bo",
"癷": "bo",
"癸": "gui",
"癹": "ba",
"発": "fa",
"登": "deng",
"發": "fa",
"白": "bai",
"百": "bai",
"癿": "qie",
"皀": "ji",
"皁": "zao",
"皂": "zao",
"皃": "mao",
"的": "di",
"皅": "pa",
"皆": "jie",
"皇": "huang",
"皈": "gui",
"皉": "ci",
"皊": "ling",
"皋": "gao",
"皌": "mo",
"皍": "ji",
"皎": "jiao",
"皏": "peng",
"皐": "gao",
"皑": "ai",
"皒": "e",
"皓": "hao",
"皔": "han",
"皕": "bi",
"皖": "wan",
"皗": "chou",
"皘": "qian",
"皙": "xi",
"皚": "ai",
"皛": "xiao",
"皜": "hao",
"皝": "huang",
"皞": "hao",
"皟": "ze",
"皠": "cui",
"皡": "hao",
"皢": "xiao",
"皣": "ye",
"皤": "po",
"皥": "hao",
"皦": "jiao",
"皧": "ai",
"皨": "xing",
"皩": "huang",
"皪": "li",
"皫": "piao",
"皬": "he",
"皭": "jiao",
"皮": "pi",
"皯": "gan",
"皰": "pao",
"皱": "zhou",
"皲": "jun",
"皳": "qiu",
"皴": "cun",
"皵": "que",
"皶": "zha",
"皷": "gu",
"皸": "jun",
"皹": "jun",
"皺": "zhou",
"皻": "zha",
"皼": "gu",
"皽": "zhao",
"皾": "du",
"皿": "min",
"盀": "qi",
"盁": "ying",
"盂": "yu",
"盃": "bei",
"盄": "diao",
"盅": "zhong",
"盆": "pen",
"盇": "he",
"盈": "ying",
"盉": "he",
"益": "yi",
"盋": "bo",
"盌": "wan",
"盍": "he",
"盎": "ang",
"盏": "zhan",
"盐": "yan",
"监": "jian",
"盒": "he",
"盓": "yu",
"盔": "kui",
"盕": "fan",
"盖": "gai",
"盗": "dao",
"盘": "pan",
"盙": "fu",
"盚": "qiu",
"盛": "sheng",
"盜": "dao",
"盝": "lu",
"盞": "zhan",
"盟": "meng",
"盠": "li",
"盡": "jin",
"盢": "xu",
"監": "jian",
"盤": "pan",
"盥": "guan",
"盦": "an",
"盧": "lu",
"盨": "xu",
"盩": "zhou",
"盪": "dang",
"盫": "an",
"盬": "gu",
"盭": "li",
"目": "mu",
"盯": "ding",
"盰": "gan",
"盱": "xu",
"盲": "mang",
"盳": "mang",
"直": "zhi",
"盵": "qi",
"盶": "yuan",
"盷": "xian",
"相": "xiang",
"盹": "dun",
"盺": "xin",
"盻": "xi",
"盼": "pan",
"盽": "feng",
"盾": "dun",
"盿": "min",
"眀": "ming",
"省": "sheng",
"眂": "shi",
"眃": "yun",
"眄": "mian",
"眅": "pan",
"眆": "fang",
"眇": "miao",
"眈": "dan",
"眉": "mei",
"眊": "mao",
"看": "kan",
"県": "xian",
"眍": "kou",
"眎": "shi",
"眏": "yang",
"眐": "zheng",
"眑": "yao",
"眒": "shen",
"眓": "huo",
"眔": "da",
"眕": "zhen",
"眖": "kuang",
"眗": "ju",
"眘": "shen",
"眙": "yi",
"眚": "sheng",
"眛": "mei",
"眜": "mo",
"眝": "zhu",
"眞": "zhen",
"真": "zhen",
"眠": "mian",
"眡": "shi",
"眢": "yuan",
"眣": "die",
"眤": "ni",
"眥": "zi",
"眦": "zi",
"眧": "chao",
"眨": "zha",
"眩": "xuan",
"眪": "bing",
"眫": "pang",
"眬": "long",
"眭": "gui",
"眮": "tong",
"眯": "mi",
"眰": "die",
"眱": "di",
"眲": "ne",
"眳": "ming",
"眴": "xuan",
"眵": "chi",
"眶": "kuang",
"眷": "juan",
"眸": "mou",
"眹": "zhen",
"眺": "tiao",
"眻": "yang",
"眼": "yan",
"眽": "mo",
"眾": "zhong",
"眿": "mo",
"着": "zhuo",
"睁": "zheng",
"睂": "mei",
"睃": "suo",
"睄": "qiao",
"睅": "han",
"睆": "huan",
"睇": "di",
"睈": "cheng",
"睉": "cuo",
"睊": "juan",
"睋": "e",
"睌": "mian",
"睍": "xian",
"睎": "xi",
"睏": "kun",
"睐": "lai",
"睑": "jian",
"睒": "shan",
"睓": "tian",
"睔": "gun",
"睕": "wan",
"睖": "leng",
"睗": "shi",
"睘": "qiong",
"睙": "li",
"睚": "ya",
"睛": "jing",
"睜": "zheng",
"睝": "li",
"睞": "lai",
"睟": "sui",
"睠": "juan",
"睡": "shui",
"睢": "hui",
"督": "du",
"睤": "bi",
"睥": "bi",
"睦": "mu",
"睧": "hun",
"睨": "ni",
"睩": "lu",
"睪": "yi",
"睫": "jie",
"睬": "cai",
"睭": "zhou",
"睮": "yu",
"睯": "hun",
"睰": "ma",
"睱": "xia",
"睲": "xing",
"睳": "hui",
"睴": "hun",
"睵": "zai",
"睶": "chun",
"睷": "jian",
"睸": "mei",
"睹": "du",
"睺": "hou",
"睻": "xuan",
"睼": "ti",
"睽": "kui",
"睾": "gao",
"睿": "rui",
"瞀": "mao",
"瞁": "xu",
"瞂": "fa",
"瞃": "wo",
"瞄": "miao",
"瞅": "chou",
"瞆": "gui",
"瞇": "mi",
"瞈": "weng",
"瞉": "kou",
"瞊": "dang",
"瞋": "chen",
"瞌": "ke",
"瞍": "sou",
"瞎": "xia",
"瞏": "qiong",
"瞐": "mo",
"瞑": "ming",
"瞒": "man",
"瞓": "fen",
"瞔": "ze",
"瞕": "zhang",
"瞖": "yi",
"瞗": "diao",
"瞘": "kou",
"瞙": "mo",
"瞚": "shun",
"瞛": "cong",
"瞜": "lou",
"瞝": "chi",
"瞞": "man",
"瞟": "piao",
"瞠": "cheng",
"瞡": "gui",
"瞢": "meng",
"瞣": "wan",
"瞤": "run",
"瞥": "pie",
"瞦": "xi",
"瞧": "qiao",
"瞨": "pu",
"瞩": "zhu",
"瞪": "deng",
"瞫": "shen",
"瞬": "shun",
"瞭": "liao",
"瞮": "che",
"瞯": "xian",
"瞰": "kan",
"瞱": "ye",
"瞲": "xue",
"瞳": "tong",
"瞴": "wu",
"瞵": "lin",
"瞶": "gui",
"瞷": "jian",
"瞸": "ye",
"瞹": "ai",
"瞺": "hui",
"瞻": "zhan",
"瞼": "jian",
"瞽": "gu",
"瞾": "zhao",
"瞿": "qu",
"矀": "wei",
"矁": "chou",
"矂": "sao",
"矃": "ning",
"矄": "xun",
"矅": "yao",
"矆": "huo",
"矇": "meng",
"矈": "mian",
"矉": "pin",
"矊": "mian",
"矋": "lei",
"矌": "kuang",
"矍": "jue",
"矎": "xuan",
"矏": "mian",
"矐": "huo",
"矑": "lu",
"矒": "meng",
"矓": "long",
"矔": "guan",
"矕": "man",
"矖": "xi",
"矗": "chu",
"矘": "tang",
"矙": "kan",
"矚": "zhu",
"矛": "mao",
"矜": "jin",
"矝": "jin",
"矞": "yu",
"矟": "shuo",
"矠": "ze",
"矡": "jue",
"矢": "shi",
"矣": "yi",
"矤": "shen",
"知": "zhi",
"矦": "hou",
"矧": "shen",
"矨": "ying",
"矩": "ju",
"矪": "zhou",
"矫": "jiao",
"矬": "cuo",
"短": "duan",
"矮": "ai",
"矯": "jiao",
"矰": "zeng",
"矱": "yue",
"矲": "ba",
"石": "shi",
"矴": "ding",
"矵": "qi",
"矶": "ji",
"矷": "zi",
"矸": "gan",
"矹": "wu",
"矺": "zhe",
"矻": "ku",
"矼": "gang",
"矽": "xi",
"矾": "fan",
"矿": "kuang",
"砀": "dang",
"码": "ma",
"砂": "sha",
"砃": "dan",
"砄": "jue",
"砅": "li",
"砆": "fu",
"砇": "min",
"砈": "e",
"砉": "xu",
"砊": "kang",
"砋": "zhi",
"砌": "qi",
"砍": "kan",
"砎": "jie",
"砏": "pin",
"砐": "e",
"砑": "ya",
"砒": "pi",
"砓": "zhe",
"研": "yan",
"砕": "sui",
"砖": "zhuan",
"砗": "che",
"砘": "dun",
"砙": "wa",
"砚": "yan",
"砛": "jin",
"砜": "feng",
"砝": "fa",
"砞": "mo",
"砟": "zha",
"砠": "ju",
"砡": "yu",
"砢": "ke",
"砣": "tuo",
"砤": "tuo",
"砥": "di",
"砦": "zhai",
"砧": "zhen",
"砨": "e",
"砩": "fu",
"砪": "mu",
"砫": "zhu",
"砬": "li",
"砭": "bian",
"砮": "nu",
"砯": "ping",
"砰": "peng",
"砱": "ling",
"砲": "pao",
"砳": "le",
"破": "po",
"砵": "bo",
"砶": "po",
"砷": "shen",
"砸": "za",
"砹": "ai",
"砺": "li",
"砻": "long",
"砼": "tong",
"砽": "yong",
"砾": "li",
"砿": "kuang",
"础": "chu",
"硁": "keng",
"硂": "quan",
"硃": "zhu",
"硄": "kuang",
"硅": "gui",
"硆": "e",
"硇": "nao",
"硈": "qia",
"硉": "lu",
"硊": "wei",
"硋": "ai",
"硌": "luo",
"硍": "ken",
"硎": "xing",
"硏": "yan",
"硐": "dong",
"硑": "peng",
"硒": "xi",
"硓": "lao",
"硔": "hong",
"硕": "shuo",
"硖": "xia",
"硗": "qiao",
"硘": "qing",
"硙": "wei",
"硚": "qiao",
"硜": "keng",
"硝": "xiao",
"硞": "que",
"硟": "chan",
"硠": "lang",
"硡": "hong",
"硢": "yu",
"硣": "xiao",
"硤": "xia",
"硥": "mang",
"硦": "luo",
"硧": "yong",
"硨": "che",
"硩": "che",
"硪": "wo",
"硫": "liu",
"硬": "ying",
"硭": "mang",
"确": "que",
"硯": "yan",
"硰": "sha",
"硱": "kun",
"硲": "yu",
"硴": "hua",
"硵": "lu",
"硶": "chen",
"硷": "jian",
"硸": "nve",
"硹": "song",
"硺": "zhuo",
"硻": "keng",
"硼": "peng",
"硽": "yan",
"硾": "zhui",
"硿": "kong",
"碀": "cheng",
"碁": "qi",
"碂": "zong",
"碃": "qing",
"碄": "lin",
"碅": "jun",
"碆": "bo",
"碇": "ding",
"碈": "min",
"碉": "diao",
"碊": "jian",
"碋": "he",
"碌": "lu",
"碍": "ai",
"碎": "sui",
"碏": "que",
"碐": "leng",
"碑": "bei",
"碒": "yin",
"碓": "dui",
"碔": "wu",
"碕": "qi",
"碖": "lun",
"碗": "wan",
"碘": "dian",
"碙": "nao",
"碚": "bei",
"碛": "qi",
"碜": "chen",
"碝": "ruan",
"碞": "yan",
"碟": "die",
"碠": "ding",
"碡": "zhou",
"碢": "tuo",
"碣": "jie",
"碤": "ying",
"碥": "bian",
"碦": "ke",
"碧": "bi",
"碨": "wei",
"碩": "shuo",
"碪": "zhen",
"碫": "duan",
"碬": "xia",
"碭": "dang",
"碮": "ti",
"碯": "nao",
"碰": "peng",
"碱": "jian",
"碲": "di",
"碳": "tan",
"碴": "cha",
"碵": "tian",
"碶": "qi",
"碷": "dun",
"碸": "feng",
"碹": "xuan",
"確": "que",
"碻": "que",
"碼": "ma",
"碽": "gong",
"碾": "nian",
"碿": "su",
"磀": "e",
"磁": "ci",
"磂": "liu",
"磃": "si",
"磄": "tang",
"磅": "bang",
"磆": "hua",
"磇": "pi",
"磈": "kui",
"磉": "sang",
"磊": "lei",
"磋": "cuo",
"磌": "tian",
"磍": "xia",
"磎": "xi",
"磏": "lian",
"磐": "pan",
"磑": "ai",
"磒": "yun",
"磓": "dui",
"磔": "zhe",
"磕": "ke",
"磖": "la",
"磗": "zhuan",
"磘": "yao",
"磙": "gun",
"磚": "zhuan",
"磛": "chan",
"磜": "qi",
"磝": "ao",
"磞": "peng",
"磟": "liu",
"磠": "lu",
"磡": "kan",
"磢": "chuang",
"磣": "chen",
"磤": "yin",
"磥": "lei",
"磦": "biao",
"磧": "qi",
"磨": "mo",
"磩": "qi",
"磪": "cui",
"磫": "zong",
"磬": "qing",
"磭": "chuo",
"磮": "lun",
"磯": "ji",
"磰": "shan",
"磱": "lao",
"磲": "qu",
"磳": "zeng",
"磴": "deng",
"磵": "jian",
"磶": "xi",
"磷": "lin",
"磸": "ding",
"磹": "dian",
"磺": "huang",
"磻": "pan",
"磼": "ji",
"磽": "qiao",
"磾": "di",
"磿": "li",
"礀": "jian",
"礁": "jiao",
"礂": "xi",
"礃": "zhang",
"礄": "qiao",
"礅": "dun",
"礆": "jian",
"礇": "yu",
"礈": "zhui",
"礉": "he",
"礊": "ke",
"礋": "ze",
"礌": "lei",
"礍": "jie",
"礎": "chu",
"礏": "ye",
"礐": "que",
"礑": "dang",
"礒": "yi",
"礓": "jiang",
"礔": "pi",
"礕": "pi",
"礖": "yu",
"礗": "pin",
"礘": "e",
"礙": "ai",
"礚": "ke",
"礛": "jian",
"礜": "yu",
"礝": "ruan",
"礞": "meng",
"礟": "pao",
"礠": "ci",
"礡": "bo",
"礢": "yang",
"礣": "mie",
"礤": "ca",
"礥": "xian",
"礦": "kuang",
"礧": "lei",
"礨": "lei",
"礩": "zhi",
"礪": "li",
"礫": "li",
"礬": "fan",
"礭": "que",
"礮": "pao",
"礯": "ying",
"礰": "li",
"礱": "long",
"礲": "long",
"礳": "mo",
"礴": "bo",
"礵": "shuang",
"礶": "guan",
"礷": "jian",
"礸": "ca",
"礹": "yan",
"示": "shi",
"礻": "shi",
"礼": "li",
"礽": "reng",
"社": "she",
"礿": "yue",
"祀": "si",
"祁": "qi",
"祂": "ta",
"祃": "ma",
"祄": "xie",
"祅": "yao",
"祆": "xian",
"祇": "zhi",
"祈": "qi",
"祉": "zhi",
"祊": "beng",
"祋": "dui",
"祌": "zhong",
"祍": "ren",
"祎": "yi",
"祏": "shi",
"祐": "you",
"祑": "zhi",
"祒": "tiao",
"祓": "fu",
"祔": "fu",
"祕": "mi",
"祖": "zu",
"祗": "zhi",
"祘": "suan",
"祙": "mei",
"祚": "zuo",
"祛": "qu",
"祜": "hu",
"祝": "zhu",
"神": "shen",
"祟": "sui",
"祠": "ci",
"祡": "chai",
"祢": "mi",
"祣": "lv",
"祤": "yu",
"祥": "xiang",
"祦": "wu",
"祧": "tiao",
"票": "piao",
"祩": "zhu",
"祪": "gui",
"祫": "xia",
"祬": "zhi",
"祭": "ji",
"祮": "gao",
"祯": "zhen",
"祰": "gao",
"祱": "shui",
"祲": "jin",
"祳": "shen",
"祴": "gai",
"祵": "kun",
"祶": "di",
"祷": "dao",
"祸": "huo",
"祹": "tao",
"祺": "qi",
"祻": "gu",
"祼": "guan",
"祽": "zui",
"祾": "ling",
"祿": "lu",
"禀": "bing",
"禁": "jin",
"禂": "dao",
"禃": "zhi",
"禄": "lu",
"禅": "chan",
"禆": "bi",
"禇": "chu",
"禈": "hui",
"禉": "you",
"禊": "xi",
"禋": "yin",
"禌": "zi",
"禍": "huo",
"禎": "zhen",
"福": "fu",
"禐": "yuan",
"禑": "xu",
"禒": "xian",
"禓": "shang",
"禔": "ti",
"禕": "yi",
"禖": "mei",
"禗": "si",
"禘": "di",
"禙": "bei",
"禚": "zhuo",
"禛": "zhen",
"禜": "ying",
"禝": "ji",
"禞": "gao",
"禟": "tang",
"禠": "si",
"禡": "ma",
"禢": "ta",
"禣": "fu",
"禤": "xuan",
"禥": "qi",
"禦": "yu",
"禧": "xi",
"禨": "ji",
"禩": "si",
"禪": "shan",
"禫": "dan",
"禬": "gui",
"禭": "sui",
"禮": "li",
"禯": "nong",
"禰": "mi",
"禱": "dao",
"禲": "li",
"禳": "rang",
"禴": "yue",
"禵": "ti",
"禶": "zan",
"禷": "lei",
"禸": "rou",
"禹": "yu",
"禺": "yu",
"离": "li",
"禼": "xie",
"禽": "qin",
"禾": "he",
"禿": "tu",
"秀": "xiu",
"私": "si",
"秂": "ren",
"秃": "tu",
"秄": "zi",
"秅": "cha",
"秆": "gan",
"秇": "yi",
"秈": "xian",
"秉": "bing",
"秊": "nian",
"秋": "qiu",
"秌": "qiu",
"种": "zhong",
"秎": "fen",
"秏": "hao",
"秐": "yun",
"科": "ke",
"秒": "miao",
"秓": "zhi",
"秔": "jing",
"秕": "bi",
"秖": "zhi",
"秗": "yu",
"秘": "mi",
"秙": "ku",
"秚": "ban",
"秛": "pi",
"秜": "ni",
"秝": "li",
"秞": "you",
"租": "zu",
"秠": "pi",
"秡": "bo",
"秢": "ling",
"秣": "mo",
"秤": "cheng",
"秥": "nian",
"秦": "qin",
"秧": "yang",
"秨": "zuo",
"秩": "zhi",
"秪": "di",
"秫": "shu",
"秬": "ju",
"秭": "zi",
"秮": "huo",
"积": "ji",
"称": "cheng",
"秱": "tong",
"秲": "shi",
"秳": "huo",
"秴": "huo",
"秵": "yin",
"秶": "zi",
"秷": "zhi",
"秸": "jie",
"秹": "ren",
"秺": "du",
"移": "yi",
"秼": "zhu",
"秽": "hui",
"秾": "nong",
"秿": "fu",
"稀": "xi",
"稁": "gao",
"稂": "lang",
"稃": "fu",
"稄": "xun",
"稅": "shui",
"稆": "lv",
"稇": "kun",
"稈": "gan",
"稉": "jing",
"稊": "ti",
"程": "cheng",
"稌": "tu",
"稍": "shao",
"税": "shui",
"稏": "ya",
"稐": "lun",
"稑": "lu",
"稒": "gu",
"稓": "zuo",
"稔": "ren",
"稕": "zhun",
"稖": "bang",
"稗": "bai",
"稘": "ji",
"稙": "zhi",
"稚": "zhi",
"稛": "kun",
"稜": "leng",
"稝": "peng",
"稞": "ke",
"稟": "bing",
"稠": "chou",
"稡": "zui",
"稢": "yu",
"稣": "su",
"稤": "lve",
"稥": "xiang",
"稦": "yi",
"稧": "xi",
"稨": "bian",
"稩": "ji",
"稪": "fu",
"稫": "pi",
"稬": "nuo",
"稭": "jie",
"種": "zhong",
"稯": "zong",
"稰": "xu",
"稱": "cheng",
"稲": "dao",
"稳": "wen",
"稴": "xian",
"稵": "zi",
"稶": "yu",
"稷": "ji",
"稸": "xu",
"稹": "zhen",
"稺": "zhi",
"稻": "dao",
"稼": "jia",
"稽": "ji",
"稾": "gao",
"稿": "gao",
"穀": "gu",
"穁": "rong",
"穂": "sui",
"穃": "rong",
"穄": "ji",
"穅": "kang",
"穆": "mu",
"穇": "can",
"穈": "men",
"穉": "zhi",
"穊": "ji",
"穋": "lu",
"穌": "su",
"積": "ji",
"穎": "ying",
"穏": "wen",
"穐": "qiu",
"穑": "se",
"穓": "yi",
"穔": "huang",
"穕": "qie",
"穖": "ji",
"穗": "sui",
"穘": "xiao",
"穙": "pu",
"穚": "jiao",
"穛": "zhuo",
"穜": "tong",
"穝": "zuo",
"穞": "lu",
"穟": "sui",
"穠": "nong",
"穡": "se",
"穢": "hui",
"穣": "rang",
"穤": "nuo",
"穥": "yu",
"穦": "pin",
"穧": "ji",
"穨": "tui",
"穩": "wen",
"穪": "cheng",
"穫": "huo",
"穬": "kuang",
"穭": "lv",
"穮": "biao",
"穯": "se",
"穰": "rang",
"穱": "zhuo",
"穲": "li",
"穳": "cuan",
"穴": "xue",
"穵": "wa",
"究": "jiu",
"穷": "qiong",
"穸": "xi",
"穹": "qiong",
"空": "kong",
"穻": "yu",
"穼": "shen",
"穽": "jing",
"穾": "yao",
"穿": "chuan",
"窀": "zhun",
"突": "tu",
"窂": "lao",
"窃": "qie",
"窄": "zhai",
"窅": "yao",
"窆": "bian",
"窇": "bao",
"窈": "yao",
"窉": "bing",
"窊": "wa",
"窋": "zhu",
"窌": "jiao",
"窍": "qiao",
"窎": "diao",
"窏": "wu",
"窐": "wa",
"窑": "yao",
"窒": "zhi",
"窓": "chuang",
"窔": "yao",
"窕": "tiao",
"窖": "jiao",
"窗": "chuang",
"窘": "jiong",
"窙": "xiao",
"窚": "cheng",
"窛": "kou",
"窜": "cuan",
"窝": "wo",
"窞": "dan",
"窟": "ku",
"窠": "ke",
"窡": "zhuo",
"窢": "huo",
"窣": "su",
"窤": "guan",
"窥": "kui",
"窦": "dou",
"窧": "zhuo",
"窨": "yin",
"窩": "wo",
"窪": "wa",
"窫": "ya",
"窬": "yu",
"窭": "ju",
"窮": "qiong",
"窯": "yao",
"窰": "yao",
"窱": "tiao",
"窲": "chao",
"窳": "yu",
"窴": "tian",
"窵": "diao",
"窶": "ju",
"窷": "liao",
"窸": "xi",
"窹": "wu",
"窺": "kui",
"窻": "chuang",
"窼": "chao",
"窽": "kuan",
"窾": "kuan",
"窿": "long",
"竀": "cheng",
"竁": "cui",
"竂": "liao",
"竃": "zao",
"竄": "cuan",
"竅": "qiao",
"竆": "qiong",
"竇": "dou",
"竈": "zao",
"竉": "long",
"竊": "qie",
"立": "li",
"竌": "chu",
"竍": "shi",
"竎": "fu",
"竏": "qian",
"竐": "chu",
"竑": "hong",
"竒": "qi",
"竓": "hao",
"竔": "sheng",
"竕": "fen",
"竖": "shu",
"竗": "miao",
"竘": "qu",
"站": "zhan",
"竚": "zhu",
"竛": "ling",
"竜": "long",
"竝": "bing",
"竞": "jing",
"竟": "jing",
"章": "zhang",
"竡": "bai",
"竢": "si",
"竣": "jun",
"竤": "hong",
"童": "tong",
"竦": "song",
"竧": "jing",
"竨": "diao",
"竩": "yi",
"竪": "shu",
"竫": "jing",
"竬": "qu",
"竭": "jie",
"竮": "ping",
"端": "duan",
"竰": "li",
"竱": "zhuan",
"竲": "ceng",
"竳": "deng",
"竴": "cun",
"竵": "wai",
"競": "jing",
"竷": "kan",
"竸": "jing",
"竹": "zhu",
"竺": "zhu",
"竻": "le",
"竼": "peng",
"竽": "yu",
"竾": "chi",
"竿": "gan",
"笀": "mang",
"笁": "zhu",
"笂": "wan",
"笃": "du",
"笄": "ji",
"笅": "jiao",
"笆": "ba",
"笇": "suan",
"笈": "ji",
"笉": "qin",
"笊": "zhao",
"笋": "sun",
"笌": "ya",
"笍": "zhui",
"笎": "yuan",
"笏": "hu",
"笐": "hang",
"笑": "xiao",
"笒": "cen",
"笓": "pi",
"笔": "bi",
"笕": "jian",
"笖": "yi",
"笗": "dong",
"笘": "shan",
"笙": "sheng",
"笚": "da",
"笛": "di",
"笜": "zhu",
"笝": "na",
"笞": "chi",
"笟": "gu",
"笠": "li",
"笡": "qie",
"笢": "min",
"笣": "bao",
"笤": "tiao",
"笥": "si",
"符": "fu",
"笧": "ce",
"笨": "ben",
"笩": "fa",
"笪": "da",
"笫": "zi",
"第": "di",
"笭": "ling",
"笮": "zuo",
"笯": "nu",
"笰": "fu",
"笱": "gou",
"笲": "fan",
"笳": "jia",
"笴": "ge",
"笵": "fan",
"笶": "shi",
"笷": "mao",
"笸": "po",
"笹": "ti",
"笺": "jian",
"笻": "qiong",
"笼": "long",
"笽": "min",
"笾": "bian",
"笿": "luo",
"筀": "gui",
"筁": "qu",
"筂": "chi",
"筃": "yin",
"筄": "yao",
"筅": "xian",
"筆": "bi",
"筇": "qiong",
"筈": "kuo",
"等": "deng",
"筊": "jiao",
"筋": "jin",
"筌": "quan",
"筍": "sun",
"筎": "ru",
"筏": "fa",
"筐": "kuang",
"筑": "zhu",
"筒": "tong",
"筓": "ji",
"答": "da",
"筕": "hang",
"策": "ce",
"筗": "zhong",
"筘": "kou",
"筙": "lai",
"筚": "bi",
"筛": "shai",
"筜": "dang",
"筝": "zheng",
"筞": "ce",
"筟": "fu",
"筠": "yun",
"筡": "tu",
"筢": "pa",
"筣": "li",
"筤": "lang",
"筥": "ju",
"筦": "guan",
"筧": "jian",
"筨": "han",
"筩": "tong",
"筪": "xia",
"筫": "zhi",
"筬": "cheng",
"筭": "suan",
"筮": "shi",
"筯": "zhu",
"筰": "zuo",
"筱": "xiao",
"筲": "shao",
"筳": "ting",
"筴": "ce",
"筵": "yan",
"筶": "gao",
"筷": "kuai",
"筸": "gan",
"筹": "chou",
"筺": "kuang",
"筻": "gang",
"筼": "yun",
"筽": "o",
"签": "qian",
"筿": "xiao",
"简": "jian",
"箁": "pou",
"箂": "lai",
"箃": "zou",
"箄": "pai",
"箅": "bi",
"箆": "bi",
"箇": "ge",
"箈": "tai",
"箉": "guai",
"箊": "yu",
"箋": "jian",
"箌": "zhao",
"箍": "gu",
"箎": "chi",
"箏": "zheng",
"箐": "qing",
"箑": "sha",
"箒": "zhou",
"箓": "lu",
"箔": "bo",
"箕": "ji",
"箖": "lin",
"算": "suan",
"箘": "jun",
"箙": "fu",
"箚": "zha",
"箛": "gu",
"箜": "kong",
"箝": "qian",
"箞": "quan",
"箟": "jun",
"箠": "chui",
"管": "guan",
"箢": "wan",
"箣": "ce",
"箤": "zu",
"箥": "po",
"箦": "ze",
"箧": "qie",
"箨": "tuo",
"箩": "luo",
"箪": "dan",
"箫": "xiao",
"箬": "ruo",
"箭": "jian",
"箮": "xuan",
"箯": "bian",
"箰": "sun",
"箱": "xiang",
"箲": "xian",
"箳": "ping",
"箴": "zhen",
"箵": "xing",
"箶": "hu",
"箷": "shi",
"箸": "zhu",
"箹": "yue",
"箺": "chun",
"箻": "lv",
"箼": "wu",
"箽": "dong",
"箾": "shuo",
"箿": "ji",
"節": "jie",
"篁": "huang",
"篂": "xing",
"篃": "mei",
"範": "fan",
"篅": "chuan",
"篆": "zhuan",
"篇": "pian",
"篈": "feng",
"築": "zhu",
"篊": "hong",
"篋": "qie",
"篌": "hou",
"篍": "qiu",
"篎": "miao",
"篏": "qian",
"篐": "gu",
"篑": "kui",
"篒": "yi",
"篓": "lou",
"篔": "yun",
"篕": "he",
"篖": "tang",
"篗": "yue",
"篘": "chou",
"篙": "gao",
"篚": "fei",
"篛": "ruo",
"篜": "zheng",
"篝": "gou",
"篞": "nie",
"篟": "qian",
"篠": "xiao",
"篡": "cuan",
"篢": "gong",
"篣": "peng",
"篤": "du",
"篥": "li",
"篦": "bi",
"篧": "zhuo",
"篨": "chu",
"篩": "shai",
"篪": "chi",
"篫": "zhu",
"篬": "qiang",
"篭": "long",
"篮": "lan",
"篯": "jian",
"篰": "bu",
"篱": "li",
"篲": "hui",
"篳": "bi",
"篴": "zhu",
"篵": "cong",
"篶": "yan",
"篷": "peng",
"篸": "cen",
"篹": "zhuan",
"篺": "pi",
"篻": "piao",
"篼": "dou",
"篽": "yu",
"篾": "mie",
"篿": "tuan",
"簀": "ze",
"簁": "shai",
"簂": "guo",
"簃": "yi",
"簄": "hu",
"簅": "chan",
"簆": "kou",
"簇": "cu",
"簈": "ping",
"簉": "zao",
"簊": "ji",
"簋": "gui",
"簌": "su",
"簍": "lou",
"簎": "ce",
"簏": "lu",
"簐": "nian",
"簑": "suo",
"簒": "cuan",
"簓": "diao",
"簔": "suo",
"簕": "le",
"簖": "duan",
"簗": "zhu",
"簘": "xiao",
"簙": "bo",
"簚": "mi",
"簛": "shai",
"簜": "dang",
"簝": "liao",
"簞": "dan",
"簟": "dian",
"簠": "fu",
"簡": "jian",
"簢": "min",
"簣": "kui",
"簤": "dai",
"簥": "jiao",
"簦": "deng",
"簧": "huang",
"簨": "sun",
"簩": "lao",
"簪": "zan",
"簫": "xiao",
"簬": "lu",
"簭": "shi",
"簮": "zan",
"簯": "qi",
"簰": "pai",
"簱": "qi",
"簲": "pai",
"簳": "gan",
"簴": "ju",
"簵": "lu",
"簶": "lu",
"簷": "yan",
"簸": "bo",
"簹": "dang",
"簺": "sai",
"簻": "zhua",
"簼": "gou",
"簽": "qian",
"簾": "lian",
"簿": "bu",
"籀": "zhou",
"籁": "lai",
"籂": "shi",
"籃": "lan",
"籄": "kui",
"籅": "yu",
"籆": "yue",
"籇": "hao",
"籈": "zhen",
"籉": "tai",
"籊": "ti",
"籋": "nie",
"籌": "chou",
"籍": "ji",
"籎": "yi",
"籏": "qi",
"籐": "teng",
"籑": "zhuan",
"籒": "zhou",
"籓": "fan",
"籔": "sou",
"籕": "zhou",
"籖": "qian",
"籗": "zhuo",
"籘": "teng",
"籙": "lu",
"籚": "lu",
"籛": "jian",
"籜": "tuo",
"籝": "ying",
"籞": "yu",
"籟": "lai",
"籠": "long",
"籡": "qie",
"籢": "lian",
"籣": "lan",
"籤": "qian",
"籥": "yue",
"籦": "zhong",
"籧": "qu",
"籨": "lian",
"籩": "bian",
"籪": "duan",
"籫": "zuan",
"籬": "li",
"籭": "shai",
"籮": "luo",
"籯": "ying",
"籰": "yue",
"籱": "zhuo",
"籲": "yu",
"米": "mi",
"籴": "di",
"籵": "fan",
"籶": "shen",
"籷": "zhe",
"籸": "shen",
"籹": "nv",
"籺": "he",
"类": "lei",
"籼": "xian",
"籽": "zi",
"籾": "ni",
"籿": "cun",
"粀": "zhang",
"粁": "qian",
"粂": "zhai",
"粃": "bi",
"粄": "ban",
"粅": "wu",
"粆": "sha",
"粇": "kang",
"粈": "rou",
"粉": "fen",
"粊": "bi",
"粋": "cui",
"粌": "yin",
"粍": "zhe",
"粎": "mi",
"粏": "ta",
"粐": "hu",
"粑": "ba",
"粒": "li",
"粓": "gan",
"粔": "ju",
"粕": "po",
"粖": "yu",
"粗": "cu",
"粘": "nian",
"粙": "zhou",
"粚": "chi",
"粛": "su",
"粜": "tiao",
"粝": "li",
"粞": "xi",
"粟": "su",
"粠": "hong",
"粡": "tong",
"粢": "zi",
"粣": "ce",
"粤": "yue",
"粥": "zhou",
"粦": "lin",
"粧": "zhuang",
"粨": "bai",
"粩": "lao",
"粪": "fen",
"粫": "er",
"粬": "qu",
"粭": "he",
"粮": "liang",
"粯": "xian",
"粰": "fu",
"粱": "liang",
"粲": "can",
"粳": "jing",
"粴": "li",
"粵": "yue",
"粶": "lu",
"粷": "ju",
"粸": "qi",
"粹": "cui",
"粺": "bai",
"粻": "zhang",
"粼": "lin",
"粽": "zong",
"精": "jing",
"粿": "guo",
"糀": "hua",
"糁": "san",
"糂": "shen",
"糃": "tang",
"糄": "bian",
"糅": "rou",
"糆": "mian",
"糇": "hou",
"糈": "xu",
"糉": "zong",
"糊": "hu",
"糋": "jian",
"糌": "zan",
"糍": "ci",
"糎": "li",
"糏": "xie",
"糐": "fu",
"糑": "nuo",
"糒": "bei",
"糓": "gu",
"糔": "xiu",
"糕": "gao",
"糖": "tang",
"糗": "qiu",
"糘": "jia",
"糙": "cao",
"糚": "zhuang",
"糛": "tang",
"糜": "mi",
"糝": "san",
"糞": "fen",
"糟": "zao",
"糠": "kang",
"糡": "jiang",
"糢": "mo",
"糣": "san",
"糤": "san",
"糥": "nuo",
"糦": "xi",
"糧": "liang",
"糨": "jiang",
"糩": "kuai",
"糪": "bo",
"糫": "huan",
"糬": "shu",
"糭": "zong",
"糮": "xian",
"糯": "nuo",
"糰": "tuan",
"糱": "nie",
"糲": "li",
"糳": "zuo",
"糴": "di",
"糵": "nie",
"糶": "tiao",
"糷": "lan",
"糸": "mi",
"糹": "si",
"糺": "jiu",
"系": "xi",
"糼": "gong",
"糽": "zheng",
"糾": "jiu",
"糿": "gong",
"紀": "ji",
"紁": "cha",
"紂": "zhou",
"紃": "xun",
"約": "yue",
"紅": "hong",
"紆": "yu",
"紇": "he",
"紈": "wan",
"紉": "ren",
"紊": "wen",
"紋": "wen",
"紌": "qiu",
"納": "na",
"紎": "zi",
"紏": "tou",
"紐": "niu",
"紑": "fou",
"紒": "ji",
"紓": "shu",
"純": "chun",
"紕": "pi",
"紖": "zhen",
"紗": "sha",
"紘": "hong",
"紙": "zhi",
"級": "ji",
"紛": "fen",
"紜": "yun",
"紝": "ren",
"紞": "dan",
"紟": "jin",
"素": "su",
"紡": "fang",
"索": "suo",
"紣": "cui",
"紤": "jiu",
"紥": "zha",
"紦": "ha",
"紧": "jin",
"紨": "fu",
"紩": "zhi",
"紪": "qi",
"紫": "zi",
"紬": "chou",
"紭": "hong",
"紮": "zha",
"累": "lei",
"細": "xi",
"紱": "fu",
"紲": "xie",
"紳": "shen",
"紴": "bo",
"紵": "zhu",
"紶": "qu",
"紷": "ling",
"紸": "zhu",
"紹": "shao",
"紺": "gan",
"紻": "yang",
"紼": "fu",
"紽": "tuo",
"紾": "zhen",
"紿": "dai",
"絀": "chu",
"絁": "shi",
"終": "zhong",
"絃": "xian",
"組": "zu",
"絅": "jiong",
"絆": "ban",
"絇": "qu",
"絈": "mo",
"絉": "shu",
"絊": "zui",
"絋": "kuang",
"経": "jing",
"絍": "ren",
"絎": "hang",
"絏": "xie",
"結": "jie",
"絑": "zhu",
"絒": "chou",
"絓": "gua",
"絔": "bai",
"絕": "jue",
"絖": "kuang",
"絗": "hu",
"絘": "ci",
"絙": "huan",
"絚": "geng",
"絛": "tao",
"絜": "xie",
"絝": "ku",
"絞": "jiao",
"絟": "quan",
"絠": "gai",
"絡": "luo",
"絢": "xuan",
"絣": "beng",
"絤": "xian",
"絥": "fu",
"給": "gei",
"絧": "tong",
"絨": "rong",
"絩": "tiao",
"絪": "yin",
"絫": "lei",
"絬": "xie",
"絭": "juan",
"絮": "xu",
"絯": "gai",
"絰": "die",
"統": "tong",
"絲": "si",
"絳": "jiang",
"絴": "xiang",
"絵": "hui",
"絶": "jue",
"絷": "zhi",
"絸": "jian",
"絹": "juan",
"絺": "chi",
"絻": "mian",
"絼": "zhen",
"絽": "lv",
"絾": "cheng",
"絿": "qiu",
"綀": "shu",
"綁": "bang",
"綂": "tong",
"綃": "xiao",
"綄": "huan",
"綅": "qin",
"綆": "geng",
"綇": "xu",
"綈": "ti",
"綉": "xiu",
"綊": "xie",
"綋": "hong",
"綌": "xi",
"綍": "fu",
"綎": "ting",
"綏": "sui",
"綐": "dui",
"綑": "kun",
"綒": "fu",
"經": "jing",
"綔": "hu",
"綕": "zhi",
"綖": "yan",
"綗": "jiong",
"綘": "feng",
"継": "ji",
"続": "xu",
"綛": "ren",
"綜": "zong",
"綝": "lin",
"綞": "duo",
"綟": "li",
"綠": "lv",
"綡": "jing",
"綢": "chou",
"綣": "quan",
"綤": "shao",
"綥": "qi",
"綦": "qi",
"綧": "zhun",
"綨": "ji",
"綩": "wan",
"綪": "qian",
"綫": "xian",
"綬": "shou",
"維": "wei",
"綮": "qing",
"綯": "tao",
"綰": "wan",
"綱": "gang",
"網": "wang",
"綳": "beng",
"綴": "zhui",
"綵": "cai",
"綶": "guo",
"綷": "cui",
"綸": "lun",
"綹": "liu",
"綺": "qi",
"綻": "zhan",
"綼": "bi",
"綽": "chuo",
"綾": "ling",
"綿": "mian",
"緀": "qi",
"緁": "ji",
"緂": "tian",
"緃": "zong",
"緄": "gun",
"緅": "zou",
"緆": "xi",
"緇": "zi",
"緈": "xing",
"緉": "liang",
"緊": "jin",
"緋": "fei",
"緌": "rui",
"緍": "min",
"緎": "yu",
"総": "zong",
"緐": "fan",
"緑": "lv",
"緒": "xu",
"緓": "ying",
"緔": "shang",
"緕": "zi",
"緖": "xu",
"緗": "xiang",
"緘": "jian",
"緙": "ke",
"線": "xian",
"緛": "ruan",
"緜": "mian",
"緝": "ji",
"緞": "duan",
"緟": "chong",
"締": "di",
"緡": "min",
"緢": "miao",
"緣": "yuan",
"緤": "xie",
"緥": "bao",
"緦": "si",
"緧": "qiu",
"編": "bian",
"緩": "huan",
"緪": "geng",
"緫": "zong",
"緬": "mian",
"緭": "wei",
"緮": "fu",
"緯": "wei",
"緰": "tou",
"緱": "gou",
"緲": "miao",
"緳": "xie",
"練": "lian",
"緵": "zong",
"緶": "bian",
"緷": "gun",
"緸": "yin",
"緹": "ti",
"緺": "gua",
"緻": "zhi",
"緼": "yun",
"緽": "cheng",
"緾": "chan",
"緿": "dai",
"縀": "xie",
"縁": "yuan",
"縂": "zong",
"縃": "xu",
"縄": "sheng",
"縅": "wei",
"縆": "geng",
"縈": "ying",
"縉": "jin",
"縊": "yi",
"縋": "zhui",
"縌": "ni",
"縍": "bang",
"縎": "gu",
"縏": "pan",
"縐": "zhou",
"縑": "jian",
"縒": "ci",
"縓": "quan",
"縔": "shuang",
"縕": "yun",
"縖": "xia",
"縗": "cui",
"縘": "xi",
"縙": "rong",
"縚": "tao",
"縛": "fu",
"縜": "yun",
"縝": "zhen",
"縞": "gao",
"縟": "ru",
"縠": "hu",
"縡": "zai",
"縢": "teng",
"縣": "xian",
"縤": "su",
"縥": "zhen",
"縦": "zong",
"縧": "tao",
"縨": "huang",
"縩": "cai",
"縪": "bi",
"縫": "feng",
"縬": "cu",
"縭": "li",
"縮": "suo",
"縯": "yan",
"縰": "xi",
"縱": "zong",
"縲": "lei",
"縳": "zhuan",
"縴": "qian",
"縵": "man",
"縶": "zhi",
"縷": "lv",
"縸": "mu",
"縹": "piao",
"縺": "lian",
"縻": "mi",
"縼": "xuan",
"總": "zong",
"績": "ji",
"縿": "shan",
"繀": "sui",
"繁": "fan",
"繂": "lv",
"繃": "beng",
"繄": "yi",
"繅": "sao",
"繆": "mou",
"繇": "yao",
"繈": "qiang",
"繉": "sheng",
"繊": "xian",
"繋": "ji",
"繌": "zong",
"繍": "xiu",
"繎": "ran",
"繏": "xuan",
"繐": "sui",
"繑": "qiao",
"繒": "zeng",
"繓": "zuo",
"織": "zhi",
"繕": "shan",
"繖": "san",
"繗": "lin",
"繘": "ju",
"繙": "fan",
"繚": "liao",
"繛": "chuo",
"繜": "zun",
"繝": "jian",
"繞": "rao",
"繟": "chan",
"繠": "rui",
"繡": "xiu",
"繢": "hui",
"繣": "hua",
"繤": "zuan",
"繥": "xi",
"繦": "qiang",
"繧": "wen",
"繨": "da",
"繩": "sheng",
"繪": "hui",
"繫": "xi",
"繬": "se",
"繭": "jian",
"繮": "jiang",
"繯": "huan",
"繰": "qiao",
"繱": "cong",
"繲": "xie",
"繳": "jiao",
"繴": "bi",
"繵": "dan",
"繶": "yi",
"繷": "nong",
"繸": "sui",
"繹": "yi",
"繺": "sha",
"繻": "ru",
"繼": "ji",
"繽": "bin",
"繾": "qian",
"繿": "lan",
"纀": "pu",
"纁": "xun",
"纂": "zuan",
"纃": "zi",
"纄": "peng",
"纅": "yao",
"纆": "mo",
"纇": "lei",
"纈": "xie",
"纉": "zuan",
"纊": "kuang",
"纋": "you",
"續": "xu",
"纍": "lei",
"纎": "xian",
"纏": "chan",
"纐": "jiao",
"纑": "lu",
"纒": "chan",
"纓": "ying",
"纔": "cai",
"纕": "xiang",
"纖": "xian",
"纗": "zui",
"纘": "zuan",
"纙": "luo",
"纚": "li",
"纛": "dao",
"纜": "lan",
"纝": "lei",
"纞": "lian",
"纟": "si",
"纠": "jiu",
"纡": "yu",
"红": "hong",
"纣": "zhou",
"纤": "xian",
"纥": "he",
"约": "yue",
"级": "ji",
"纨": "wan",
"纩": "kuang",
"纪": "ji",
"纫": "ren",
"纬": "wei",
"纭": "yun",
"纮": "hong",
"纯": "chun",
"纰": "pi",
"纱": "sha",
"纲": "gang",
"纳": "na",
"纴": "ren",
"纵": "zong",
"纶": "lun",
"纷": "fen",
"纸": "zhi",
"纹": "wen",
"纺": "fang",
"纻": "zhu",
"纼": "zhen",
"纽": "niu",
"纾": "shu",
"线": "xian",
"绀": "gan",
"绁": "xie",
"绂": "fu",
"练": "lian",
"组": "zu",
"绅": "shen",
"细": "xi",
"织": "zhi",
"终": "zhong",
"绉": "zhou",
"绊": "ban",
"绋": "fu",
"绌": "chu",
"绍": "shao",
"绎": "yi",
"经": "jing",
"绐": "dai",
"绑": "bang",
"绒": "rong",
"结": "jie",
"绔": "ku",
"绕": "rao",
"绖": "die",
"绗": "hang",
"绘": "hui",
"给": "gei",
"绚": "xuan",
"绛": "jiang",
"络": "luo",
"绝": "jue",
"绞": "jiao",
"统": "tong",
"绠": "geng",
"绡": "xiao",
"绢": "juan",
"绣": "xiu",
"绤": "xi",
"绥": "sui",
"绦": "tao",
"继": "ji",
"绨": "ti",
"绩": "ji",
"绪": "xu",
"绫": "ling",
"绬": "ying",
"续": "xu",
"绮": "qi",
"绯": "fei",
"绰": "chuo",
"绱": "shang",
"绲": "gun",
"绳": "sheng",
"维": "wei",
"绵": "mian",
"绶": "shou",
"绷": "beng",
"绸": "chou",
"绹": "tao",
"绺": "liu",
"绻": "quan",
"综": "zong",
"绽": "zhan",
"绾": "wan",
"绿": "lv",
"缀": "zhui",
"缁": "zi",
"缂": "ke",
"缃": "xiang",
"缄": "jian",
"缅": "mian",
"缆": "lan",
"缇": "ti",
"缈": "miao",
"缉": "ji",
"缊": "yun",
"缋": "hui",
"缌": "si",
"缍": "duo",
"缎": "duan",
"缏": "bian",
"缐": "xian",
"缑": "gou",
"缒": "zhui",
"缓": "huan",
"缔": "di",
"缕": "lv",
"编": "bian",
"缗": "min",
"缘": "yuan",
"缙": "jin",
"缚": "fu",
"缛": "ru",
"缜": "zhen",
"缝": "feng",
"缞": "cui",
"缟": "gao",
"缠": "chan",
"缡": "li",
"缢": "yi",
"缣": "jian",
"缤": "bin",
"缥": "piao",
"缦": "man",
"缧": "lei",
"缨": "ying",
"缩": "suo",
"缪": "mou",
"缫": "sao",
"缬": "xie",
"缭": "liao",
"缮": "shan",
"缯": "zeng",
"缰": "jiang",
"缱": "qian",
"缲": "qiao",
"缳": "huan",
"缴": "jiao",
"缵": "zuan",
"缶": "fou",
"缷": "xie",
"缸": "gang",
"缹": "fou",
"缺": "que",
"缻": "fou",
"缼": "que",
"缽": "bo",
"缾": "ping",
"缿": "xiang",
"罀": "zhao",
"罁": "gang",
"罂": "ying",
"罃": "ying",
"罄": "qing",
"罅": "xia",
"罆": "guan",
"罇": "zun",
"罈": "tan",
"罉": "cheng",
"罊": "qi",
"罋": "weng",
"罌": "ying",
"罍": "lei",
"罎": "tan",
"罏": "lu",
"罐": "guan",
"网": "wang",
"罒": "wang",
"罓": "wang",
"罔": "wang",
"罕": "han",
"罖": "wang",
"罗": "luo",
"罘": "fu",
"罙": "shen",
"罚": "fa",
"罛": "gu",
"罜": "zhu",
"罝": "ju",
"罞": "mao",
"罟": "gu",
"罠": "min",
"罡": "gang",
"罢": "ba",
"罣": "gua",
"罤": "ti",
"罥": "juan",
"罦": "fu",
"罧": "shen",
"罨": "yan",
"罩": "zhao",
"罪": "zui",
"罫": "guai",
"罬": "zhuo",
"罭": "yu",
"置": "zhi",
"罯": "an",
"罰": "fa",
"罱": "lan",
"署": "shu",
"罳": "si",
"罴": "pi",
"罵": "ma",
"罶": "liu",
"罷": "ba",
"罸": "fa",
"罹": "li",
"罺": "chao",
"罻": "wei",
"罼": "bi",
"罽": "ji",
"罾": "zeng",
"罿": "chong",
"羀": "liu",
"羁": "ji",
"羂": "juan",
"羃": "mi",
"羄": "zhao",
"羅": "luo",
"羆": "pi",
"羇": "ji",
"羈": "ji",
"羉": "luan",
"羊": "yang",
"羋": "mi",
"羌": "qiang",
"羍": "da",
"美": "mei",
"羏": "yang",
"羐": "ling",
"羑": "you",
"羒": "fen",
"羓": "ba",
"羔": "gao",
"羕": "yang",
"羖": "gu",
"羗": "qiang",
"羘": "zang",
"羙": "mei",
"羚": "ling",
"羛": "yi",
"羜": "zhu",
"羝": "di",
"羞": "xiu",
"羟": "qiang",
"羠": "yi",
"羡": "xian",
"羢": "rong",
"羣": "qun",
"群": "qun",
"羥": "qiang",
"羦": "huan",
"羧": "suo",
"羨": "xian",
"義": "yi",
"羪": "you",
"羫": "qiang",
"羬": "qian",
"羭": "yu",
"羮": "geng",
"羯": "jie",
"羰": "tang",
"羱": "yuan",
"羲": "xi",
"羳": "fan",
"羴": "shan",
"羵": "fen",
"羶": "shan",
"羷": "lian",
"羸": "lei",
"羹": "geng",
"羺": "nou",
"羻": "qiang",
"羼": "chan",
"羽": "yu",
"羾": "hong",
"羿": "yi",
"翀": "chong",
"翁": "weng",
"翂": "fen",
"翃": "hong",
"翄": "chi",
"翅": "chi",
"翆": "cui",
"翇": "fu",
"翈": "xia",
"翉": "ben",
"翊": "yi",
"翋": "la",
"翌": "yi",
"翍": "pi",
"翎": "ling",
"翏": "liu",
"翐": "zhi",
"翑": "qu",
"習": "xi",
"翓": "xie",
"翔": "xiang",
"翕": "xi",
"翖": "xi",
"翗": "ke",
"翘": "qiao",
"翙": "hui",
"翚": "hui",
"翛": "xiao",
"翜": "sha",
"翝": "hong",
"翞": "jiang",
"翟": "di",
"翠": "cui",
"翡": "fei",
"翢": "dao",
"翣": "sha",
"翤": "chi",
"翥": "zhu",
"翦": "jian",
"翧": "xuan",
"翨": "chi",
"翩": "pian",
"翪": "zong",
"翫": "wan",
"翬": "hui",
"翭": "hou",
"翮": "he",
"翯": "he",
"翰": "han",
"翱": "ao",
"翲": "piao",
"翳": "yi",
"翴": "lian",
"翵": "hou",
"翶": "ao",
"翷": "lin",
"翸": "pen",
"翹": "qiao",
"翺": "ao",
"翻": "fan",
"翼": "yi",
"翽": "hui",
"翾": "xuan",
"翿": "dao",
"耀": "yao",
"老": "lao",
"耂": "lao",
"考": "kao",
"耄": "mao",
"者": "zhe",
"耆": "qi",
"耇": "gou",
"耈": "gou",
"耉": "gou",
"耊": "die",
"耋": "die",
"而": "er",
"耍": "shua",
"耎": "ruan",
"耏": "er",
"耐": "nai",
"耑": "duan",
"耒": "lei",
"耓": "ting",
"耔": "zi",
"耕": "geng",
"耖": "chao",
"耗": "hao",
"耘": "yun",
"耙": "ba",
"耚": "pi",
"耛": "si",
"耜": "si",
"耝": "qu",
"耞": "jia",
"耟": "ju",
"耠": "huo",
"耡": "chu",
"耢": "lao",
"耣": "lun",
"耤": "ji",
"耥": "tang",
"耦": "ou",
"耧": "lou",
"耨": "nou",
"耩": "jiang",
"耪": "pang",
"耫": "zha",
"耬": "lou",
"耭": "ji",
"耮": "lao",
"耯": "huo",
"耰": "you",
"耱": "mo",
"耲": "huai",
"耳": "er",
"耴": "yi",
"耵": "ding",
"耶": "ye",
"耷": "da",
"耸": "song",
"耹": "qin",
"耺": "yun",
"耻": "chi",
"耼": "dan",
"耽": "dan",
"耾": "hong",
"耿": "geng",
"聀": "zhi",
"聁": "pan",
"聂": "nie",
"聃": "dan",
"聄": "zhen",
"聅": "che",
"聆": "ling",
"聇": "zheng",
"聈": "you",
"聉": "wa",
"聊": "liao",
"聋": "long",
"职": "zhi",
"聍": "ning",
"聎": "tiao",
"聏": "er",
"聐": "ya",
"聑": "tie",
"聒": "guo",
"聓": "xu",
"联": "lian",
"聕": "hao",
"聖": "sheng",
"聗": "lie",
"聘": "pin",
"聙": "jing",
"聚": "ju",
"聛": "bi",
"聜": "di",
"聝": "guo",
"聞": "wen",
"聟": "xu",
"聠": "ping",
"聡": "cong",
"聢": "ding",
"聣": "ni",
"聤": "ting",
"聥": "ju",
"聦": "cong",
"聧": "kui",
"聨": "lian",
"聩": "kui",
"聪": "cong",
"聫": "lian",
"聬": "weng",
"聭": "kui",
"聮": "lian",
"聯": "lian",
"聰": "cong",
"聱": "ao",
"聲": "sheng",
"聳": "song",
"聴": "ting",
"聵": "kui",
"聶": "nie",
"職": "zhi",
"聸": "dan",
"聹": "ning",
"聺": "qie",
"聻": "ni",
"聼": "ting",
"聽": "ting",
"聾": "long",
"聿": "yu",
"肀": "yu",
"肁": "zhao",
"肂": "si",
"肃": "su",
"肄": "yi",
"肅": "su",
"肆": "si",
"肇": "zhao",
"肈": "zhao",
"肉": "rou",
"肊": "yi",
"肋": "lei",
"肌": "ji",
"肍": "qiu",
"肎": "ken",
"肏": "cao",
"肐": "ge",
"肑": "bo",
"肒": "huan",
"肓": "huang",
"肔": "chi",
"肕": "ren",
"肖": "xiao",
"肗": "ru",
"肘": "zhou",
"肙": "yuan",
"肚": "du",
"肛": "gang",
"肜": "rong",
"肝": "gan",
"肞": "chai",
"肟": "wo",
"肠": "chang",
"股": "gu",
"肢": "zhi",
"肣": "qin",
"肤": "fu",
"肥": "fei",
"肦": "ban",
"肧": "pei",
"肨": "pang",
"肩": "jian",
"肪": "fang",
"肫": "zhun",
"肬": "you",
"肭": "na",
"肮": "ang",
"肯": "ken",
"肰": "ran",
"肱": "gong",
"育": "yu",
"肳": "wen",
"肴": "yao",
"肵": "qi",
"肶": "pi",
"肷": "qian",
"肸": "xi",
"肹": "xi",
"肺": "fei",
"肻": "ken",
"肼": "jing",
"肽": "tai",
"肾": "shen",
"肿": "zhong",
"胀": "zhang",
"胁": "xie",
"胂": "shen",
"胃": "wei",
"胄": "zhou",
"胅": "die",
"胆": "dan",
"胇": "fei",
"胈": "ba",
"胉": "bo",
"胊": "qu",
"胋": "tian",
"背": "bei",
"胍": "gua",
"胎": "tai",
"胏": "zi",
"胐": "fei",
"胑": "zhi",
"胒": "ni",
"胓": "ping",
"胔": "zi",
"胕": "fu",
"胖": "pang",
"胗": "zhen",
"胘": "xian",
"胙": "zuo",
"胚": "pei",
"胛": "jia",
"胜": "sheng",
"胝": "zhi",
"胞": "bao",
"胟": "mu",
"胠": "qu",
"胡": "hu",
"胢": "qia",
"胣": "chi",
"胤": "yin",
"胥": "xu",
"胦": "yang",
"胧": "long",
"胨": "dong",
"胩": "ka",
"胪": "lu",
"胫": "jing",
"胬": "nu",
"胭": "yan",
"胮": "pang",
"胯": "kua",
"胰": "yi",
"胱": "guang",
"胲": "hai",
"胳": "ge",
"胴": "dong",
"胵": "chi",
"胶": "jiao",
"胷": "xiong",
"胸": "xiong",
"胹": "er",
"胺": "an",
"胻": "heng",
"胼": "pian",
"能": "neng",
"胾": "zi",
"胿": "gui",
"脀": "zheng",
"脁": "tiao",
"脂": "zhi",
"脃": "cui",
"脄": "mei",
"脅": "xie",
"脆": "cui",
"脇": "xie",
"脈": "mai",
"脉": "mai",
"脊": "ji",
"脋": "xie",
"脌": "nin",
"脍": "kuai",
"脎": "sa",
"脏": "zang",
"脐": "qi",
"脑": "nao",
"脒": "mi",
"脓": "nong",
"脔": "luan",
"脕": "wan",
"脖": "bo",
"脗": "wen",
"脘": "wan",
"脙": "xiu",
"脚": "jiao",
"脛": "jing",
"脜": "rou",
"脝": "heng",
"脞": "cuo",
"脟": "lie",
"脠": "shan",
"脡": "ting",
"脢": "mei",
"脣": "chun",
"脤": "shen",
"脥": "jia",
"脦": "te",
"脧": "juan",
"脨": "cu",
"脩": "xiu",
"脪": "xin",
"脫": "tuo",
"脬": "pao",
"脭": "cheng",
"脮": "nei",
"脯": "fu",
"脰": "dou",
"脱": "tuo",
"脲": "niao",
"脳": "nao",
"脴": "pi",
"脵": "gu",
"脶": "luo",
"脷": "li",
"脸": "lian",
"脹": "zhang",
"脺": "cui",
"脻": "jie",
"脼": "liang",
"脽": "shui",
"脾": "pi",
"脿": "biao",
"腀": "lun",
"腁": "pian",
"腂": "guo",
"腃": "juan",
"腄": "chui",
"腅": "dan",
"腆": "tian",
"腇": "nei",
"腈": "jing",
"腉": "nai",
"腊": "la",
"腋": "ye",
"腌": "a",
"腍": "ren",
"腎": "shen",
"腏": "zhui",
"腐": "fu",
"腑": "fu",
"腒": "ju",
"腓": "fei",
"腔": "qiang",
"腕": "wan",
"腖": "dong",
"腗": "pi",
"腘": "guo",
"腙": "zong",
"腚": "ding",
"腛": "wo",
"腜": "mei",
"腝": "ruan",
"腞": "zhuan",
"腟": "chi",
"腠": "cou",
"腡": "luo",
"腢": "ou",
"腣": "di",
"腤": "an",
"腥": "xing",
"腦": "nao",
"腧": "shu",
"腨": "shuan",
"腩": "nan",
"腪": "yun",
"腫": "zhong",
"腬": "rou",
"腭": "e",
"腮": "sai",
"腯": "tu",
"腰": "yao",
"腱": "jian",
"腲": "wei",
"腳": "jiao",
"腴": "yu",
"腵": "jia",
"腶": "duan",
"腷": "bi",
"腸": "chang",
"腹": "fu",
"腺": "xian",
"腻": "ni",
"腼": "mian",
"腽": "wa",
"腾": "teng",
"腿": "tui",
"膀": "bang",
"膁": "qian",
"膂": "lv",
"膃": "wa",
"膄": "shou",
"膅": "tang",
"膆": "su",
"膇": "zhui",
"膈": "ge",
"膉": "yi",
"膊": "bo",
"膋": "liao",
"膌": "ji",
"膍": "pi",
"膎": "xie",
"膏": "gao",
"膐": "lv",
"膑": "bin",
"膒": "ou",
"膓": "chang",
"膔": "lu",
"膕": "guo",
"膖": "pang",
"膗": "chuai",
"膘": "biao",
"膙": "jiang",
"膚": "fu",
"膛": "tang",
"膜": "mo",
"膝": "xi",
"膞": "zhuan",
"膟": "lv",
"膠": "jiao",
"膡": "ying",
"膢": "lv",
"膣": "zhi",
"膤": "xue",
"膥": "cun",
"膦": "lin",
"膧": "tong",
"膨": "peng",
"膩": "ni",
"膪": "chuai",
"膫": "liao",
"膬": "cui",
"膭": "kui",
"膮": "xiao",
"膯": "teng",
"膰": "fan",
"膱": "zhi",
"膲": "jiao",
"膳": "shan",
"膴": "hu",
"膵": "cui",
"膶": "run",
"膷": "xiang",
"膸": "sui",
"膹": "fen",
"膺": "ying",
"膻": "shan",
"膼": "zhua",
"膽": "dan",
"膾": "kuai",
"膿": "nong",
"臀": "tun",
"臁": "lian",
"臂": "bi",
"臃": "yong",
"臄": "jue",
"臅": "chu",
"臆": "yi",
"臇": "juan",
"臈": "la",
"臉": "lian",
"臊": "sao",
"臋": "tun",
"臌": "gu",
"臍": "qi",
"臎": "cui",
"臏": "bin",
"臐": "xun",
"臑": "nao",
"臒": "wo",
"臓": "zang",
"臔": "xian",
"臕": "biao",
"臖": "xing",
"臗": "kuan",
"臘": "la",
"臙": "yan",
"臚": "lu",
"臛": "huo",
"臜": "za",
"臝": "luo",
"臞": "qu",
"臟": "zang",
"臠": "luan",
"臡": "ni",
"臢": "za",
"臣": "chen",
"臤": "qian",
"臥": "wo",
"臦": "guang",
"臧": "zang",
"臨": "lin",
"臩": "guang",
"自": "zi",
"臫": "jiao",
"臬": "nie",
"臭": "chou",
"臮": "ji",
"臯": "gao",
"臰": "chou",
"臱": "mian",
"臲": "nie",
"至": "zhi",
"致": "zhi",
"臵": "ge",
"臶": "jian",
"臷": "die",
"臸": "zhi",
"臹": "xiu",
"臺": "tai",
"臻": "zhen",
"臼": "jiu",
"臽": "xian",
"臾": "yu",
"臿": "cha",
"舀": "yao",
"舁": "yu",
"舂": "chong",
"舃": "xi",
"舄": "xi",
"舅": "jiu",
"舆": "yu",
"與": "yu",
"興": "xing",
"舉": "ju",
"舊": "jiu",
"舋": "xin",
"舌": "she",
"舍": "she",
"舎": "she",
"舏": "jiu",
"舐": "shi",
"舑": "tan",
"舒": "shu",
"舓": "shi",
"舔": "tian",
"舕": "tan",
"舖": "pu",
"舗": "pu",
"舘": "guan",
"舙": "hua",
"舚": "tian",
"舛": "chuan",
"舜": "shun",
"舝": "xia",
"舞": "wu",
"舟": "zhou",
"舠": "dao",
"舡": "chuan",
"舢": "shan",
"舣": "yi",
"舤": "fan",
"舥": "pa",
"舦": "tai",
"舧": "fan",
"舨": "ban",
"舩": "chuan",
"航": "hang",
"舫": "fang",
"般": "ban",
"舭": "bi",
"舮": "lu",
"舯": "zhong",
"舰": "jian",
"舱": "cang",
"舲": "ling",
"舳": "zhu",
"舴": "ze",
"舵": "duo",
"舶": "bo",
"舷": "xian",
"舸": "ge",
"船": "chuan",
"舺": "xia",
"舻": "lu",
"舼": "qiong",
"舽": "pang",
"舾": "xi",
"舿": "kua",
"艀": "fu",
"艁": "zao",
"艂": "feng",
"艃": "li",
"艄": "shao",
"艅": "yu",
"艆": "lang",
"艇": "ting",
"艈": "yu",
"艉": "wei",
"艊": "bo",
"艋": "meng",
"艌": "nian",
"艍": "ju",
"艎": "huang",
"艏": "shou",
"艐": "ke",
"艑": "bian",
"艒": "mu",
"艓": "die",
"艔": "dao",
"艕": "bang",
"艖": "cha",
"艗": "yi",
"艘": "sou",
"艙": "cang",
"艚": "cao",
"艛": "lou",
"艜": "dai",
"艝": "xue",
"艞": "yao",
"艟": "chong",
"艠": "deng",
"艡": "dang",
"艢": "qiang",
"艣": "lu",
"艤": "yi",
"艥": "ji",
"艦": "jian",
"艧": "huo",
"艨": "meng",
"艩": "qi",
"艪": "lu",
"艫": "lu",
"艬": "chan",
"艭": "shuang",
"艮": "gen",
"良": "liang",
"艰": "jian",
"艱": "jian",
"色": "se",
"艳": "yan",
"艴": "fu",
"艵": "ping",
"艶": "yan",
"艷": "yan",
"艸": "cao",
"艹": "ao",
"艺": "yi",
"艻": "le",
"艼": "ding",
"艽": "jiao",
"艾": "ai",
"艿": "nai",
"芀": "tiao",
"芁": "qiu",
"节": "jie",
"芃": "peng",
"芄": "wan",
"芅": "yi",
"芆": "chai",
"芇": "mian",
"芈": "mi",
"芉": "gan",
"芊": "qian",
"芋": "yu",
"芌": "yu",
"芍": "shao",
"芎": "xiong",
"芏": "du",
"芐": "hu",
"芑": "qi",
"芒": "mang",
"芓": "zi",
"芔": "hui",
"芕": "sui",
"芖": "zhi",
"芗": "xiang",
"芘": "bi",
"芙": "fu",
"芚": "tun",
"芛": "wei",
"芜": "wu",
"芝": "zhi",
"芞": "qi",
"芟": "shan",
"芠": "wen",
"芡": "qian",
"芢": "ren",
"芣": "fu",
"芤": "kou",
"芥": "jie",
"芦": "lu",
"芧": "xu",
"芨": "ji",
"芩": "qin",
"芪": "qi",
"芫": "yuan",
"芬": "fen",
"芭": "ba",
"芮": "rui",
"芯": "xin",
"芰": "ji",
"花": "hua",
"芲": "lun",
"芳": "fang",
"芴": "wu",
"芵": "jue",
"芶": "gou",
"芷": "zhi",
"芸": "yun",
"芹": "qin",
"芺": "ao",
"芻": "chu",
"芼": "mao",
"芽": "ya",
"芾": "fei",
"芿": "reng",
"苀": "hang",
"苁": "cong",
"苂": "chan",
"苃": "you",
"苄": "bian",
"苅": "yi",
"苆": "qie",
"苇": "wei",
"苈": "li",
"苉": "pi",
"苊": "e",
"苋": "xian",
"苌": "chang",
"苍": "cang",
"苎": "zhu",
"苏": "su",
"苐": "di",
"苑": "yuan",
"苒": "ran",
"苓": "ling",
"苔": "tai",
"苕": "tiao",
"苖": "di",
"苗": "miao",
"苘": "qing",
"苙": "li",
"苚": "yong",
"苛": "ke",
"苜": "mu",
"苝": "bei",
"苞": "bao",
"苟": "gou",
"苠": "min",
"苡": "yi",
"苢": "yi",
"苣": "ju",
"苤": "pie",
"若": "ruo",
"苦": "ku",
"苧": "zhu",
"苨": "ni",
"苩": "pa",
"苪": "bing",
"苫": "shan",
"苬": "xiu",
"苭": "yao",
"苮": "xian",
"苯": "ben",
"苰": "hong",
"英": "ying",
"苲": "zuo",
"苳": "dong",
"苴": "ju",
"苵": "die",
"苶": "nie",
"苷": "gan",
"苸": "hu",
"苹": "ping",
"苺": "mei",
"苻": "fu",
"苼": "sheng",
"苽": "gu",
"苾": "bi",
"苿": "wei",
"茀": "fu",
"茁": "zhuo",
"茂": "mao",
"范": "fan",
"茄": "qie",
"茅": "mao",
"茆": "mao",
"茇": "ba",
"茈": "zi",
"茉": "mo",
"茊": "zi",
"茋": "zhi",
"茌": "chi",
"茍": "ji",
"茎": "jing",
"茏": "long",
"茐": "cong",
"茑": "niao",
"茒": "yuan",
"茓": "xue",
"茔": "ying",
"茕": "qiong",
"茖": "ge",
"茗": "ming",
"茘": "li",
"茙": "rong",
"茚": "yin",
"茛": "gen",
"茜": "qian",
"茝": "chai",
"茞": "chen",
"茟": "yu",
"茠": "hao",
"茡": "zi",
"茢": "lie",
"茣": "wu",
"茤": "ji",
"茥": "gui",
"茦": "ci",
"茧": "jian",
"茨": "ci",
"茩": "hou",
"茪": "guang",
"茫": "mang",
"茬": "cha",
"茭": "jiao",
"茮": "jiao",
"茯": "fu",
"茰": "yu",
"茱": "zhu",
"茲": "zi",
"茳": "jiang",
"茴": "hui",
"茵": "yin",
"茶": "cha",
"茷": "fa",
"茸": "rong",
"茹": "ru",
"茺": "chong",
"茻": "mang",
"茼": "tong",
"茽": "zhong",
"茾": "qian",
"茿": "zhu",
"荀": "xun",
"荁": "huan",
"荂": "fu",
"荃": "quan",
"荄": "gai",
"荅": "da",
"荆": "jing",
"荇": "xing",
"荈": "chuan",
"草": "cao",
"荊": "jing",
"荋": "er",
"荌": "an",
"荍": "qiao",
"荎": "chi",
"荏": "ren",
"荐": "jian",
"荑": "yi",
"荒": "huang",
"荓": "ping",
"荔": "li",
"荕": "jin",
"荖": "lao",
"荗": "shu",
"荘": "zhuang",
"荙": "da",
"荚": "jia",
"荛": "rao",
"荜": "bi",
"荝": "ce",
"荞": "qiao",
"荟": "hui",
"荠": "ji",
"荡": "dang",
"荢": "zi",
"荣": "rong",
"荤": "hun",
"荥": "xing",
"荦": "luo",
"荧": "ying",
"荨": "qian",
"荩": "jin",
"荪": "sun",
"荫": "yin",
"荬": "mai",
"荭": "hong",
"荮": "zhou",
"药": "yao",
"荰": "du",
"荱": "wei",
"荲": "li",
"荳": "dou",
"荴": "fu",
"荵": "ren",
"荶": "yin",
"荷": "he",
"荸": "bi",
"荹": "bu",
"荺": "yun",
"荻": "di",
"荼": "tu",
"荽": "sui",
"荾": "sui",
"荿": "cheng",
"莀": "chen",
"莁": "wu",
"莂": "bie",
"莃": "xi",
"莄": "geng",
"莅": "li",
"莆": "pu",
"莇": "zhu",
"莈": "mo",
"莉": "li",
"莊": "zhuang",
"莋": "zuo",
"莌": "tuo",
"莍": "qiu",
"莎": "suo",
"莏": "suo",
"莐": "chen",
"莑": "peng",
"莒": "ju",
"莓": "mei",
"莔": "meng",
"莕": "xing",
"莖": "jing",
"莗": "che",
"莘": "shen",
"莙": "jun",
"莚": "yan",
"莛": "ting",
"莜": "you",
"莝": "cuo",
"莞": "guan",
"莟": "han",
"莠": "you",
"莡": "cuo",
"莢": "jia",
"莣": "wang",
"莤": "su",
"莥": "niu",
"莦": "shao",
"莧": "xian",
"莨": "lang",
"莩": "fu",
"莪": "e",
"莫": "mo",
"莬": "wen",
"莭": "jie",
"莮": "nan",
"莯": "mu",
"莰": "kan",
"莱": "lai",
"莲": "lian",
"莳": "shi",
"莴": "wo",
"莵": "tu",
"莶": "xian",
"获": "huo",
"莸": "you",
"莹": "ying",
"莺": "ying",
"莼": "chun",
"莽": "mang",
"莾": "mang",
"莿": "ci",
"菀": "wan",
"菁": "jing",
"菂": "di",
"菃": "qu",
"菄": "dong",
"菅": "jian",
"菆": "zou",
"菇": "gu",
"菈": "la",
"菉": "lu",
"菊": "ju",
"菋": "wei",
"菌": "jun",
"菍": "nie",
"菎": "kun",
"菏": "he",
"菐": "pu",
"菑": "zi",
"菒": "gao",
"菓": "guo",
"菔": "fu",
"菕": "lun",
"菖": "chang",
"菗": "chou",
"菘": "song",
"菙": "chui",
"菚": "zhan",
"菛": "men",
"菜": "cai",
"菝": "ba",
"菞": "li",
"菟": "tu",
"菠": "bo",
"菡": "han",
"菢": "bao",
"菣": "qin",
"菤": "juan",
"菥": "xi",
"菦": "qin",
"菧": "di",
"菨": "jie",
"菩": "pu",
"菪": "dang",
"菫": "jin",
"菬": "qiao",
"菭": "tai",
"菮": "geng",
"華": "hua",
"菰": "gu",
"菱": "ling",
"菲": "fei",
"菳": "qin",
"菴": "an",
"菵": "wang",
"菶": "beng",
"菷": "zhou",
"菸": "yan",
"菹": "zu",
"菺": "jian",
"菻": "lin",
"菼": "tan",
"菽": "shu",
"菾": "tian",
"菿": "dao",
"萀": "hu",
"萁": "qi",
"萂": "he",
"萃": "cui",
"萄": "tao",
"萅": "chun",
"萆": "bi",
"萇": "chang",
"萈": "huan",
"萉": "fei",
"萊": "lai",
"萋": "qi",
"萌": "meng",
"萍": "ping",
"萎": "wei",
"萏": "dan",
"萐": "sha",
"萑": "huan",
"萒": "yan",
"萓": "yi",
"萔": "tiao",
"萕": "qi",
"萖": "wan",
"萗": "ce",
"萘": "nai",
"萙": "zhen",
"萚": "tuo",
"萛": "jiu",
"萜": "tie",
"萝": "luo",
"萞": "bi",
"萟": "yi",
"萠": "pan",
"萡": "bo",
"萢": "pao",
"萣": "ding",
"萤": "ying",
"营": "ying",
"萦": "ying",
"萧": "xiao",
"萨": "sa",
"萩": "qiu",
"萪": "ke",
"萫": "xiang",
"萬": "wan",
"萭": "yu",
"萮": "yu",
"萯": "fu",
"萰": "lian",
"萱": "xuan",
"萲": "xuan",
"萳": "nan",
"萴": "ce",
"萵": "wo",
"萶": "chun",
"萷": "shao",
"萸": "yu",
"萹": "bian",
"萺": "mao",
"萻": "an",
"萼": "e",
"落": "la",
"萾": "ying",
"萿": "kuo",
"葀": "kuo",
"葁": "jiang",
"葂": "mian",
"葃": "zuo",
"葄": "zuo",
"葅": "zu",
"葆": "bao",
"葇": "rou",
"葈": "xi",
"葉": "ye",
"葊": "an",
"葋": "qu",
"葌": "jian",
"葍": "fu",
"葎": "lv",
"葏": "jing",
"葐": "pen",
"葑": "feng",
"葒": "hong",
"葓": "hong",
"葔": "hou",
"葕": "xing",
"葖": "tu",
"著": "zhu",
"葘": "zi",
"葙": "xiang",
"葚": "shen",
"葛": "ge",
"葜": "qia",
"葝": "qing",
"葞": "mi",
"葟": "huang",
"葠": "shen",
"葡": "pu",
"葢": "gai",
"董": "dong",
"葤": "zhou",
"葥": "qian",
"葦": "wei",
"葧": "bo",
"葨": "wei",
"葩": "pa",
"葪": "ji",
"葫": "hu",
"葬": "zang",
"葭": "jia",
"葮": "duan",
"葯": "yao",
"葰": "jun",
"葱": "cong",
"葲": "quan",
"葳": "wei",
"葴": "zhen",
"葵": "kui",
"葶": "ting",
"葷": "hun",
"葸": "xi",
"葹": "shi",
"葺": "qi",
"葻": "lan",
"葼": "zong",
"葽": "yao",
"葾": "yuan",
"葿": "mei",
"蒀": "yun",
"蒁": "shu",
"蒂": "di",
"蒃": "zhuan",
"蒄": "guan",
"蒅": "ran",
"蒆": "xue",
"蒇": "chan",
"蒈": "kai",
"蒉": "kui",
"蒊": "hua",
"蒋": "jiang",
"蒌": "lou",
"蒍": "wei",
"蒎": "pai",
"蒏": "yong",
"蒐": "sou",
"蒑": "yin",
"蒒": "shi",
"蒓": "chun",
"蒔": "shi",
"蒕": "yun",
"蒖": "zhen",
"蒗": "lang",
"蒘": "ru",
"蒙": "meng",
"蒚": "li",
"蒛": "que",
"蒜": "suan",
"蒝": "yuan",
"蒞": "li",
"蒟": "ju",
"蒠": "xi",
"蒡": "bang",
"蒢": "chu",
"蒣": "xu",
"蒤": "tu",
"蒥": "liu",
"蒦": "huo",
"蒧": "dian",
"蒨": "qian",
"蒩": "zu",
"蒪": "po",
"蒫": "cuo",
"蒬": "yuan",
"蒭": "chu",
"蒮": "yu",
"蒯": "kuai",
"蒰": "pan",
"蒱": "pu",
"蒲": "pu",
"蒳": "na",
"蒴": "shuo",
"蒵": "xi",
"蒶": "fen",
"蒷": "yun",
"蒸": "zheng",
"蒹": "jian",
"蒺": "ji",
"蒻": "ruo",
"蒼": "cang",
"蒽": "en",
"蒾": "mi",
"蒿": "hao",
"蓀": "sun",
"蓁": "zhen",
"蓂": "ming",
"蓃": "sou",
"蓄": "xu",
"蓅": "liu",
"蓆": "xi",
"蓇": "gu",
"蓈": "lang",
"蓉": "rong",
"蓊": "weng",
"蓋": "gai",
"蓌": "cuo",
"蓍": "shi",
"蓎": "tang",
"蓏": "luo",
"蓐": "ru",
"蓑": "suo",
"蓒": "xuan",
"蓓": "bei",
"蓔": "yao",
"蓕": "gui",
"蓖": "bi",
"蓗": "zong",
"蓘": "gun",
"蓙": "zuo",
"蓚": "tiao",
"蓛": "ce",
"蓜": "pei",
"蓝": "lan",
"蓞": "dan",
"蓟": "ji",
"蓠": "li",
"蓡": "shen",
"蓢": "lang",
"蓣": "yu",
"蓤": "ling",
"蓥": "ying",
"蓦": "mo",
"蓧": "diao",
"蓨": "tiao",
"蓩": "mao",
"蓪": "tong",
"蓫": "zhu",
"蓬": "peng",
"蓭": "an",
"蓮": "lian",
"蓯": "cong",
"蓰": "xi",
"蓱": "ping",
"蓲": "qiu",
"蓳": "jin",
"蓴": "chun",
"蓵": "jie",
"蓶": "wei",
"蓷": "tui",
"蓸": "cao",
"蓹": "yu",
"蓺": "yi",
"蓻": "zi",
"蓼": "liao",
"蓽": "bi",
"蓾": "lu",
"蓿": "xu",
"蔀": "bu",
"蔁": "zhang",
"蔂": "lei",
"蔃": "qiang",
"蔄": "man",
"蔅": "yan",
"蔆": "ling",
"蔇": "ji",
"蔈": "biao",
"蔉": "gun",
"蔊": "han",
"蔋": "di",
"蔌": "su",
"蔍": "lu",
"蔎": "she",
"蔏": "shang",
"蔐": "di",
"蔑": "mie",
"蔒": "hun",
"蔓": "man",
"蔔": "bo",
"蔕": "di",
"蔖": "cuo",
"蔗": "zhe",
"蔘": "shen",
"蔙": "xuan",
"蔚": "wei",
"蔛": "hu",
"蔜": "ao",
"蔝": "mi",
"蔞": "lou",
"蔟": "cu",
"蔠": "zhong",
"蔡": "cai",
"蔢": "po",
"蔣": "jiang",
"蔤": "mi",
"蔥": "cong",
"蔦": "niao",
"蔧": "hui",
"蔨": "juan",
"蔩": "yin",
"蔪": "jian",
"蔫": "nian",
"蔬": "shu",
"蔭": "yin",
"蔮": "guo",
"蔯": "chen",
"蔰": "hu",
"蔱": "sha",
"蔲": "kou",
"蔳": "qian",
"蔴": "ma",
"蔵": "zang",
"蔶": "ze",
"蔷": "qiang",
"蔸": "dou",
"蔹": "lian",
"蔺": "lin",
"蔻": "kou",
"蔼": "ai",
"蔽": "bi",
"蔾": "li",
"蔿": "wei",
"蕀": "ji",
"蕁": "qian",
"蕂": "sheng",
"蕃": "fan",
"蕄": "meng",
"蕅": "ou",
"蕆": "chan",
"蕇": "dian",
"蕈": "xun",
"蕉": "jiao",
"蕊": "rui",
"蕋": "rui",
"蕌": "lei",
"蕍": "yu",
"蕎": "qiao",
"蕏": "zhu",
"蕐": "hua",
"蕑": "jian",
"蕒": "mai",
"蕓": "yun",
"蕔": "bao",
"蕕": "you",
"蕖": "qu",
"蕗": "lu",
"蕘": "rao",
"蕙": "hui",
"蕚": "e",
"蕛": "ti",
"蕜": "fei",
"蕝": "jue",
"蕞": "zui",
"蕟": "fa",
"蕠": "ru",
"蕡": "fen",
"蕢": "kui",
"蕣": "shun",
"蕤": "rui",
"蕥": "ya",
"蕦": "xu",
"蕧": "fu",
"蕨": "jue",
"蕩": "dang",
"蕪": "wu",
"蕫": "dong",
"蕬": "si",
"蕭": "xiao",
"蕮": "xi",
"蕯": "sa",
"蕰": "yun",
"蕱": "shao",
"蕲": "qi",
"蕳": "jian",
"蕴": "yun",
"蕵": "sun",
"蕶": "ling",
"蕷": "yu",
"蕸": "xia",
"蕹": "weng",
"蕺": "ji",
"蕻": "hong",
"蕼": "si",
"蕽": "nong",
"蕾": "lei",
"蕿": "xuan",
"薀": "yun",
"薁": "yu",
"薂": "xi",
"薃": "hao",
"薄": "bao",
"薅": "hao",
"薆": "ai",
"薇": "wei",
"薈": "hui",
"薉": "hui",
"薊": "ji",
"薋": "ci",
"薌": "xiang",
"薍": "wan",
"薎": "mie",
"薏": "yi",
"薐": "leng",
"薑": "jiang",
"薒": "can",
"薓": "shen",
"薔": "qiang",
"薕": "lian",
"薖": "ke",
"薗": "yuan",
"薘": "da",
"薙": "ti",
"薚": "tang",
"薛": "xue",
"薜": "bi",
"薝": "zhan",
"薞": "sun",
"薟": "xian",
"薠": "fan",
"薡": "ding",
"薢": "xie",
"薣": "gu",
"薤": "xie",
"薥": "shu",
"薦": "jian",
"薧": "hao",
"薨": "hong",
"薩": "sa",
"薪": "xin",
"薫": "xun",
"薬": "yao",
"薭": "bai",
"薮": "sou",
"薯": "shu",
"薰": "xun",
"薱": "dui",
"薲": "pin",
"薳": "yuan",
"薴": "ning",
"薵": "chou",
"薶": "mai",
"薷": "ru",
"薸": "piao",
"薹": "tai",
"薺": "ji",
"薻": "zao",
"薼": "chen",
"薽": "zhen",
"薾": "er",
"薿": "ni",
"藀": "ying",
"藁": "gao",
"藂": "cong",
"藃": "xiao",
"藄": "qi",
"藅": "fa",
"藆": "jian",
"藇": "xu",
"藈": "kui",
"藉": "jie",
"藊": "bian",
"藋": "diao",
"藌": "mi",
"藍": "lan",
"藎": "jin",
"藏": "cang",
"藐": "miao",
"藑": "qiong",
"藒": "qi",
"藓": "xian",
"藔": "liao",
"藕": "ou",
"藖": "xian",
"藗": "su",
"藘": "lv",
"藙": "yi",
"藚": "xu",
"藛": "xie",
"藜": "li",
"藝": "yi",
"藞": "la",
"藟": "lei",
"藠": "jiao",
"藡": "di",
"藢": "zhi",
"藣": "bei",
"藤": "teng",
"藥": "yao",
"藦": "mo",
"藧": "huan",
"藨": "biao",
"藩": "fan",
"藪": "sou",
"藫": "tan",
"藬": "tui",
"藭": "qiong",
"藮": "qiao",
"藯": "wei",
"藰": "liu",
"藱": "hui",
"藲": "ou",
"藳": "gao",
"藴": "yun",
"藵": "bao",
"藶": "li",
"藷": "shu",
"藸": "zhu",
"藹": "ai",
"藺": "lin",
"藻": "zao",
"藼": "xuan",
"藽": "qin",
"藾": "lai",
"藿": "huo",
"蘀": "tuo",
"蘁": "wu",
"蘂": "rui",
"蘃": "rui",
"蘄": "qi",
"蘅": "heng",
"蘆": "lu",
"蘇": "su",
"蘈": "tui",
"蘉": "mang",
"蘊": "yun",
"蘋": "pin",
"蘌": "yu",
"蘍": "xun",
"蘎": "ji",
"蘏": "jiong",
"蘐": "xuan",
"蘑": "mo",
"蘒": "qiu",
"蘓": "su",
"蘔": "jiong",
"蘕": "peng",
"蘖": "nie",
"蘗": "bo",
"蘘": "rang",
"蘙": "yi",
"蘚": "xian",
"蘛": "yu",
"蘜": "ju",
"蘝": "lian",
"蘞": "lian",
"蘟": "yin",
"蘠": "qiang",
"蘡": "ying",
"蘢": "long",
"蘣": "tou",
"蘤": "hua",
"蘥": "yue",
"蘦": "ling",
"蘧": "qu",
"蘨": "yao",
"蘩": "fan",
"蘪": "mi",
"蘫": "lan",
"蘬": "gui",
"蘭": "lan",
"蘮": "ji",
"蘯": "dang",
"蘰": "man",
"蘱": "lei",
"蘲": "lei",
"蘳": "hui",
"蘴": "feng",
"蘵": "zhi",
"蘶": "wei",
"蘷": "kui",
"蘸": "zhan",
"蘹": "huai",
"蘺": "li",
"蘻": "ji",
"蘼": "mi",
"蘽": "lei",
"蘾": "huai",
"蘿": "luo",
"虀": "ji",
"虁": "kui",
"虂": "lu",
"虃": "jian",
"虅": "teng",
"虆": "lei",
"虇": "quan",
"虈": "xiao",
"虉": "yi",
"虊": "luan",
"虋": "men",
"虌": "bie",
"虍": "hu",
"虎": "hu",
"虏": "lu",
"虐": "nve",
"虑": "lv",
"虒": "si",
"虓": "xiao",
"虔": "qian",
"處": "chu",
"虖": "hu",
"虗": "xu",
"虘": "cuo",
"虙": "fu",
"虚": "xu",
"虛": "xu",
"虜": "lu",
"虝": "hu",
"虞": "yu",
"號": "hao",
"虠": "jiao",
"虡": "ju",
"虢": "guo",
"虣": "bao",
"虤": "yan",
"虥": "zhan",
"虦": "zhan",
"虧": "kui",
"虨": "bin",
"虩": "xi",
"虪": "shu",
"虫": "chong",
"虬": "qiu",
"虭": "diao",
"虮": "ji",
"虯": "qiu",
"虰": "ding",
"虱": "shi",
"虲": "xia",
"虳": "jue",
"虴": "zhe",
"虵": "she",
"虶": "yu",
"虷": "han",
"虸": "zi",
"虹": "hong",
"虺": "hui",
"虻": "meng",
"虼": "ge",
"虽": "sui",
"虾": "xia",
"虿": "chai",
"蚀": "shi",
"蚁": "yi",
"蚂": "ma",
"蚃": "xiang",
"蚄": "fang",
"蚅": "e",
"蚆": "ba",
"蚇": "chi",
"蚈": "qian",
"蚉": "wen",
"蚊": "wen",
"蚋": "rui",
"蚌": "bang",
"蚍": "pi",
"蚎": "yue",
"蚏": "yue",
"蚐": "jun",
"蚑": "qi",
"蚒": "tong",
"蚓": "yin",
"蚔": "qi",
"蚕": "can",
"蚖": "yuan",
"蚗": "jue",
"蚘": "hui",
"蚙": "qin",
"蚚": "qi",
"蚛": "zhong",
"蚜": "ya",
"蚝": "hao",
"蚞": "mu",
"蚟": "wang",
"蚠": "fen",
"蚡": "fen",
"蚢": "hang",
"蚣": "gong",
"蚤": "zao",
"蚥": "fu",
"蚦": "ran",
"蚧": "jie",
"蚨": "fu",
"蚩": "chi",
"蚪": "dou",
"蚫": "bao",
"蚬": "xian",
"蚭": "ni",
"蚮": "dai",
"蚯": "qiu",
"蚰": "you",
"蚱": "zha",
"蚲": "ping",
"蚳": "chi",
"蚴": "you",
"蚵": "ke",
"蚶": "han",
"蚷": "ju",
"蚸": "li",
"蚹": "fu",
"蚺": "ran",
"蚻": "zha",
"蚼": "gou",
"蚽": "pi",
"蚾": "pi",
"蚿": "xian",
"蛀": "zhu",
"蛁": "diao",
"蛂": "bie",
"蛃": "bing",
"蛄": "gu",
"蛅": "zhan",
"蛆": "qu",
"蛇": "she",
"蛈": "tie",
"蛉": "ling",
"蛊": "gu",
"蛋": "dan",
"蛌": "tun",
"蛍": "ying",
"蛎": "li",
"蛏": "cheng",
"蛐": "qu",
"蛑": "mou",
"蛒": "ge",
"蛓": "ci",
"蛔": "hui",
"蛕": "hui",
"蛖": "mang",
"蛗": "fu",
"蛘": "yang",
"蛙": "wa",
"蛚": "lie",
"蛛": "zhu",
"蛜": "yi",
"蛝": "xian",
"蛞": "kuo",
"蛟": "jiao",
"蛠": "li",
"蛡": "yi",
"蛢": "ping",
"蛣": "jie",
"蛤": "ge",
"蛥": "she",
"蛦": "yi",
"蛧": "wang",
"蛨": "mo",
"蛩": "qiong",
"蛪": "qie",
"蛫": "gui",
"蛬": "qiong",
"蛭": "zhi",
"蛮": "man",
"蛯": "lao",
"蛰": "zhe",
"蛱": "jia",
"蛲": "nao",
"蛳": "si",
"蛴": "qi",
"蛵": "xing",
"蛶": "jie",
"蛷": "qiu",
"蛸": "xiao",
"蛹": "yong",
"蛺": "jia",
"蛻": "tui",
"蛼": "che",
"蛽": "bei",
"蛾": "e",
"蛿": "han",
"蜀": "shu",
"蜁": "xuan",
"蜂": "feng",
"蜃": "shen",
"蜄": "shen",
"蜅": "fu",
"蜆": "xian",
"蜇": "zhe",
"蜈": "wu",
"蜉": "fu",
"蜊": "li",
"蜋": "lang",
"蜌": "bi",
"蜍": "chu",
"蜎": "yuan",
"蜏": "you",
"蜐": "jie",
"蜑": "dan",
"蜒": "yan",
"蜓": "ting",
"蜔": "dian",
"蜕": "tui",
"蜖": "hui",
"蜗": "wo",
"蜘": "zhi",
"蜙": "zhong",
"蜚": "fei",
"蜛": "ju",
"蜜": "mi",
"蜝": "qi",
"蜞": "qi",
"蜟": "yu",
"蜠": "jun",
"蜡": "la",
"蜢": "meng",
"蜣": "qiang",
"蜤": "si",
"蜥": "xi",
"蜦": "lun",
"蜧": "li",
"蜨": "die",
"蜩": "tiao",
"蜪": "tao",
"蜫": "kun",
"蜬": "han",
"蜭": "han",
"蜮": "yu",
"蜯": "bang",
"蜰": "fei",
"蜱": "pi",
"蜲": "wei",
"蜳": "dun",
"蜴": "yi",
"蜵": "yuan",
"蜶": "suo",
"蜷": "quan",
"蜸": "qian",
"蜹": "rui",
"蜺": "ni",
"蜻": "qing",
"蜼": "wei",
"蜽": "liang",
"蜾": "guo",
"蜿": "wan",
"蝀": "dong",
"蝁": "e",
"蝂": "ban",
"蝃": "di",
"蝄": "wang",
"蝅": "can",
"蝆": "yang",
"蝇": "ying",
"蝈": "guo",
"蝉": "chan",
"蝊": "ding",
"蝋": "la",
"蝌": "ke",
"蝍": "ji",
"蝎": "xie",
"蝏": "ting",
"蝐": "mao",
"蝑": "xu",
"蝒": "mian",
"蝓": "yu",
"蝔": "jie",
"蝕": "shi",
"蝖": "xuan",
"蝗": "huang",
"蝘": "yan",
"蝙": "bian",
"蝚": "rou",
"蝛": "wei",
"蝜": "fu",
"蝝": "yuan",
"蝞": "mei",
"蝟": "wei",
"蝠": "fu",
"蝡": "ru",
"蝢": "xie",
"蝣": "you",
"蝤": "qiu",
"蝥": "mao",
"蝦": "xia",
"蝧": "ying",
"蝨": "shi",
"蝩": "chong",
"蝪": "tang",
"蝫": "zhu",
"蝬": "zong",
"蝭": "di",
"蝮": "fu",
"蝯": "yuan",
"蝰": "kui",
"蝱": "meng",
"蝲": "la",
"蝳": "dai",
"蝴": "hu",
"蝵": "qiu",
"蝶": "die",
"蝷": "li",
"蝸": "wo",
"蝹": "yun",
"蝺": "qu",
"蝻": "nan",
"蝼": "lou",
"蝽": "chun",
"蝾": "rong",
"蝿": "ying",
"螀": "jiang",
"螁": "tui",
"螂": "lang",
"螃": "pang",
"螄": "si",
"螅": "xi",
"螆": "ci",
"螇": "xi",
"螈": "yuan",
"螉": "weng",
"螊": "lian",
"螋": "sou",
"螌": "ban",
"融": "rong",
"螎": "rong",
"螏": "ji",
"螐": "wu",
"螑": "xiu",
"螒": "han",
"螓": "qin",
"螔": "yi",
"螕": "bi",
"螖": "hua",
"螗": "tang",
"螘": "yi",
"螙": "du",
"螚": "nai",
"螛": "he",
"螜": "hu",
"螝": "gui",
"螞": "ma",
"螟": "ming",
"螠": "yi",
"螡": "wen",
"螢": "ying",
"螣": "teng",
"螤": "zhong",
"螥": "cang",
"螦": "sao",
"螧": "qi",
"螨": "man",
"螩": "dao",
"螪": "shang",
"螫": "shi",
"螬": "cao",
"螭": "chi",
"螮": "di",
"螯": "ao",
"螰": "lu",
"螱": "wei",
"螲": "die",
"螳": "tang",
"螴": "chen",
"螵": "piao",
"螶": "qu",
"螷": "pi",
"螸": "yu",
"螹": "chan",
"螺": "luo",
"螻": "lou",
"螼": "qin",
"螽": "zhong",
"螾": "yin",
"螿": "jiang",
"蟀": "shuai",
"蟁": "wen",
"蟂": "xiao",
"蟃": "wan",
"蟄": "zhe",
"蟅": "zhe",
"蟆": "ma",
"蟇": "ma",
"蟈": "guo",
"蟉": "liu",
"蟊": "mao",
"蟋": "xi",
"蟌": "cong",
"蟍": "li",
"蟎": "man",
"蟏": "xiao",
"蟐": "chan",
"蟑": "zhang",
"蟒": "mang",
"蟓": "xiang",
"蟔": "mo",
"蟕": "zui",
"蟖": "si",
"蟗": "qiu",
"蟘": "te",
"蟙": "zhi",
"蟚": "peng",
"蟛": "peng",
"蟜": "jiao",
"蟝": "qu",
"蟞": "bie",
"蟟": "liao",
"蟠": "pan",
"蟡": "gui",
"蟢": "xi",
"蟣": "ji",
"蟤": "zhuan",
"蟥": "huang",
"蟦": "fei",
"蟧": "lao",
"蟨": "jue",
"蟩": "jue",
"蟪": "hui",
"蟫": "yin",
"蟬": "chan",
"蟭": "jiao",
"蟮": "shan",
"蟯": "nao",
"蟰": "xiao",
"蟱": "wu",
"蟲": "chong",
"蟳": "xun",
"蟴": "si",
"蟵": "chu",
"蟶": "cheng",
"蟷": "dang",
"蟸": "li",
"蟹": "xie",
"蟺": "shan",
"蟻": "yi",
"蟼": "jing",
"蟽": "da",
"蟾": "chan",
"蟿": "qi",
"蠀": "ci",
"蠁": "xiang",
"蠂": "she",
"蠃": "luo",
"蠄": "qin",
"蠅": "ying",
"蠆": "chai",
"蠇": "li",
"蠈": "zei",
"蠉": "xuan",
"蠊": "lian",
"蠋": "zhu",
"蠌": "ze",
"蠍": "xie",
"蠎": "mang",
"蠏": "xie",
"蠐": "qi",
"蠑": "rong",
"蠒": "jian",
"蠓": "meng",
"蠔": "hao",
"蠕": "ru",
"蠖": "huo",
"蠗": "zhuo",
"蠘": "jie",
"蠙": "pin",
"蠚": "he",
"蠛": "mie",
"蠜": "fan",
"蠝": "lei",
"蠞": "jie",
"蠟": "la",
"蠠": "min",
"蠡": "li",
"蠢": "chun",
"蠣": "li",
"蠤": "qiu",
"蠥": "nie",
"蠦": "lu",
"蠧": "du",
"蠨": "xiao",
"蠩": "zhu",
"蠪": "long",
"蠫": "li",
"蠬": "long",
"蠭": "feng",
"蠮": "ye",
"蠯": "pi",
"蠰": "nang",
"蠱": "gu",
"蠲": "juan",
"蠳": "ying",
"蠴": "shu",
"蠵": "xi",
"蠶": "can",
"蠷": "qu",
"蠸": "quan",
"蠹": "du",
"蠺": "can",
"蠻": "man",
"蠼": "qu",
"蠽": "jie",
"蠾": "zhu",
"蠿": "zhuo",
"血": "xie",
"衁": "huang",
"衂": "nv",
"衃": "pei",
"衄": "nv",
"衅": "xin",
"衆": "zhong",
"衇": "mai",
"衈": "er",
"衉": "ke",
"衊": "mie",
"衋": "xi",
"行": "xing",
"衍": "yan",
"衎": "kan",
"衏": "yuan",
"衐": "qu",
"衑": "ling",
"衒": "xuan",
"術": "shu",
"衔": "xian",
"衕": "tong",
"衖": "xiang",
"街": "jie",
"衘": "xian",
"衙": "ya",
"衚": "hu",
"衛": "wei",
"衜": "dao",
"衝": "chong",
"衞": "wei",
"衟": "dao",
"衠": "zhun",
"衡": "heng",
"衢": "qu",
"衣": "yi",
"衤": "yi",
"补": "bu",
"衦": "gan",
"衧": "yu",
"表": "biao",
"衩": "cha",
"衪": "yi",
"衫": "shan",
"衬": "chen",
"衭": "fu",
"衮": "gun",
"衯": "fen",
"衰": "shuai",
"衱": "jie",
"衲": "na",
"衳": "zhong",
"衴": "dan",
"衵": "ri",
"衶": "zhong",
"衷": "zhong",
"衸": "jie",
"衹": "zhi",
"衺": "xie",
"衻": "ran",
"衼": "zhi",
"衽": "ren",
"衾": "qin",
"衿": "jin",
"袀": "jun",
"袁": "yuan",
"袂": "mei",
"袃": "chai",
"袄": "ao",
"袅": "niao",
"袆": "hui",
"袇": "ran",
"袈": "jia",
"袉": "tuo",
"袊": "ling",
"袋": "dai",
"袌": "bao",
"袍": "pao",
"袎": "yao",
"袏": "zuo",
"袐": "bi",
"袑": "shao",
"袒": "tan",
"袓": "ju",
"袔": "he",
"袕": "xue",
"袖": "xiu",
"袗": "zhen",
"袘": "yi",
"袙": "pa",
"袚": "fu",
"袛": "di",
"袜": "wa",
"袝": "fu",
"袞": "gun",
"袟": "zhi",
"袠": "zhi",
"袡": "ran",
"袢": "pan",
"袣": "yi",
"袤": "mao",
"袥": "tuo",
"袦": "na",
"袧": "gou",
"袨": "xuan",
"袩": "zhe",
"袪": "qu",
"被": "bei",
"袬": "yu",
"袭": "xi",
"袮": "mi",
"袯": "bo",
"袰": "bo",
"袱": "fu",
"袲": "chi",
"袳": "chi",
"袴": "ku",
"袵": "ren",
"袶": "peng",
"袷": "jia",
"袸": "jian",
"袹": "bo",
"袺": "jie",
"袻": "er",
"袼": "ge",
"袽": "ru",
"袾": "zhu",
"袿": "gui",
"裀": "yin",
"裁": "cai",
"裂": "lie",
"裃": "ka",
"裄": "hang",
"装": "zhuang",
"裆": "dang",
"裇": "xu",
"裈": "kun",
"裉": "ken",
"裊": "niao",
"裋": "shu",
"裌": "jia",
"裍": "kun",
"裎": "cheng",
"裏": "li",
"裐": "juan",
"裑": "shen",
"裒": "pou",
"裓": "ge",
"裔": "yi",
"裕": "yu",
"裖": "zhen",
"裗": "liu",
"裘": "qiu",
"裙": "qun",
"裚": "ji",
"裛": "yi",
"補": "bu",
"裝": "zhuang",
"裞": "shui",
"裟": "sha",
"裠": "qun",
"裡": "li",
"裢": "lian",
"裣": "lian",
"裤": "ku",
"裥": "jian",
"裦": "bao",
"裧": "chan",
"裨": "bi",
"裩": "kun",
"裪": "tao",
"裫": "yuan",
"裬": "ling",
"裭": "chi",
"裮": "chang",
"裯": "chou",
"裰": "duo",
"裱": "biao",
"裲": "liang",
"裳": "chang",
"裴": "pei",
"裵": "pei",
"裶": "fei",
"裷": "yuan",
"裸": "luo",
"裹": "guo",
"裺": "yan",
"裻": "du",
"裼": "xi",
"製": "zhi",
"裾": "ju",
"裿": "yi",
"褀": "qi",
"褁": "guo",
"褂": "gua",
"褃": "ken",
"褄": "qi",
"褅": "ti",
"褆": "ti",
"複": "fu",
"褈": "chong",
"褉": "xie",
"褊": "bian",
"褋": "die",
"褌": "kun",
"褍": "duan",
"褎": "xiu",
"褏": "xiu",
"褐": "he",
"褑": "yuan",
"褒": "bao",
"褓": "bao",
"褔": "fu",
"褕": "yu",
"褖": "tuan",
"褗": "yan",
"褘": "hui",
"褙": "bei",
"褚": "zhu",
"褛": "lv",
"褜": "pao",
"褝": "dan",
"褞": "yun",
"褟": "ta",
"褠": "gou",
"褡": "da",
"褢": "huai",
"褣": "rong",
"褤": "yuan",
"褥": "ru",
"褦": "nai",
"褧": "jiong",
"褨": "suo",
"褩": "ban",
"褪": "tui",
"褫": "chi",
"褬": "sang",
"褭": "niao",
"褮": "ying",
"褯": "jie",
"褰": "qian",
"褱": "huai",
"褲": "ku",
"褳": "lian",
"褴": "lan",
"褵": "li",
"褶": "zhe",
"褷": "shi",
"褸": "lv",
"褹": "yi",
"褺": "die",
"褻": "xie",
"褼": "xian",
"褽": "wei",
"褾": "biao",
"褿": "cao",
"襀": "ji",
"襁": "qiang",
"襂": "sen",
"襃": "bao",
"襄": "xiang",
"襅": "bi",
"襆": "fu",
"襇": "jian",
"襈": "zhuan",
"襉": "jian",
"襊": "cui",
"襋": "ji",
"襌": "dan",
"襍": "za",
"襎": "fan",
"襏": "bo",
"襐": "xiang",
"襑": "xin",
"襒": "bie",
"襓": "rao",
"襔": "man",
"襕": "lan",
"襖": "ao",
"襗": "ze",
"襘": "gui",
"襙": "cao",
"襚": "sui",
"襛": "nong",
"襜": "chan",
"襝": "lian",
"襞": "bi",
"襟": "jin",
"襠": "dang",
"襡": "shu",
"襢": "tan",
"襣": "bi",
"襤": "lan",
"襥": "fu",
"襦": "ru",
"襧": "zhi",
"襩": "shu",
"襪": "wa",
"襫": "shi",
"襬": "bai",
"襭": "xie",
"襮": "bo",
"襯": "chen",
"襰": "lai",
"襱": "long",
"襲": "xi",
"襳": "xian",
"襴": "lan",
"襵": "zhe",
"襶": "dai",
"襷": "ju",
"襸": "zan",
"襹": "shi",
"襺": "jian",
"襻": "pan",
"襼": "yi",
"襽": "lan",
"襾": "ya",
"西": "xi",
"覀": "ya",
"要": "yao",
"覂": "feng",
"覃": "tan",
"覄": "fu",
"覅": "fiao",
"覆": "fu",
"覇": "ba",
"覈": "he",
"覉": "ji",
"覊": "ji",
"見": "jian",
"覌": "guan",
"覍": "bian",
"覎": "yan",
"規": "gui",
"覐": "jue",
"覑": "pian",
"覒": "mao",
"覓": "mi",
"覔": "mi",
"覕": "pie",
"視": "shi",
"覗": "si",
"覘": "chan",
"覙": "zhen",
"覚": "jue",
"覛": "mi",
"覜": "tiao",
"覝": "lian",
"覞": "yao",
"覟": "zhi",
"覠": "jun",
"覡": "xi",
"覢": "shan",
"覣": "wei",
"覤": "xi",
"覥": "tian",
"覦": "yu",
"覧": "lan",
"覨": "e",
"覩": "du",
"親": "qin",
"覫": "pang",
"覬": "ji",
"覭": "ming",
"覮": "ying",
"覯": "gou",
"覰": "qu",
"覱": "zhan",
"覲": "jin",
"観": "guan",
"覴": "deng",
"覵": "jian",
"覶": "luo",
"覷": "qu",
"覸": "jian",
"覹": "wei",
"覺": "jue",
"覻": "qu",
"覼": "luo",
"覽": "lan",
"覾": "shen",
"覿": "di",
"觀": "guan",
"见": "jian",
"观": "guan",
"觃": "yan",
"规": "gui",
"觅": "mi",
"视": "shi",
"觇": "chan",
"览": "lan",
"觉": "jue",
"觊": "ji",
"觋": "xi",
"觌": "di",
"觍": "tian",
"觎": "yu",
"觏": "gou",
"觐": "jin",
"觑": "qu",
"角": "jiao",
"觓": "qiu",
"觔": "jin",
"觕": "cu",
"觖": "jue",
"觗": "zhi",
"觘": "chao",
"觙": "ji",
"觚": "gu",
"觛": "dan",
"觜": "zi",
"觝": "di",
"觞": "shang",
"觟": "hua",
"觠": "quan",
"觡": "ge",
"觢": "shi",
"解": "jie",
"觤": "gui",
"觥": "gong",
"触": "chu",
"觧": "jie",
"觨": "hun",
"觩": "qiu",
"觪": "xing",
"觫": "su",
"觬": "ni",
"觭": "ji",
"觮": "jue",
"觯": "zhi",
"觰": "zha",
"觱": "bi",
"觲": "xing",
"觳": "hu",
"觴": "shang",
"觵": "gong",
"觶": "zhi",
"觷": "xue",
"觸": "chu",
"觹": "xi",
"觺": "yi",
"觻": "li",
"觼": "jue",
"觽": "xi",
"觾": "yan",
"觿": "xi",
"言": "yan",
"訁": "yan",
"訂": "ding",
"訃": "fu",
"訄": "qiu",
"訅": "qiu",
"訆": "jiao",
"訇": "hong",
"計": "ji",
"訉": "fan",
"訊": "xun",
"訋": "diao",
"訌": "hong",
"訍": "chai",
"討": "tao",
"訏": "xu",
"訐": "jie",
"訑": "dan",
"訒": "ren",
"訓": "xun",
"訔": "yin",
"訕": "shan",
"訖": "qi",
"託": "tuo",
"記": "ji",
"訙": "xun",
"訚": "yin",
"訛": "e",
"訜": "fen",
"訝": "ya",
"訞": "yao",
"訟": "song",
"訠": "shen",
"訡": "yin",
"訢": "xin",
"訣": "jue",
"訤": "xiao",
"訥": "ne",
"訦": "chen",
"訧": "you",
"訨": "zhi",
"訩": "xiong",
"訪": "fang",
"訫": "xin",
"訬": "chao",
"設": "she",
"訮": "yan",
"訯": "sa",
"訰": "zhun",
"許": "xu",
"訲": "yi",
"訳": "yi",
"訴": "su",
"訵": "chi",
"訶": "he",
"訷": "shen",
"訸": "he",
"訹": "xu",
"診": "zhen",
"註": "zhu",
"証": "zheng",
"訽": "gou",
"訾": "zi",
"訿": "zi",
"詀": "zhan",
"詁": "gu",
"詂": "fu",
"詃": "jian",
"詄": "die",
"詅": "ling",
"詆": "di",
"詇": "yang",
"詈": "li",
"詉": "nao",
"詊": "pan",
"詋": "zhou",
"詌": "gan",
"詍": "yi",
"詎": "ju",
"詏": "yao",
"詐": "zha",
"詑": "tuo",
"詒": "yi",
"詓": "qu",
"詔": "zhao",
"評": "ping",
"詖": "bi",
"詗": "xiong",
"詘": "qu",
"詙": "ba",
"詚": "da",
"詛": "zu",
"詜": "tao",
"詝": "zhu",
"詞": "ci",
"詟": "zhe",
"詠": "yong",
"詡": "xu",
"詢": "xun",
"詣": "yi",
"詤": "huang",
"詥": "he",
"試": "shi",
"詧": "cha",
"詨": "xiao",
"詩": "shi",
"詪": "hen",
"詫": "cha",
"詬": "gou",
"詭": "gui",
"詮": "quan",
"詯": "hui",
"詰": "jie",
"話": "hua",
"該": "gai",
"詳": "xiang",
"詴": "wei",
"詵": "shen",
"詶": "chou",
"詷": "tong",
"詸": "mi",
"詹": "zhan",
"詺": "ming",
"詻": "luo",
"詼": "hui",
"詽": "yan",
"詾": "xiong",
"詿": "gua",
"誀": "er",
"誁": "bing",
"誂": "tiao",
"誃": "yi",
"誄": "lei",
"誅": "zhu",
"誆": "kuang",
"誇": "kua",
"誈": "wu",
"誉": "yu",
"誊": "teng",
"誋": "ji",
"誌": "zhi",
"認": "ren",
"誎": "cu",
"誏": "lang",
"誐": "e",
"誑": "kuang",
"誒": "ei",
"誓": "shi",
"誔": "ting",
"誕": "dan",
"誖": "bei",
"誗": "chan",
"誘": "you",
"誙": "keng",
"誚": "qiao",
"誛": "qin",
"誜": "shua",
"誝": "an",
"語": "yu",
"誟": "xiao",
"誠": "cheng",
"誡": "jie",
"誢": "xian",
"誣": "wu",
"誤": "wu",
"誥": "gao",
"誦": "song",
"誧": "bu",
"誨": "hui",
"誩": "jing",
"說": "shuo",
"誫": "zhen",
"説": "shuo",
"読": "du",
"誮": "hua",
"誯": "chang",
"誰": "shui",
"誱": "jie",
"課": "ke",
"誳": "qu",
"誴": "cong",
"誵": "xiao",
"誶": "sui",
"誷": "wang",
"誸": "xian",
"誹": "fei",
"誺": "chi",
"誻": "ta",
"誼": "yi",
"誽": "ni",
"誾": "yin",
"調": "diao",
"諀": "pi",
"諁": "zhuo",
"諂": "chan",
"諃": "chen",
"諄": "zhun",
"諅": "ji",
"諆": "qi",
"談": "tan",
"諈": "zhui",
"諉": "wei",
"諊": "ju",
"請": "qing",
"諌": "dong",
"諍": "zheng",
"諎": "ze",
"諏": "zou",
"諐": "qian",
"諑": "zhuo",
"諒": "liang",
"諓": "jian",
"諔": "chu",
"諕": "xia",
"論": "lun",
"諗": "shen",
"諘": "biao",
"諙": "hua",
"諚": "bian",
"諛": "yu",
"諜": "die",
"諝": "xu",
"諞": "pian",
"諟": "shi",
"諠": "xuan",
"諡": "shi",
"諢": "hun",
"諣": "hua",
"諤": "e",
"諥": "zhong",
"諦": "di",
"諧": "xie",
"諨": "fu",
"諩": "pu",
"諪": "ting",
"諫": "jian",
"諬": "qi",
"諭": "yu",
"諮": "zi",
"諯": "zhuan",
"諰": "xi",
"諱": "hui",
"諲": "yin",
"諳": "an",
"諴": "xian",
"諵": "nan",
"諶": "chen",
"諷": "feng",
"諸": "zhu",
"諹": "yang",
"諺": "yan",
"諻": "huang",
"諼": "xuan",
"諽": "ge",
"諾": "nuo",
"諿": "xu",
"謀": "mou",
"謁": "ye",
"謂": "wei",
"謃": "xing",
"謄": "teng",
"謅": "zhou",
"謆": "shan",
"謇": "jian",
"謈": "bo",
"謉": "kui",
"謊": "huang",
"謋": "huo",
"謌": "ge",
"謍": "ying",
"謎": "mi",
"謏": "xiao",
"謐": "mi",
"謑": "xi",
"謒": "qiang",
"謓": "chen",
"謔": "xue",
"謕": "ti",
"謖": "su",
"謗": "bang",
"謘": "chi",
"謙": "qian",
"謚": "shi",
"講": "jiang",
"謜": "yuan",
"謝": "xie",
"謞": "he",
"謟": "tao",
"謠": "yao",
"謡": "yao",
"謢": "lu",
"謣": "yu",
"謤": "biao",
"謥": "cong",
"謦": "qing",
"謧": "li",
"謨": "mo",
"謩": "mo",
"謪": "shang",
"謫": "zhe",
"謬": "miu",
"謭": "jian",
"謮": "ze",
"謯": "jie",
"謰": "lian",
"謱": "lou",
"謲": "can",
"謳": "ou",
"謴": "gun",
"謵": "xi",
"謶": "zhuo",
"謷": "ao",
"謸": "ao",
"謹": "jin",
"謺": "zhe",
"謻": "yi",
"謼": "hu",
"謽": "jiang",
"謾": "man",
"謿": "chao",
"譀": "han",
"譁": "hua",
"譂": "chan",
"譃": "xu",
"譄": "zeng",
"譅": "se",
"譆": "xi",
"譇": "zha",
"譈": "dui",
"證": "zheng",
"譊": "nao",
"譋": "lan",
"譌": "e",
"譍": "ying",
"譎": "jue",
"譏": "ji",
"譐": "zun",
"譑": "jiao",
"譒": "bo",
"譓": "hui",
"譔": "zhuan",
"譕": "wu",
"譖": "zen",
"譗": "zha",
"識": "shi",
"譙": "qiao",
"譚": "tan",
"譛": "jian",
"譜": "pu",
"譝": "sheng",
"譞": "xuan",
"譟": "zao",
"譠": "tan",
"譡": "dang",
"譢": "sui",
"譣": "xian",
"譤": "ji",
"譥": "jiao",
"警": "jing",
"譧": "zhan",
"譨": "nong",
"譩": "yi",
"譪": "ai",
"譫": "zhan",
"譬": "pi",
"譭": "hui",
"譮": "hua",
"譯": "yi",
"議": "yi",
"譱": "shan",
"譲": "rang",
"譳": "rou",
"譴": "qian",
"譵": "dui",
"譶": "ta",
"護": "hu",
"譸": "zhou",
"譹": "hao",
"譺": "ai",
"譻": "ying",
"譼": "jian",
"譽": "yu",
"譾": "jian",
"譿": "hui",
"讀": "du",
"讁": "zhe",
"讂": "juan",
"讃": "zan",
"讄": "lei",
"讅": "shen",
"讆": "wei",
"讇": "chan",
"讈": "li",
"讉": "yi",
"變": "bian",
"讋": "zhe",
"讌": "yan",
"讍": "e",
"讎": "chou",
"讏": "wei",
"讐": "chou",
"讑": "yao",
"讒": "chan",
"讓": "rang",
"讔": "yin",
"讕": "lan",
"讖": "chen",
"讗": "xie",
"讘": "nie",
"讙": "huan",
"讚": "zan",
"讛": "yi",
"讜": "dang",
"讝": "zhan",
"讞": "yan",
"讟": "du",
"讠": "yan",
"计": "ji",
"订": "ding",
"讣": "fu",
"认": "ren",
"讥": "ji",
"讦": "jie",
"讧": "hong",
"讨": "tao",
"让": "rang",
"讪": "shan",
"讫": "qi",
"讬": "tuo",
"训": "xun",
"议": "yi",
"讯": "xun",
"记": "ji",
"讱": "ren",
"讲": "jiang",
"讳": "hui",
"讴": "ou",
"讵": "ju",
"讶": "ya",
"讷": "ne",
"许": "xu",
"讹": "e",
"论": "lun",
"讻": "xiong",
"讼": "song",
"讽": "feng",
"设": "she",
"访": "fang",
"诀": "jue",
"证": "zheng",
"诂": "gu",
"诃": "he",
"评": "ping",
"诅": "zu",
"识": "shi",
"诇": "xiong",
"诈": "zha",
"诉": "su",
"诊": "zhen",
"诋": "di",
"诌": "zhou",
"词": "ci",
"诎": "qu",
"诏": "zhao",
"诐": "bi",
"译": "yi",
"诒": "yi",
"诓": "kuang",
"诔": "lei",
"试": "shi",
"诖": "gua",
"诗": "shi",
"诘": "jie",
"诙": "hui",
"诚": "cheng",
"诛": "zhu",
"诜": "shen",
"话": "hua",
"诞": "dan",
"诟": "gou",
"诠": "quan",
"诡": "gui",
"询": "xun",
"诣": "yi",
"诤": "zheng",
"该": "gai",
"详": "xiang",
"诧": "cha",
"诨": "hun",
"诩": "xu",
"诪": "zhou",
"诫": "jie",
"诬": "wu",
"语": "yu",
"诮": "qiao",
"误": "wu",
"诰": "gao",
"诱": "you",
"诲": "hui",
"诳": "kuang",
"说": "shuo",
"诵": "song",
"诶": "ei",
"请": "qing",
"诸": "zhu",
"诹": "zou",
"诺": "nuo",
"读": "du",
"诼": "zhuo",
"诽": "fei",
"课": "ke",
"诿": "wei",
"谀": "yu",
"谁": "shui",
"谂": "shen",
"调": "tiao",
"谄": "chan",
"谅": "liang",
"谆": "zhun",
"谇": "sui",
"谈": "tan",
"谉": "shen",
"谊": "yi",
"谋": "mou",
"谌": "chen",
"谍": "die",
"谎": "huang",
"谏": "jian",
"谐": "xie",
"谑": "xue",
"谒": "ye",
"谓": "wei",
"谔": "e",
"谕": "yu",
"谖": "xuan",
"谗": "chan",
"谘": "zi",
"谙": "an",
"谚": "yan",
"谛": "di",
"谜": "mi",
"谝": "pian",
"谞": "xu",
"谟": "mo",
"谠": "dang",
"谡": "su",
"谢": "xie",
"谣": "yao",
"谤": "bang",
"谥": "shi",
"谦": "qian",
"谧": "mi",
"谨": "jin",
"谩": "man",
"谪": "zhe",
"谫": "jian",
"谬": "miu",
"谭": "tan",
"谮": "zen",
"谯": "qiao",
"谰": "lan",
"谱": "pu",
"谲": "jue",
"谳": "yan",
"谴": "qian",
"谵": "zhan",
"谶": "chen",
"谷": "gu",
"谸": "qian",
"谹": "hong",
"谺": "xia",
"谻": "ji",
"谼": "hong",
"谽": "han",
"谾": "hong",
"谿": "xi",
"豀": "xi",
"豁": "huo",
"豂": "liao",
"豃": "han",
"豄": "du",
"豅": "long",
"豆": "dou",
"豇": "jiang",
"豈": "qi",
"豉": "chi",
"豊": "li",
"豋": "deng",
"豌": "wan",
"豍": "bi",
"豎": "shu",
"豏": "xian",
"豐": "feng",
"豑": "zhi",
"豒": "zhi",
"豓": "yan",
"豔": "yan",
"豕": "shi",
"豖": "chu",
"豗": "hui",
"豘": "tun",
"豙": "yi",
"豚": "tun",
"豛": "yi",
"豜": "jian",
"豝": "ba",
"豞": "hou",
"豟": "e",
"豠": "chu",
"象": "xiang",
"豢": "huan",
"豣": "jian",
"豤": "ken",
"豥": "gai",
"豦": "ju",
"豧": "fu",
"豨": "xi",
"豩": "bin",
"豪": "hao",
"豫": "yu",
"豬": "zhu",
"豭": "jia",
"豮": "fen",
"豯": "xi",
"豰": "hu",
"豱": "wen",
"豲": "huan",
"豳": "bin",
"豴": "di",
"豵": "zong",
"豶": "fen",
"豷": "yi",
"豸": "zhi",
"豹": "bao",
"豺": "chai",
"豻": "an",
"豼": "pi",
"豽": "na",
"豾": "pi",
"豿": "gou",
"貀": "na",
"貁": "you",
"貂": "diao",
"貃": "mo",
"貄": "si",
"貅": "xiu",
"貆": "huan",
"貇": "ken",
"貈": "he",
"貉": "he",
"貊": "mo",
"貋": "an",
"貌": "mao",
"貍": "li",
"貎": "ni",
"貏": "bi",
"貐": "yu",
"貑": "jia",
"貒": "tuan",
"貓": "mao",
"貔": "pi",
"貕": "xi",
"貖": "yi",
"貗": "ju",
"貘": "mo",
"貙": "chu",
"貚": "tan",
"貛": "huan",
"貜": "jue",
"貝": "bei",
"貞": "zhen",
"貟": "yuan",
"負": "fu",
"財": "cai",
"貢": "gong",
"貣": "dai",
"貤": "yi",
"貥": "hang",
"貦": "wan",
"貧": "pin",
"貨": "huo",
"販": "fan",
"貪": "tan",
"貫": "guan",
"責": "ze",
"貭": "zhi",
"貮": "er",
"貯": "zhu",
"貰": "shi",
"貱": "bi",
"貲": "zi",
"貳": "er",
"貴": "gui",
"貵": "pian",
"貶": "bian",
"買": "mai",
"貸": "dai",
"貹": "sheng",
"貺": "kuang",
"費": "fei",
"貼": "tie",
"貽": "yi",
"貾": "chi",
"貿": "mao",
"賀": "he",
"賁": "bi",
"賂": "lu",
"賃": "lin",
"賄": "hui",
"賅": "gai",
"賆": "pian",
"資": "zi",
"賈": "jia",
"賉": "xu",
"賊": "zei",
"賋": "jiao",
"賌": "gai",
"賍": "zang",
"賎": "jian",
"賏": "ying",
"賐": "jun",
"賑": "zhen",
"賒": "she",
"賓": "bin",
"賔": "bin",
"賕": "qiu",
"賖": "she",
"賗": "chuan",
"賘": "zang",
"賙": "zhou",
"賚": "lai",
"賛": "zan",
"賜": "ci",
"賝": "chen",
"賞": "shang",
"賟": "tian",
"賠": "pei",
"賡": "geng",
"賢": "xian",
"賣": "mai",
"賤": "jian",
"賥": "sui",
"賦": "fu",
"賧": "dan",
"賨": "cong",
"賩": "cong",
"質": "zhi",
"賫": "ji",
"賬": "zhang",
"賭": "du",
"賮": "jin",
"賯": "xiong",
"賰": "chun",
"賱": "yun",
"賲": "bao",
"賳": "zai",
"賴": "lai",
"賵": "feng",
"賶": "cang",
"賷": "ji",
"賸": "sheng",
"賹": "ai",
"賺": "zhuan",
"賻": "fu",
"購": "gou",
"賽": "sai",
"賾": "ze",
"賿": "liao",
"贀": "yi",
"贁": "bai",
"贂": "chen",
"贃": "wan",
"贄": "zhi",
"贅": "zhui",
"贆": "biao",
"贇": "yun",
"贈": "zeng",
"贉": "dan",
"贊": "zan",
"贋": "yan",
"贌": "pu",
"贍": "shan",
"贎": "wan",
"贏": "ying",
"贐": "jin",
"贑": "gan",
"贒": "xian",
"贓": "zang",
"贔": "bi",
"贕": "du",
"贖": "shu",
"贗": "yan",
"贘": "shang",
"贙": "xuan",
"贚": "long",
"贛": "gan",
"贜": "zang",
"贝": "bei",
"贞": "zhen",
"负": "fu",
"贠": "yuan",
"贡": "gong",
"财": "cai",
"责": "ze",
"贤": "xian",
"败": "bai",
"账": "zhang",
"货": "huo",
"质": "zhi",
"贩": "fan",
"贪": "tan",
"贫": "pin",
"贬": "bian",
"购": "gou",
"贮": "zhu",
"贯": "guan",
"贰": "er",
"贱": "jian",
"贲": "bi",
"贳": "shi",
"贴": "tie",
"贵": "gui",
"贶": "kuang",
"贷": "dai",
"贸": "mao",
"费": "fei",
"贺": "he",
"贻": "yi",
"贼": "zei",
"贽": "zhi",
"贾": "gu",
"贿": "hui",
"赀": "zi",
"赁": "lin",
"赂": "lu",
"赃": "zang",
"资": "zi",
"赅": "gai",
"赆": "jin",
"赇": "qiu",
"赈": "zhen",
"赉": "lai",
"赊": "she",
"赋": "fu",
"赌": "du",
"赍": "ji",
"赎": "shu",
"赏": "shang",
"赐": "ci",
"赑": "bi",
"赒": "zhou",
"赓": "geng",
"赔": "pei",
"赕": "dan",
"赖": "lai",
"赗": "feng",
"赘": "zhui",
"赙": "fu",
"赚": "zhuan",
"赛": "sai",
"赜": "ze",
"赝": "yan",
"赞": "zan",
"赟": "yun",
"赠": "zeng",
"赡": "shan",
"赢": "ying",
"赣": "gan",
"赤": "chi",
"赥": "xi",
"赦": "she",
"赧": "nan",
"赨": "tong",
"赩": "xi",
"赪": "cheng",
"赫": "he",
"赬": "cheng",
"赭": "zhe",
"赮": "xia",
"赯": "tang",
"走": "zou",
"赱": "zou",
"赲": "li",
"赳": "jiu",
"赴": "fu",
"赵": "zhao",
"赶": "gan",
"起": "qi",
"赸": "shan",
"赹": "qiong",
"赺": "yin",
"赻": "xian",
"赼": "zi",
"赽": "jue",
"赾": "qin",
"赿": "chi",
"趀": "ci",
"趁": "chen",
"趂": "chen",
"趃": "die",
"趄": "qie",
"超": "chao",
"趆": "di",
"趇": "xi",
"趈": "zhan",
"趉": "jue",
"越": "yue",
"趋": "qu",
"趌": "ji",
"趍": "qu",
"趎": "chu",
"趏": "gua",
"趐": "xue",
"趑": "zi",
"趒": "tiao",
"趓": "duo",
"趔": "lie",
"趕": "gan",
"趖": "suo",
"趗": "cu",
"趘": "xi",
"趙": "zhao",
"趚": "su",
"趛": "yin",
"趜": "ju",
"趝": "jian",
"趞": "que",
"趟": "tang",
"趠": "chuo",
"趡": "cui",
"趢": "lu",
"趣": "qu",
"趤": "dang",
"趥": "qiu",
"趦": "zi",
"趧": "ti",
"趨": "qu",
"趩": "chi",
"趪": "huang",
"趫": "qiao",
"趬": "qiao",
"趭": "jiao",
"趮": "zao",
"趯": "ti",
"趰": "er",
"趱": "zan",
"趲": "zan",
"足": "zu",
"趴": "pa",
"趵": "bao",
"趶": "kua",
"趷": "ke",
"趸": "dun",
"趹": "jue",
"趺": "fu",
"趻": "chen",
"趼": "jian",
"趽": "fang",
"趾": "zhi",
"趿": "ta",
"跀": "yue",
"跁": "ba",
"跂": "qi",
"跃": "yue",
"跄": "qiang",
"跅": "tuo",
"跆": "tai",
"跇": "yi",
"跈": "jian",
"跉": "ling",
"跊": "mei",
"跋": "ba",
"跌": "die",
"跍": "ku",
"跎": "tuo",
"跏": "jia",
"跐": "ci",
"跑": "pao",
"跒": "qia",
"跓": "zhu",
"跔": "ju",
"跕": "dian",
"跖": "zhi",
"跗": "fu",
"跘": "pan",
"跙": "ju",
"跚": "shan",
"跛": "bo",
"跜": "ni",
"距": "ju",
"跞": "li",
"跟": "gen",
"跠": "yi",
"跡": "ji",
"跢": "dai",
"跣": "xian",
"跤": "jiao",
"跥": "duo",
"跦": "zhu",
"跧": "quan",
"跨": "kua",
"跩": "zhuai",
"跪": "gui",
"跫": "qiong",
"跬": "kui",
"跭": "xiang",
"跮": "die",
"路": "lu",
"跰": "pian",
"跱": "zhi",
"跲": "jie",
"跳": "tiao",
"跴": "cai",
"践": "jian",
"跶": "da",
"跷": "qiao",
"跸": "bi",
"跹": "xian",
"跺": "duo",
"跻": "ji",
"跼": "ju",
"跽": "ji",
"跾": "shu",
"跿": "tu",
"踀": "chuo",
"踁": "jing",
"踂": "nie",
"踃": "xiao",
"踄": "bu",
"踅": "xue",
"踆": "cun",
"踇": "mu",
"踈": "shu",
"踉": "liang",
"踊": "yong",
"踋": "jiao",
"踌": "chou",
"踍": "qiao",
"踎": "mou",
"踏": "ta",
"踐": "jian",
"踑": "ji",
"踒": "wo",
"踓": "wei",
"踔": "chuo",
"踕": "jie",
"踖": "ji",
"踗": "nie",
"踘": "ju",
"踙": "nie",
"踚": "lun",
"踛": "lu",
"踜": "leng",
"踝": "huai",
"踞": "ju",
"踟": "chi",
"踠": "wan",
"踡": "quan",
"踢": "ti",
"踣": "bo",
"踤": "zu",
"踥": "qie",
"踦": "qi",
"踧": "cu",
"踨": "zong",
"踩": "cai",
"踪": "zong",
"踫": "peng",
"踬": "zhi",
"踭": "zheng",
"踮": "dian",
"踯": "zhi",
"踰": "yu",
"踱": "duo",
"踲": "dun",
"踳": "chuan",
"踴": "yong",
"踵": "zhong",
"踶": "di",
"踷": "zhe",
"踸": "chen",
"踹": "chuai",
"踺": "jian",
"踻": "gua",
"踼": "tang",
"踽": "ju",
"踾": "fu",
"踿": "cu",
"蹀": "die",
"蹁": "pian",
"蹂": "rou",
"蹃": "nuo",
"蹄": "ti",
"蹅": "cha",
"蹆": "tui",
"蹇": "jian",
"蹈": "dao",
"蹉": "cuo",
"蹊": "qi",
"蹋": "ta",
"蹌": "qiang",
"蹍": "nian",
"蹎": "dian",
"蹏": "ti",
"蹐": "ji",
"蹑": "nie",
"蹒": "pan",
"蹓": "liu",
"蹔": "zan",
"蹕": "bi",
"蹖": "chong",
"蹗": "lu",
"蹘": "liao",
"蹙": "cu",
"蹚": "tang",
"蹛": "dai",
"蹜": "su",
"蹝": "xi",
"蹞": "kui",
"蹟": "ji",
"蹠": "zhi",
"蹡": "qiang",
"蹢": "di",
"蹣": "pan",
"蹤": "zong",
"蹥": "lian",
"蹦": "beng",
"蹧": "zao",
"蹨": "nian",
"蹩": "bie",
"蹪": "tui",
"蹫": "ju",
"蹬": "deng",
"蹭": "ceng",
"蹮": "xian",
"蹯": "fan",
"蹰": "chu",
"蹱": "zhong",
"蹲": "dun",
"蹳": "bo",
"蹴": "cu",
"蹵": "cu",
"蹶": "jue",
"蹷": "jue",
"蹸": "lin",
"蹹": "ta",
"蹺": "qiao",
"蹻": "qiao",
"蹼": "pu",
"蹽": "liao",
"蹾": "dun",
"蹿": "cuan",
"躀": "guan",
"躁": "zao",
"躂": "ta",
"躃": "bi",
"躄": "bi",
"躅": "zhu",
"躆": "ju",
"躇": "chu",
"躈": "qiao",
"躉": "dun",
"躊": "chou",
"躋": "ji",
"躌": "wu",
"躍": "yue",
"躎": "nian",
"躏": "lin",
"躐": "lie",
"躑": "zhi",
"躒": "li",
"躓": "zhi",
"躔": "chan",
"躕": "chu",
"躖": "duan",
"躗": "wei",
"躘": "long",
"躙": "lin",
"躚": "xian",
"躛": "wei",
"躜": "zuan",
"躝": "lan",
"躞": "xie",
"躟": "rang",
"躠": "sa",
"躡": "nie",
"躢": "ta",
"躣": "qu",
"躤": "ji",
"躥": "cuan",
"躦": "zuan",
"躧": "xi",
"躨": "kui",
"躩": "jue",
"躪": "lin",
"身": "shen",
"躬": "gong",
"躭": "dan",
"躮": "fen",
"躯": "qu",
"躰": "ti",
"躱": "duo",
"躲": "duo",
"躳": "gong",
"躴": "lang",
"躵": "ren",
"躶": "luo",
"躷": "ai",
"躸": "ji",
"躹": "ju",
"躺": "tang",
"躻": "kong",
"躼": "lao",
"躽": "yan",
"躾": "mei",
"躿": "kang",
"軀": "qu",
"軁": "lou",
"軂": "lao",
"軃": "duo",
"軄": "zhi",
"軅": "yan",
"軆": "ti",
"軇": "dao",
"軈": "ying",
"軉": "yu",
"車": "che",
"軋": "ya",
"軌": "gui",
"軍": "jun",
"軎": "wei",
"軏": "yue",
"軐": "xin",
"軑": "dai",
"軒": "xuan",
"軓": "fan",
"軔": "ren",
"軕": "shan",
"軖": "kuang",
"軗": "shu",
"軘": "tun",
"軙": "chen",
"軚": "dai",
"軛": "e",
"軜": "na",
"軝": "qi",
"軞": "mao",
"軟": "ruan",
"軠": "kuang",
"軡": "qian",
"転": "zhuan",
"軣": "hong",
"軤": "hu",
"軥": "qu",
"軦": "kuang",
"軧": "di",
"軨": "ling",
"軩": "dai",
"軪": "ao",
"軫": "zhen",
"軬": "fan",
"軭": "kuang",
"軮": "yang",
"軯": "peng",
"軰": "bei",
"軱": "gu",
"軲": "gu",
"軳": "pao",
"軴": "zhu",
"軵": "rong",
"軶": "e",
"軷": "ba",
"軸": "zhou",
"軹": "zhi",
"軺": "yao",
"軻": "ke",
"軼": "yi",
"軽": "qing",
"軾": "shi",
"軿": "ping",
"輀": "er",
"輁": "gong",
"輂": "ju",
"較": "jiao",
"輄": "guang",
"輅": "lu",
"輆": "kai",
"輇": "quan",
"輈": "zhou",
"載": "zai",
"輊": "zhi",
"輋": "she",
"輌": "liang",
"輍": "yu",
"輎": "shao",
"輏": "you",
"輐": "wan",
"輑": "yin",
"輒": "zhe",
"輓": "wan",
"輔": "fu",
"輕": "qing",
"輖": "zhou",
"輗": "ni",
"輘": "ling",
"輙": "zhe",
"輚": "han",
"輛": "liang",
"輜": "zi",
"輝": "hui",
"輞": "wang",
"輟": "chuo",
"輠": "guo",
"輡": "kan",
"輢": "yi",
"輣": "peng",
"輤": "qian",
"輥": "gun",
"輦": "nian",
"輧": "ping",
"輨": "guan",
"輩": "bei",
"輪": "lun",
"輫": "pai",
"輬": "liang",
"輭": "ruan",
"輮": "rou",
"輯": "ji",
"輰": "yang",
"輱": "xian",
"輲": "chuan",
"輳": "cou",
"輴": "chun",
"輵": "ge",
"輶": "you",
"輷": "hong",
"輸": "shu",
"輹": "fu",
"輺": "zi",
"輻": "fu",
"輼": "wen",
"輽": "fan",
"輾": "zhan",
"輿": "yu",
"轀": "wen",
"轁": "tao",
"轂": "gu",
"轃": "zhen",
"轄": "xia",
"轅": "yuan",
"轆": "lu",
"轇": "jiao",
"轈": "chao",
"轉": "zhuan",
"轊": "wei",
"轋": "hun",
"轌": "xue",
"轍": "zhe",
"轎": "jiao",
"轏": "zhan",
"轐": "bu",
"轑": "lao",
"轒": "fen",
"轓": "fan",
"轔": "lin",
"轕": "ge",
"轖": "se",
"轗": "kan",
"轘": "huan",
"轙": "yi",
"轚": "ji",
"轛": "dui",
"轜": "er",
"轝": "yu",
"轞": "jian",
"轟": "hong",
"轠": "lei",
"轡": "pei",
"轢": "li",
"轣": "li",
"轤": "lu",
"轥": "lin",
"车": "che",
"轧": "ya",
"轨": "gui",
"轩": "xuan",
"轪": "dai",
"轫": "ren",
"转": "zhuan",
"轭": "e",
"轮": "lun",
"软": "ruan",
"轰": "hong",
"轱": "gu",
"轲": "ke",
"轳": "lu",
"轴": "zhou",
"轵": "zhi",
"轶": "yi",
"轷": "hu",
"轸": "zhen",
"轹": "li",
"轺": "yao",
"轻": "qing",
"轼": "shi",
"载": "zai",
"轾": "zhi",
"轿": "jiao",
"辀": "zhou",
"辁": "quan",
"辂": "lu",
"较": "jiao",
"辄": "zhe",
"辅": "fu",
"辆": "liang",
"辇": "nian",
"辈": "bei",
"辉": "hui",
"辊": "gun",
"辋": "wang",
"辌": "liang",
"辍": "chuo",
"辎": "zi",
"辏": "cou",
"辐": "fu",
"辑": "ji",
"辒": "wen",
"输": "shu",
"辔": "pei",
"辕": "yuan",
"辖": "xia",
"辗": "zhan",
"辘": "lu",
"辙": "zhe",
"辚": "lin",
"辛": "xin",
"辜": "gu",
"辝": "ci",
"辞": "ci",
"辟": "bi",
"辠": "zui",
"辡": "bian",
"辢": "la",
"辣": "la",
"辤": "ci",
"辥": "xue",
"辦": "ban",
"辧": "bian",
"辨": "bian",
"辩": "bian",
"辪": "xue",
"辫": "bian",
"辬": "ban",
"辭": "ci",
"辮": "bian",
"辯": "bian",
"辰": "chen",
"辱": "ru",
"農": "nong",
"辳": "nong",
"辴": "zhen",
"辵": "chuo",
"辶": "chuo",
"辷": "yi",
"辸": "reng",
"边": "bian",
"辺": "dao",
"辻": "shi",
"込": "yu",
"辽": "liao",
"达": "da",
"辿": "chan",
"迀": "gan",
"迁": "qian",
"迂": "yu",
"迃": "yu",
"迄": "qi",
"迅": "xun",
"迆": "yi",
"过": "guo",
"迈": "mai",
"迉": "qi",
"迊": "za",
"迋": "wang",
"迌": "tu",
"迍": "zhun",
"迎": "ying",
"迏": "da",
"运": "yun",
"近": "jin",
"迒": "hang",
"迓": "ya",
"返": "fan",
"迕": "wu",
"迖": "da",
"迗": "e",
"还": "huan",
"这": "zhe",
"迚": "da",
"进": "jin",
"远": "yuan",
"违": "wei",
"连": "lian",
"迟": "chi",
"迠": "che",
"迡": "chi",
"迢": "tiao",
"迣": "zhi",
"迤": "yi",
"迥": "jiong",
"迦": "jia",
"迧": "chen",
"迨": "dai",
"迩": "er",
"迪": "di",
"迫": "po",
"迬": "zhu",
"迭": "die",
"迮": "ze",
"迯": "tao",
"述": "shu",
"迱": "yi",
"迳": "jing",
"迴": "hui",
"迵": "dong",
"迶": "you",
"迷": "mi",
"迸": "beng",
"迹": "ji",
"迺": "nai",
"迻": "yi",
"迼": "jie",
"追": "zhui",
"迾": "lie",
"迿": "xun",
"退": "tui",
"送": "song",
"适": "shi",
"逃": "tao",
"逄": "pang",
"逅": "hou",
"逆": "ni",
"逇": "dun",
"逈": "jiong",
"选": "xuan",
"逊": "xun",
"逋": "bu",
"逌": "you",
"逍": "xiao",
"逎": "qiu",
"透": "tou",
"逐": "zhu",
"逑": "qiu",
"递": "di",
"逓": "di",
"途": "tu",
"逕": "jing",
"逖": "ti",
"逗": "dou",
"逘": "yi",
"這": "zhe",
"通": "tong",
"逛": "guang",
"逜": "wu",
"逝": "shi",
"逞": "cheng",
"速": "su",
"造": "zao",
"逡": "qun",
"逢": "feng",
"連": "lian",
"逤": "suo",
"逥": "hui",
"逦": "li",
"逧": "gu",
"逨": "lai",
"逩": "ben",
"逪": "cuo",
"逫": "zhu",
"逬": "beng",
"逭": "huan",
"逮": "dai",
"逯": "lu",
"逰": "you",
"週": "zhou",
"進": "jin",
"逳": "yu",
"逴": "chuo",
"逵": "kui",
"逶": "wei",
"逷": "ti",
"逸": "yi",
"逹": "da",
"逺": "yuan",
"逻": "luo",
"逼": "bi",
"逽": "nuo",
"逾": "yu",
"逿": "dang",
"遀": "sui",
"遁": "dun",
"遂": "sui",
"遃": "yan",
"遄": "chuan",
"遅": "chi",
"遆": "di",
"遇": "yu",
"遈": "shi",
"遉": "zhen",
"遊": "you",
"運": "yun",
"遌": "e",
"遍": "bian",
"過": "guo",
"遏": "e",
"遐": "xia",
"遑": "huang",
"遒": "qiu",
"道": "dao",
"達": "da",
"違": "wei",
"遖": "nan",
"遗": "yi",
"遘": "gou",
"遙": "yao",
"遚": "chou",
"遛": "liu",
"遜": "xun",
"遝": "ta",
"遞": "di",
"遟": "chi",
"遠": "yuan",
"遡": "su",
"遢": "ta",
"遣": "qian",
"遤": "ma",
"遥": "yao",
"遦": "guan",
"遧": "zhang",
"遨": "ao",
"適": "shi",
"遪": "ca",
"遫": "chi",
"遬": "su",
"遭": "zao",
"遮": "zhe",
"遯": "dun",
"遰": "di",
"遱": "lou",
"遲": "chi",
"遳": "cuo",
"遴": "lin",
"遵": "zun",
"遶": "rao",
"遷": "qian",
"選": "xuan",
"遹": "yu",
"遺": "yi",
"遻": "e",
"遼": "liao",
"遽": "ju",
"遾": "shi",
"避": "bi",
"邀": "yao",
"邁": "mai",
"邂": "xie",
"邃": "sui",
"還": "huan",
"邅": "zhan",
"邆": "teng",
"邇": "er",
"邈": "miao",
"邉": "bian",
"邊": "bian",
"邋": "la",
"邌": "li",
"邍": "yuan",
"邎": "yao",
"邏": "luo",
"邐": "li",
"邑": "yi",
"邒": "ting",
"邓": "deng",
"邔": "qi",
"邕": "yong",
"邖": "shan",
"邗": "han",
"邘": "yu",
"邙": "mang",
"邚": "ru",
"邛": "qiong",
"邜": "xi",
"邝": "kuang",
"邞": "fu",
"邟": "kang",
"邠": "bin",
"邡": "fang",
"邢": "xing",
"那": "na",
"邤": "xin",
"邥": "shen",
"邦": "bang",
"邧": "yuan",
"邨": "cun",
"邩": "huo",
"邪": "xie",
"邫": "bang",
"邬": "wu",
"邭": "ju",
"邮": "you",
"邯": "han",
"邰": "tai",
"邱": "qiu",
"邲": "bi",
"邳": "pi",
"邴": "bing",
"邵": "shao",
"邶": "bei",
"邷": "wa",
"邸": "di",
"邹": "zou",
"邺": "ye",
"邻": "lin",
"邼": "kuang",
"邽": "gui",
"邾": "zhu",
"邿": "shi",
"郀": "ku",
"郁": "yu",
"郂": "gai",
"郃": "he",
"郄": "qie",
"郅": "zhi",
"郆": "ji",
"郇": "xun",
"郈": "hou",
"郉": "xing",
"郊": "jiao",
"郋": "xi",
"郌": "gui",
"郍": "na",
"郎": "lang",
"郏": "jia",
"郐": "kuai",
"郑": "zheng",
"郒": "lang",
"郓": "yun",
"郔": "yan",
"郕": "cheng",
"郖": "dou",
"郗": "xi",
"郘": "lv",
"郙": "fu",
"郚": "wu",
"郛": "fu",
"郜": "gao",
"郝": "hao",
"郞": "lang",
"郟": "jia",
"郠": "geng",
"郡": "jun",
"郢": "ying",
"郣": "bo",
"郤": "xi",
"郥": "bei",
"郦": "li",
"郧": "yun",
"部": "bu",
"郩": "xiao",
"郪": "qi",
"郫": "pi",
"郬": "qing",
"郭": "guo",
"郮": "zhou",
"郯": "tan",
"郰": "zou",
"郱": "ping",
"郲": "lai",
"郳": "ni",
"郴": "chen",
"郵": "you",
"郶": "bu",
"郷": "xiang",
"郸": "dan",
"郹": "ju",
"郺": "yong",
"郻": "qiao",
"郼": "yi",
"都": "du",
"郾": "yan",
"郿": "mei",
"鄀": "ruo",
"鄁": "bei",
"鄂": "e",
"鄃": "shu",
"鄄": "juan",
"鄅": "yu",
"鄆": "yun",
"鄇": "hou",
"鄈": "kui",
"鄉": "xiang",
"鄊": "xiang",
"鄋": "sou",
"鄌": "tang",
"鄍": "ming",
"鄎": "xi",
"鄏": "ru",
"鄐": "chu",
"鄑": "zi",
"鄒": "zou",
"鄓": "yi",
"鄔": "wu",
"鄕": "xiang",
"鄖": "yun",
"鄗": "hao",
"鄘": "yong",
"鄙": "bi",
"鄚": "mao",
"鄛": "chao",
"鄜": "fu",
"鄝": "liao",
"鄞": "yin",
"鄟": "zhuan",
"鄠": "hu",
"鄡": "qiao",
"鄢": "yan",
"鄣": "zhang",
"鄤": "man",
"鄥": "qiao",
"鄦": "xu",
"鄧": "deng",
"鄨": "bi",
"鄩": "xun",
"鄪": "bi",
"鄫": "zeng",
"鄬": "wei",
"鄭": "zheng",
"鄮": "mao",
"鄯": "shan",
"鄰": "lin",
"鄱": "po",
"鄲": "dan",
"鄳": "meng",
"鄴": "ye",
"鄵": "cao",
"鄶": "kuai",
"鄷": "feng",
"鄸": "meng",
"鄹": "zou",
"鄺": "kuang",
"鄻": "lian",
"鄼": "zan",
"鄽": "chan",
"鄾": "you",
"鄿": "qi",
"酀": "yan",
"酁": "chan",
"酂": "cuo",
"酃": "ling",
"酄": "huan",
"酅": "xi",
"酆": "feng",
"酇": "cuo",
"酈": "li",
"酉": "you",
"酊": "ding",
"酋": "qiu",
"酌": "zhuo",
"配": "pei",
"酎": "zhou",
"酏": "yi",
"酐": "gan",
"酑": "yu",
"酒": "jiu",
"酓": "yan",
"酔": "zui",
"酕": "mao",
"酖": "dan",
"酗": "xu",
"酘": "dou",
"酙": "zhen",
"酚": "fen",
"酛": "yuan",
"酜": "fu",
"酝": "yun",
"酞": "tai",
"酟": "tian",
"酠": "qia",
"酡": "tuo",
"酢": "zuo",
"酣": "han",
"酤": "gu",
"酥": "su",
"酦": "po",
"酧": "chou",
"酨": "zai",
"酩": "ming",
"酪": "lao",
"酫": "chuo",
"酬": "chou",
"酭": "you",
"酮": "tong",
"酯": "zhi",
"酰": "xian",
"酱": "jiang",
"酲": "cheng",
"酳": "yin",
"酴": "tu",
"酵": "jiao",
"酶": "mei",
"酷": "ku",
"酸": "suan",
"酹": "lei",
"酺": "pu",
"酻": "zui",
"酼": "hai",
"酽": "yan",
"酾": "shi",
"酿": "niang",
"醀": "wei",
"醁": "lu",
"醂": "lan",
"醃": "yan",
"醄": "tao",
"醅": "pei",
"醆": "zhan",
"醇": "chun",
"醈": "tan",
"醉": "zui",
"醊": "zhui",
"醋": "cu",
"醌": "kun",
"醍": "ti",
"醎": "xian",
"醏": "du",
"醐": "hu",
"醑": "xu",
"醒": "xing",
"醓": "tan",
"醔": "qiu",
"醕": "chun",
"醖": "yun",
"醗": "po",
"醘": "ke",
"醙": "sou",
"醚": "mi",
"醛": "quan",
"醜": "chou",
"醝": "cuo",
"醞": "yun",
"醟": "yong",
"醠": "ang",
"醡": "zha",
"醢": "hai",
"醣": "tang",
"醤": "jiang",
"醥": "piao",
"醦": "chan",
"醧": "yu",
"醨": "li",
"醩": "zao",
"醪": "lao",
"醫": "yi",
"醬": "jiang",
"醭": "bu",
"醮": "jiao",
"醯": "xi",
"醰": "tan",
"醱": "po",
"醲": "nong",
"醳": "yi",
"醴": "li",
"醵": "ju",
"醶": "yan",
"醷": "yi",
"醸": "niang",
"醹": "ru",
"醺": "xun",
"醻": "chou",
"醼": "yan",
"醽": "ling",
"醾": "mi",
"醿": "mi",
"釀": "niang",
"釁": "xin",
"釂": "jiao",
"釃": "shi",
"釄": "mi",
"釅": "yan",
"釆": "bian",
"采": "cai",
"釈": "shi",
"釉": "you",
"释": "shi",
"釋": "shi",
"里": "li",
"重": "zhong",
"野": "ye",
"量": "liang",
"釐": "li",
"金": "jin",
"釒": "jin",
"釓": "ga",
"釔": "yi",
"釕": "liao",
"釖": "dao",
"釗": "zhao",
"釘": "ding",
"釙": "po",
"釚": "qiu",
"釛": "he",
"釜": "fu",
"針": "zhen",
"釞": "zhi",
"釟": "ba",
"釠": "luan",
"釡": "fu",
"釢": "nai",
"釣": "diao",
"釤": "shan",
"釥": "qiao",
"釦": "kou",
"釧": "chuan",
"釨": "zi",
"釩": "fan",
"釪": "hua",
"釫": "hua",
"釬": "han",
"釭": "gang",
"釮": "qi",
"釯": "mang",
"釰": "ri",
"釱": "di",
"釲": "si",
"釳": "xi",
"釴": "yi",
"釵": "chai",
"釶": "shi",
"釷": "tu",
"釸": "xi",
"釹": "nv",
"釺": "qian",
"釻": "qiu",
"釼": "ri",
"釽": "pi",
"釾": "ye",
"釿": "jin",
"鈀": "ba",
"鈁": "fang",
"鈂": "chen",
"鈃": "xing",
"鈄": "dou",
"鈅": "yue",
"鈆": "qian",
"鈇": "fu",
"鈈": "bu",
"鈉": "na",
"鈊": "xin",
"鈋": "e",
"鈌": "jue",
"鈍": "dun",
"鈎": "gou",
"鈏": "yin",
"鈐": "qian",
"鈑": "ban",
"鈒": "sa",
"鈓": "ren",
"鈔": "chao",
"鈕": "niu",
"鈖": "fen",
"鈗": "yun",
"鈘": "yi",
"鈙": "qin",
"鈚": "pi",
"鈛": "guo",
"鈜": "hong",
"鈝": "yin",
"鈞": "jun",
"鈟": "diao",
"鈠": "yi",
"鈡": "zhong",
"鈢": "xi",
"鈣": "gai",
"鈤": "ri",
"鈥": "huo",
"鈦": "tai",
"鈧": "kang",
"鈨": "yuan",
"鈩": "lu",
"鈪": "e",
"鈫": "qin",
"鈬": "duo",
"鈭": "zi",
"鈮": "ni",
"鈯": "tu",
"鈰": "shi",
"鈱": "min",
"鈲": "gu",
"鈳": "ke",
"鈴": "ling",
"鈵": "bing",
"鈶": "si",
"鈷": "gu",
"鈸": "bo",
"鈹": "pi",
"鈺": "yu",
"鈻": "si",
"鈼": "zuo",
"鈽": "bu",
"鈾": "you",
"鈿": "dian",
"鉀": "jia",
"鉁": "zhen",
"鉂": "shi",
"鉃": "shi",
"鉄": "tie",
"鉅": "ju",
"鉆": "zuan",
"鉇": "shi",
"鉈": "ta",
"鉉": "xuan",
"鉊": "zhao",
"鉋": "bao",
"鉌": "he",
"鉍": "bi",
"鉎": "sheng",
"鉏": "chu",
"鉐": "shi",
"鉑": "bo",
"鉒": "zhu",
"鉓": "chi",
"鉔": "za",
"鉕": "po",
"鉖": "tong",
"鉗": "qian",
"鉘": "fu",
"鉙": "zhai",
"鉚": "mao",
"鉛": "qian",
"鉜": "fu",
"鉝": "li",
"鉞": "yue",
"鉟": "pi",
"鉠": "yang",
"鉡": "ban",
"鉢": "bo",
"鉣": "jie",
"鉤": "gou",
"鉥": "shu",
"鉦": "zheng",
"鉧": "mu",
"鉨": "xi",
"鉩": "xi",
"鉪": "di",
"鉫": "jia",
"鉬": "mu",
"鉭": "tan",
"鉮": "shen",
"鉯": "yi",
"鉰": "si",
"鉱": "kuang",
"鉲": "ka",
"鉳": "bei",
"鉴": "jian",
"鉵": "tong",
"鉶": "xing",
"鉷": "hong",
"鉸": "jiao",
"鉹": "chi",
"鉺": "er",
"鉻": "ge",
"鉼": "bing",
"鉽": "shi",
"鉾": "mao",
"鉿": "ha",
"銀": "yin",
"銁": "jun",
"銂": "zhou",
"銃": "chong",
"銄": "xiang",
"銅": "tong",
"銆": "mo",
"銇": "lei",
"銈": "ji",
"銉": "yu",
"銊": "xu",
"銋": "ren",
"銌": "zun",
"銍": "zhi",
"銎": "qiong",
"銏": "shan",
"銐": "chi",
"銑": "xian",
"銒": "xing",
"銓": "quan",
"銔": "pi",
"銕": "tie",
"銖": "zhu",
"銗": "hou",
"銘": "ming",
"銙": "kua",
"銚": "diao",
"銛": "xian",
"銜": "xian",
"銝": "xiu",
"銞": "jun",
"銟": "cha",
"銠": "lao",
"銡": "ji",
"銢": "pi",
"銣": "ru",
"銤": "mi",
"銥": "yi",
"銦": "yin",
"銧": "guang",
"銨": "an",
"銩": "diu",
"銪": "you",
"銫": "se",
"銬": "kao",
"銭": "qian",
"銮": "luan",
"銯": "si",
"銰": "ai",
"銱": "diao",
"銲": "han",
"銳": "rui",
"銴": "shi",
"銵": "keng",
"銶": "qiu",
"銷": "xiao",
"銸": "zhe",
"銹": "xiu",
"銺": "zang",
"銻": "ti",
"銼": "cuo",
"銽": "xian",
"銾": "hong",
"銿": "zhong",
"鋀": "tou",
"鋁": "lv",
"鋂": "mei",
"鋃": "lang",
"鋄": "wan",
"鋅": "xin",
"鋆": "yun",
"鋇": "bei",
"鋈": "wu",
"鋉": "su",
"鋊": "yu",
"鋋": "chan",
"鋌": "ting",
"鋍": "bo",
"鋎": "han",
"鋏": "jia",
"鋐": "hong",
"鋑": "juan",
"鋒": "feng",
"鋓": "chan",
"鋔": "wan",
"鋕": "zhi",
"鋖": "si",
"鋗": "xuan",
"鋘": "hua",
"鋙": "wu",
"鋚": "tiao",
"鋛": "kuang",
"鋜": "zhuo",
"鋝": "lve",
"鋞": "xing",
"鋟": "qin",
"鋠": "shen",
"鋡": "han",
"鋢": "lve",
"鋣": "ye",
"鋤": "chu",
"鋥": "zeng",
"鋦": "ju",
"鋧": "xian",
"鋨": "e",
"鋩": "mang",
"鋪": "pu",
"鋫": "li",
"鋬": "pan",
"鋭": "rui",
"鋮": "cheng",
"鋯": "gao",
"鋰": "li",
"鋱": "te",
"鋲": "bing",
"鋳": "zhu",
"鋴": "zhen",
"鋵": "tu",
"鋶": "liu",
"鋷": "zui",
"鋸": "ju",
"鋹": "chang",
"鋺": "yuan",
"鋻": "jian",
"鋼": "gang",
"鋽": "diao",
"鋾": "tao",
"鋿": "shang",
"錀": "lun",
"錁": "ke",
"錂": "ling",
"錃": "pi",
"錄": "lu",
"錅": "li",
"錆": "qing",
"錇": "pei",
"錈": "juan",
"錉": "min",
"錊": "zui",
"錋": "peng",
"錌": "an",
"錍": "pi",
"錎": "xian",
"錏": "ya",
"錐": "zhui",
"錑": "lei",
"錒": "a",
"錓": "kong",
"錔": "ta",
"錕": "kun",
"錖": "du",
"錗": "nei",
"錘": "chui",
"錙": "zi",
"錚": "zheng",
"錛": "ben",
"錜": "nie",
"錝": "cong",
"錞": "chun",
"錟": "tan",
"錠": "ding",
"錡": "qi",
"錢": "qian",
"錣": "zhui",
"錤": "ji",
"錥": "yu",
"錦": "jin",
"錧": "guan",
"錨": "mao",
"錩": "chang",
"錪": "tian",
"錫": "xi",
"錬": "lian",
"錭": "diao",
"錮": "gu",
"錯": "cuo",
"錰": "shu",
"錱": "zhen",
"録": "lu",
"錳": "meng",
"錴": "lu",
"錵": "hua",
"錶": "biao",
"錷": "ga",
"錸": "lai",
"錹": "ken",
"錺": "fang",
"錻": "bu",
"錼": "nai",
"錽": "wan",
"錾": "zan",
"錿": "hu",
"鍀": "de",
"鍁": "xian",
"鍂": "pian",
"鍃": "huo",
"鍄": "liang",
"鍅": "fa",
"鍆": "men",
"鍇": "kai",
"鍈": "yang",
"鍉": "chi",
"鍊": "lian",
"鍋": "guo",
"鍌": "xian",
"鍍": "du",
"鍎": "tu",
"鍏": "wei",
"鍐": "zong",
"鍑": "fu",
"鍒": "rou",
"鍓": "ji",
"鍔": "e",
"鍕": "jun",
"鍖": "chen",
"鍗": "ti",
"鍘": "zha",
"鍙": "hu",
"鍚": "yang",
"鍛": "duan",
"鍜": "xia",
"鍝": "yu",
"鍞": "keng",
"鍟": "sheng",
"鍠": "huang",
"鍡": "wei",
"鍢": "fu",
"鍣": "zhao",
"鍤": "cha",
"鍥": "qie",
"鍦": "shi",
"鍧": "hong",
"鍨": "kui",
"鍩": "nuo",
"鍪": "mou",
"鍫": "qiao",
"鍬": "qiao",
"鍭": "hou",
"鍮": "tou",
"鍯": "cong",
"鍰": "huan",
"鍱": "ye",
"鍲": "min",
"鍳": "jian",
"鍴": "duan",
"鍵": "jian",
"鍶": "si",
"鍷": "kui",
"鍸": "hu",
"鍹": "xuan",
"鍺": "zhe",
"鍻": "jie",
"鍼": "zhen",
"鍽": "bian",
"鍾": "zhong",
"鍿": "zi",
"鎀": "xiu",
"鎁": "ye",
"鎂": "mei",
"鎃": "pai",
"鎄": "ai",
"鎅": "jie",
"鎆": "qian",
"鎇": "mei",
"鎈": "cuo",
"鎉": "da",
"鎊": "bang",
"鎋": "xia",
"鎌": "lian",
"鎍": "suo",
"鎎": "kai",
"鎏": "liu",
"鎐": "yao",
"鎑": "ye",
"鎒": "nou",
"鎓": "weng",
"鎔": "rong",
"鎕": "tang",
"鎖": "suo",
"鎗": "qiang",
"鎘": "ge",
"鎙": "shuo",
"鎚": "chui",
"鎛": "bo",
"鎜": "pan",
"鎝": "da",
"鎞": "bi",
"鎟": "sang",
"鎠": "gang",
"鎡": "zi",
"鎢": "wu",
"鎣": "ying",
"鎤": "huang",
"鎥": "tiao",
"鎦": "liu",
"鎧": "kai",
"鎨": "sun",
"鎩": "sha",
"鎪": "sou",
"鎫": "wan",
"鎬": "gao",
"鎭": "zhen",
"鎮": "zhen",
"鎯": "lang",
"鎰": "yi",
"鎱": "yuan",
"鎲": "tang",
"鎳": "nie",
"鎴": "xi",
"鎵": "jia",
"鎶": "ge",
"鎷": "ma",
"鎸": "juan",
"鎹": "song",
"鎺": "zu",
"鎻": "suo",
"鎼": "xia",
"鎽": "feng",
"鎾": "wen",
"鎿": "na",
"鏀": "lu",
"鏁": "suo",
"鏂": "ou",
"鏃": "zu",
"鏄": "tuan",
"鏅": "xiu",
"鏆": "guan",
"鏇": "xuan",
"鏈": "lian",
"鏉": "shou",
"鏊": "ao",
"鏋": "man",
"鏌": "mo",
"鏍": "luo",
"鏎": "bi",
"鏏": "wei",
"鏐": "liu",
"鏑": "di",
"鏒": "san",
"鏓": "cong",
"鏔": "yi",
"鏕": "lu",
"鏖": "ao",
"鏗": "keng",
"鏘": "qiang",
"鏙": "cui",
"鏚": "qi",
"鏛": "shang",
"鏜": "tang",
"鏝": "man",
"鏞": "yong",
"鏟": "chan",
"鏠": "feng",
"鏡": "jing",
"鏢": "biao",
"鏣": "shu",
"鏤": "lou",
"鏥": "xiu",
"鏦": "cong",
"鏧": "long",
"鏨": "zan",
"鏩": "jian",
"鏪": "cao",
"鏫": "li",
"鏬": "xia",
"鏭": "xi",
"鏮": "kang",
"鏯": "shuang",
"鏰": "beng",
"鏱": "zhang",
"鏲": "qian",
"鏳": "zheng",
"鏴": "lu",
"鏵": "hua",
"鏶": "ji",
"鏷": "pu",
"鏸": "hui",
"鏹": "qiang",
"鏺": "po",
"鏻": "lin",
"鏼": "se",
"鏽": "xiu",
"鏾": "san",
"鏿": "cheng",
"鐀": "gui",
"鐁": "si",
"鐂": "liu",
"鐃": "nao",
"鐄": "huang",
"鐅": "pie",
"鐆": "sui",
"鐇": "fan",
"鐈": "qiao",
"鐉": "quan",
"鐊": "xi",
"鐋": "tang",
"鐌": "xiang",
"鐍": "jue",
"鐎": "jiao",
"鐏": "zun",
"鐐": "liao",
"鐑": "qi",
"鐒": "lao",
"鐓": "dui",
"鐔": "xin",
"鐕": "zan",
"鐖": "ji",
"鐗": "jian",
"鐘": "zhong",
"鐙": "deng",
"鐚": "ya",
"鐛": "ying",
"鐜": "dui",
"鐝": "jue",
"鐞": "nou",
"鐟": "zan",
"鐠": "pu",
"鐡": "tie",
"鐢": "fan",
"鐣": "cheng",
"鐤": "ding",
"鐥": "shan",
"鐦": "kai",
"鐧": "jian",
"鐨": "fei",
"鐩": "sui",
"鐪": "lu",
"鐫": "juan",
"鐬": "hui",
"鐭": "yu",
"鐮": "lian",
"鐯": "zhuo",
"鐰": "qiao",
"鐱": "jian",
"鐲": "zhuo",
"鐳": "lei",
"鐴": "bi",
"鐵": "tie",
"鐶": "huan",
"鐷": "ye",
"鐸": "duo",
"鐹": "guo",
"鐺": "dang",
"鐻": "ju",
"鐼": "fen",
"鐽": "da",
"鐾": "bei",
"鐿": "yi",
"鑀": "ai",
"鑁": "zong",
"鑂": "xun",
"鑃": "diao",
"鑄": "zhu",
"鑅": "heng",
"鑆": "zhui",
"鑇": "ji",
"鑈": "nie",
"鑉": "he",
"鑊": "huo",
"鑋": "qing",
"鑌": "bin",
"鑍": "ying",
"鑎": "gui",
"鑏": "ning",
"鑐": "xu",
"鑑": "jian",
"鑒": "jian",
"鑓": "qian",
"鑔": "cha",
"鑕": "zhi",
"鑖": "mie",
"鑗": "li",
"鑘": "lei",
"鑙": "ji",
"鑚": "zuan",
"鑛": "kuang",
"鑜": "shang",
"鑝": "peng",
"鑞": "la",
"鑟": "du",
"鑠": "shuo",
"鑡": "chuo",
"鑢": "lv",
"鑣": "biao",
"鑤": "bao",
"鑥": "lu",
"鑦": "xian",
"鑧": "kuan",
"鑨": "long",
"鑩": "e",
"鑪": "lu",
"鑫": "xin",
"鑬": "jian",
"鑭": "lan",
"鑮": "bo",
"鑯": "jian",
"鑰": "yue",
"鑱": "chan",
"鑲": "xiang",
"鑳": "jian",
"鑴": "xi",
"鑵": "guan",
"鑶": "cang",
"鑷": "nie",
"鑸": "lei",
"鑹": "cuan",
"鑺": "qu",
"鑻": "pan",
"鑼": "luo",
"鑽": "zuan",
"鑾": "luan",
"鑿": "zao",
"钀": "nie",
"钁": "jue",
"钂": "tang",
"钃": "zhu",
"钄": "lan",
"钅": "jin",
"钆": "ga",
"钇": "yi",
"针": "zhen",
"钉": "ding",
"钊": "zhao",
"钋": "po",
"钌": "liao",
"钍": "tu",
"钎": "qian",
"钏": "chuan",
"钐": "shan",
"钑": "sa",
"钒": "fan",
"钓": "diao",
"钔": "men",
"钕": "nv",
"钖": "yang",
"钗": "chai",
"钘": "xing",
"钙": "gai",
"钚": "bu",
"钛": "tai",
"钜": "ju",
"钝": "dun",
"钞": "chao",
"钟": "zhong",
"钠": "na",
"钡": "bei",
"钢": "gang",
"钣": "ban",
"钤": "qian",
"钥": "yue",
"钦": "qin",
"钧": "jun",
"钨": "wu",
"钩": "gou",
"钪": "kang",
"钫": "fang",
"钬": "huo",
"钭": "dou",
"钮": "niu",
"钯": "ba",
"钰": "yu",
"钱": "qian",
"钲": "zheng",
"钳": "qian",
"钴": "gu",
"钵": "bo",
"钶": "ke",
"钷": "po",
"钸": "bu",
"钹": "bo",
"钺": "yue",
"钻": "zuan",
"钼": "mu",
"钽": "tan",
"钾": "jia",
"钿": "dian",
"铀": "you",
"铁": "tie",
"铂": "bo",
"铃": "ling",
"铄": "shuo",
"铅": "qian",
"铆": "mao",
"铇": "bao",
"铈": "shi",
"铉": "xuan",
"铊": "ta",
"铋": "bi",
"铌": "ni",
"铍": "pi",
"铎": "duo",
"铏": "xing",
"铐": "kao",
"铑": "lao",
"铒": "er",
"铓": "mang",
"铔": "ya",
"铕": "you",
"铖": "cheng",
"铗": "jia",
"铘": "ye",
"铙": "nao",
"铚": "zhi",
"铛": "dang",
"铜": "tong",
"铝": "lv",
"铞": "diao",
"铟": "yin",
"铠": "kai",
"铡": "zha",
"铢": "zhu",
"铣": "xian",
"铤": "ting",
"铥": "diu",
"铦": "xian",
"铧": "hua",
"铨": "quan",
"铩": "sha",
"铪": "ha",
"铫": "diao",
"铬": "ge",
"铭": "ming",
"铮": "zheng",
"铯": "se",
"铰": "jiao",
"铱": "yi",
"铲": "chan",
"铳": "chong",
"铴": "tang",
"铵": "an",
"银": "yin",
"铷": "ru",
"铸": "zhu",
"铹": "lao",
"铺": "pu",
"铻": "wu",
"铼": "lai",
"铽": "te",
"链": "lian",
"铿": "keng",
"销": "xiao",
"锁": "suo",
"锂": "li",
"锃": "zeng",
"锄": "chu",
"锅": "guo",
"锆": "gao",
"锇": "e",
"锈": "xiu",
"锉": "cuo",
"锊": "lve",
"锋": "feng",
"锌": "xin",
"锍": "liu",
"锎": "kai",
"锏": "jian",
"锐": "rui",
"锑": "ti",
"锒": "lang",
"锓": "qin",
"锔": "ju",
"锕": "a",
"锖": "qiang",
"锗": "zhe",
"锘": "nuo",
"错": "cuo",
"锚": "mao",
"锛": "ben",
"锜": "qi",
"锝": "de",
"锞": "ke",
"锟": "kun",
"锠": "chang",
"锡": "xi",
"锢": "gu",
"锣": "luo",
"锤": "chui",
"锥": "zhui",
"锦": "jin",
"锧": "zhi",
"锨": "xian",
"锩": "juan",
"锪": "huo",
"锫": "pei",
"锬": "tan",
"锭": "ding",
"键": "jian",
"锯": "ju",
"锰": "meng",
"锱": "zi",
"锲": "qie",
"锳": "ying",
"锴": "kai",
"锵": "qiang",
"锶": "si",
"锷": "e",
"锸": "cha",
"锹": "qiao",
"锺": "zhong",
"锻": "duan",
"锼": "sou",
"锽": "huang",
"锾": "huan",
"锿": "ai",
"镀": "du",
"镁": "mei",
"镂": "lou",
"镃": "zi",
"镄": "fei",
"镅": "mei",
"镆": "mo",
"镇": "zhen",
"镈": "bo",
"镉": "ge",
"镊": "nie",
"镋": "tang",
"镌": "juan",
"镍": "nie",
"镎": "na",
"镏": "liu",
"镐": "gao",
"镑": "bang",
"镒": "yi",
"镓": "jia",
"镔": "bin",
"镕": "rong",
"镖": "biao",
"镗": "tang",
"镘": "man",
"镙": "luo",
"镚": "beng",
"镛": "yong",
"镜": "jing",
"镝": "di",
"镞": "zu",
"镟": "xuan",
"镠": "liu",
"镡": "xin",
"镢": "jue",
"镣": "liao",
"镤": "pu",
"镥": "lu",
"镦": "dui",
"镧": "lan",
"镨": "pu",
"镩": "cuan",
"镪": "qiang",
"镫": "deng",
"镬": "huo",
"镭": "lei",
"镮": "huan",
"镯": "zhuo",
"镰": "lian",
"镱": "yi",
"镲": "cha",
"镳": "biao",
"镴": "la",
"镵": "chan",
"镶": "xiang",
"長": "chang",
"镸": "chang",
"镹": "jiu",
"镺": "ao",
"镻": "die",
"镼": "jie",
"镽": "liao",
"镾": "mi",
"长": "chang",
"門": "men",
"閁": "ma",
"閂": "shuan",
"閃": "shan",
"閄": "huo",
"閅": "men",
"閆": "yan",
"閇": "bi",
"閈": "han",
"閉": "bi",
"閊": "shan",
"開": "kai",
"閌": "kang",
"閍": "beng",
"閎": "hong",
"閏": "run",
"閐": "san",
"閑": "xian",
"閒": "xian",
"間": "jian",
"閔": "min",
"閕": "xia",
"閖": "shui",
"閗": "dou",
"閘": "zha",
"閙": "nao",
"閚": "zhan",
"閛": "peng",
"閜": "xia",
"閝": "ling",
"閞": "bian",
"閟": "bi",
"閠": "run",
"閡": "he",
"関": "guan",
"閣": "ge",
"閤": "he",
"閥": "fa",
"閦": "chu",
"閧": "hong",
"閨": "gui",
"閩": "min",
"閪": "se",
"閫": "kun",
"閬": "lang",
"閭": "lv",
"閮": "ting",
"閯": "sha",
"閰": "ju",
"閱": "yue",
"閲": "yue",
"閳": "chan",
"閴": "qu",
"閵": "lin",
"閶": "chang",
"閷": "sha",
"閸": "kun",
"閹": "yan",
"閺": "wen",
"閻": "yan",
"閼": "e",
"閽": "hun",
"閾": "yu",
"閿": "wen",
"闀": "hong",
"闁": "bao",
"闂": "hong",
"闃": "qu",
"闄": "yao",
"闅": "wen",
"闆": "ban",
"闇": "an",
"闈": "wei",
"闉": "yin",
"闊": "kuo",
"闋": "que",
"闌": "lan",
"闍": "du",
"闎": "quan",
"闐": "tian",
"闑": "nie",
"闒": "ta",
"闓": "kai",
"闔": "he",
"闕": "que",
"闖": "chuang",
"闗": "guan",
"闘": "dou",
"闙": "qi",
"闚": "kui",
"闛": "tang",
"關": "guan",
"闝": "piao",
"闞": "kan",
"闟": "xi",
"闠": "hui",
"闡": "chan",
"闢": "pi",
"闣": "dang",
"闤": "huan",
"闥": "ta",
"闦": "wen",
"闧": "ta",
"门": "men",
"闩": "shuan",
"闪": "shan",
"闫": "yan",
"闬": "han",
"闭": "bi",
"问": "wen",
"闯": "chuang",
"闰": "run",
"闱": "wei",
"闲": "xian",
"闳": "hong",
"间": "jian",
"闵": "min",
"闶": "kang",
"闷": "men",
"闸": "zha",
"闹": "nao",
"闺": "gui",
"闻": "wen",
"闼": "ta",
"闽": "min",
"闾": "lv",
"闿": "kai",
"阀": "fa",
"阁": "ge",
"阂": "he",
"阃": "kun",
"阄": "jiu",
"阅": "yue",
"阆": "lang",
"阇": "du",
"阈": "yu",
"阉": "yan",
"阊": "chang",
"阋": "xi",
"阌": "wen",
"阍": "hun",
"阎": "yan",
"阏": "e",
"阐": "chan",
"阑": "lan",
"阒": "qu",
"阓": "hui",
"阔": "kuo",
"阕": "que",
"阖": "he",
"阗": "tian",
"阘": "ta",
"阙": "que",
"阚": "kan",
"阛": "huan",
"阜": "fu",
"阝": "fu",
"阞": "le",
"队": "dui",
"阠": "xin",
"阡": "qian",
"阢": "wu",
"阣": "yi",
"阤": "tuo",
"阥": "yin",
"阦": "yang",
"阧": "dou",
"阨": "e",
"阩": "sheng",
"阪": "ban",
"阫": "pei",
"阬": "keng",
"阭": "yun",
"阮": "ruan",
"阯": "zhi",
"阰": "pi",
"阱": "jing",
"防": "fang",
"阳": "yang",
"阴": "yin",
"阵": "zhen",
"阶": "jie",
"阷": "cheng",
"阸": "e",
"阹": "qu",
"阺": "di",
"阻": "zu",
"阼": "zuo",
"阽": "dian",
"阾": "lin",
"阿": "a",
"陀": "tuo",
"陁": "tuo",
"陂": "bei",
"陃": "bing",
"附": "fu",
"际": "ji",
"陆": "lu",
"陇": "long",
"陈": "chen",
"陉": "xing",
"陊": "duo",
"陋": "lou",
"陌": "mo",
"降": "jiang",
"陎": "shu",
"陏": "duo",
"限": "xian",
"陑": "er",
"陒": "gui",
"陓": "yu",
"陔": "gai",
"陕": "shan",
"陖": "jun",
"陗": "qiao",
"陘": "xing",
"陙": "chun",
"陚": "wu",
"陛": "bi",
"陜": "xia",
"陝": "shan",
"陞": "sheng",
"陟": "zhi",
"陠": "pu",
"陡": "dou",
"院": "yuan",
"陣": "zhen",
"除": "chu",
"陥": "xian",
"陦": "dao",
"陧": "nie",
"陨": "yun",
"险": "xian",
"陪": "pei",
"陫": "fei",
"陬": "zou",
"陭": "qi",
"陮": "dui",
"陯": "lun",
"陰": "yin",
"陱": "ju",
"陲": "chui",
"陳": "chen",
"陴": "pi",
"陵": "ling",
"陶": "tao",
"陷": "xian",
"陸": "lu",
"陹": "sheng",
"険": "xian",
"陻": "yin",
"陼": "zhu",
"陽": "yang",
"陾": "reng",
"陿": "xia",
"隀": "chong",
"隁": "yan",
"隂": "yin",
"隃": "yu",
"隄": "di",
"隅": "yu",
"隆": "long",
"隇": "wei",
"隈": "wei",
"隉": "nie",
"隊": "dui",
"隋": "sui",
"隌": "an",
"隍": "huang",
"階": "jie",
"随": "sui",
"隐": "yin",
"隑": "qi",
"隒": "yan",
"隓": "hui",
"隔": "ge",
"隕": "yun",
"隖": "wu",
"隗": "wei",
"隘": "ai",
"隙": "xi",
"隚": "tang",
"際": "ji",
"障": "zhang",
"隝": "dao",
"隞": "ao",
"隟": "xi",
"隠": "yin",
"隡": "sa",
"隢": "rao",
"隣": "lin",
"隤": "tui",
"隥": "deng",
"隦": "pi",
"隧": "sui",
"隨": "sui",
"隩": "ao",
"險": "xian",
"隫": "fen",
"隬": "ni",
"隭": "er",
"隮": "ji",
"隯": "dao",
"隰": "xi",
"隱": "yin",
"隲": "zhi",
"隳": "hui",
"隴": "long",
"隵": "xi",
"隶": "li",
"隷": "li",
"隸": "li",
"隹": "zhui",
"隺": "hu",
"隻": "zhi",
"隼": "sun",
"隽": "jun",
"难": "nan",
"隿": "yi",
"雀": "que",
"雁": "yan",
"雂": "qin",
"雃": "jian",
"雄": "xiong",
"雅": "ya",
"集": "ji",
"雇": "gu",
"雈": "huan",
"雉": "zhi",
"雊": "gou",
"雋": "jun",
"雌": "ci",
"雍": "yong",
"雎": "ju",
"雏": "chu",
"雐": "hu",
"雑": "za",
"雒": "luo",
"雓": "yu",
"雔": "chou",
"雕": "diao",
"雖": "sui",
"雗": "han",
"雘": "huo",
"雙": "shuang",
"雚": "guan",
"雛": "chu",
"雜": "za",
"雝": "yong",
"雞": "ji",
"雟": "gui",
"雠": "chou",
"雡": "liu",
"離": "li",
"難": "nan",
"雤": "yu",
"雥": "za",
"雦": "chou",
"雧": "ji",
"雨": "yu",
"雩": "yu",
"雪": "xue",
"雫": "na",
"雬": "fou",
"雭": "se",
"雮": "mu",
"雯": "wen",
"雰": "fen",
"雱": "pang",
"雲": "yun",
"雳": "li",
"雴": "chi",
"雵": "yang",
"零": "ling",
"雷": "lei",
"雸": "an",
"雹": "bao",
"雺": "wu",
"電": "dian",
"雼": "dang",
"雽": "hu",
"雾": "wu",
"雿": "diao",
"需": "xu",
"霁": "ji",
"霂": "mu",
"霃": "chen",
"霄": "xiao",
"霅": "zha",
"霆": "ting",
"震": "zhen",
"霈": "pei",
"霉": "mei",
"霊": "ling",
"霋": "qi",
"霌": "zhou",
"霍": "huo",
"霎": "sha",
"霏": "fei",
"霐": "hong",
"霑": "zhan",
"霒": "yin",
"霓": "ni",
"霔": "shu",
"霕": "tun",
"霖": "lin",
"霗": "ling",
"霘": "dong",
"霙": "ying",
"霚": "wu",
"霛": "ling",
"霜": "shuang",
"霝": "ling",
"霞": "xia",
"霟": "hong",
"霠": "yin",
"霡": "mai",
"霢": "mai",
"霣": "yun",
"霤": "liu",
"霥": "meng",
"霦": "bin",
"霧": "wu",
"霨": "wei",
"霩": "kuo",
"霪": "yin",
"霫": "xi",
"霬": "yi",
"霭": "ai",
"霮": "dan",
"霯": "teng",
"霰": "xian",
"霱": "yu",
"露": "lou",
"霳": "long",
"霴": "dai",
"霵": "ji",
"霶": "pang",
"霷": "yang",
"霸": "ba",
"霹": "pi",
"霺": "wei",
"霻": "feng",
"霼": "xi",
"霽": "ji",
"霾": "mai",
"霿": "meng",
"靀": "meng",
"靁": "lei",
"靂": "li",
"靃": "huo",
"靄": "ai",
"靅": "fei",
"靆": "dai",
"靇": "long",
"靈": "ling",
"靉": "ai",
"靊": "feng",
"靋": "li",
"靌": "bao",
"靍": "he",
"靎": "he",
"靏": "he",
"靐": "bing",
"靑": "qing",
"青": "qing",
"靓": "jing",
"靔": "tian",
"靕": "zheng",
"靖": "jing",
"靗": "cheng",
"靘": "qing",
"静": "jing",
"靚": "jing",
"靛": "dian",
"靜": "jing",
"靝": "tian",
"非": "fei",
"靟": "fei",
"靠": "kao",
"靡": "mi",
"面": "mian",
"靣": "mian",
"靤": "pao",
"靥": "ye",
"靦": "mian",
"靧": "hui",
"靨": "ye",
"革": "ge",
"靪": "ding",
"靫": "cha",
"靬": "jian",
"靭": "ren",
"靮": "di",
"靯": "du",
"靰": "wu",
"靱": "ren",
"靲": "qin",
"靳": "jin",
"靴": "xue",
"靵": "niu",
"靶": "ba",
"靷": "yin",
"靸": "sa",
"靹": "na",
"靺": "mo",
"靻": "zu",
"靼": "da",
"靽": "ban",
"靾": "xie",
"靿": "yao",
"鞀": "tao",
"鞁": "bei",
"鞂": "jie",
"鞃": "hong",
"鞄": "pao",
"鞅": "yang",
"鞆": "bing",
"鞇": "yin",
"鞈": "ge",
"鞉": "tao",
"鞊": "jie",
"鞋": "xie",
"鞌": "an",
"鞍": "an",
"鞎": "hen",
"鞏": "gong",
"鞐": "qia",
"鞑": "da",
"鞒": "qiao",
"鞓": "ting",
"鞔": "man",
"鞕": "bian",
"鞖": "sui",
"鞗": "tiao",
"鞘": "qiao",
"鞙": "xuan",
"鞚": "kong",
"鞛": "beng",
"鞜": "ta",
"鞝": "shang",
"鞞": "bing",
"鞟": "kuo",
"鞠": "ju",
"鞡": "la",
"鞢": "xie",
"鞣": "rou",
"鞤": "bang",
"鞥": "eng",
"鞦": "qiu",
"鞧": "qiu",
"鞨": "he",
"鞩": "qiao",
"鞪": "mu",
"鞫": "ju",
"鞬": "jian",
"鞭": "bian",
"鞮": "di",
"鞯": "jian",
"鞰": "wen",
"鞱": "tao",
"鞲": "gou",
"鞳": "ta",
"鞴": "bei",
"鞵": "xie",
"鞶": "pan",
"鞷": "ge",
"鞸": "bi",
"鞹": "kuo",
"鞺": "tang",
"鞻": "lou",
"鞼": "gui",
"鞽": "qiao",
"鞾": "xue",
"鞿": "ji",
"韀": "jian",
"韁": "jiang",
"韂": "chan",
"韃": "da",
"韄": "huo",
"韅": "xian",
"韆": "qian",
"韇": "du",
"韈": "wa",
"韉": "jian",
"韊": "lan",
"韋": "wei",
"韌": "ren",
"韍": "fu",
"韎": "mei",
"韏": "quan",
"韐": "ge",
"韑": "wei",
"韒": "qiao",
"韓": "han",
"韔": "chang",
"韕": "kuo",
"韖": "rou",
"韗": "yun",
"韘": "she",
"韙": "wei",
"韚": "ge",
"韛": "bai",
"韜": "tao",
"韝": "gou",
"韞": "yun",
"韟": "gao",
"韠": "bi",
"韡": "wei",
"韢": "sui",
"韣": "du",
"韤": "wa",
"韥": "du",
"韦": "wei",
"韧": "ren",
"韨": "fu",
"韩": "han",
"韪": "wei",
"韫": "yun",
"韬": "tao",
"韭": "jiu",
"韮": "jiu",
"韯": "xian",
"韰": "xie",
"韱": "xian",
"韲": "ji",
"音": "yin",
"韴": "za",
"韵": "yun",
"韶": "shao",
"韷": "le",
"韸": "peng",
"韹": "huang",
"韺": "ying",
"韻": "yun",
"韼": "peng",
"韽": "an",
"韾": "yin",
"響": "xiang",
"頀": "hu",
"頁": "ye",
"頂": "ding",
"頃": "qing",
"頄": "qiu",
"項": "xiang",
"順": "shun",
"頇": "han",
"須": "xu",
"頉": "yi",
"頊": "xu",
"頋": "e",
"頌": "song",
"頍": "kui",
"頎": "qi",
"頏": "hang",
"預": "yu",
"頑": "wan",
"頒": "ban",
"頓": "dun",
"頔": "di",
"頕": "dan",
"頖": "pan",
"頗": "po",
"領": "ling",
"頙": "che",
"頚": "jing",
"頛": "lei",
"頜": "he",
"頝": "qiao",
"頞": "e",
"頟": "e",
"頠": "wei",
"頡": "jie",
"頢": "kuo",
"頣": "shen",
"頤": "yi",
"頥": "yi",
"頦": "ke",
"頧": "dui",
"頨": "yu",
"頩": "ping",
"頪": "lei",
"頫": "fu",
"頬": "jia",
"頭": "tou",
"頮": "hui",
"頯": "kui",
"頰": "jia",
"頱": "luo",
"頲": "ting",
"頳": "cheng",
"頴": "ying",
"頵": "jun",
"頶": "hu",
"頷": "han",
"頸": "jing",
"頹": "tui",
"頺": "tui",
"頻": "bin",
"頼": "lai",
"頽": "tui",
"頾": "zi",
"頿": "zi",
"顀": "chui",
"顁": "ding",
"顂": "lai",
"顃": "tan",
"顄": "han",
"顅": "qian",
"顆": "ke",
"顇": "cui",
"顈": "jiong",
"顉": "qin",
"顊": "yi",
"顋": "sai",
"題": "ti",
"額": "e",
"顎": "e",
"顏": "yan",
"顐": "wen",
"顑": "kan",
"顒": "yong",
"顓": "zhuan",
"顔": "yan",
"顕": "xian",
"顖": "xin",
"顗": "yi",
"願": "yuan",
"顙": "sang",
"顚": "dian",
"顛": "dian",
"顜": "jiang",
"顝": "kui",
"類": "lei",
"顟": "lao",
"顠": "piao",
"顡": "wai",
"顢": "man",
"顣": "cu",
"顤": "yao",
"顥": "hao",
"顦": "qiao",
"顧": "gu",
"顨": "xun",
"顩": "yan",
"顪": "hui",
"顫": "chan",
"顬": "ru",
"顭": "meng",
"顮": "bin",
"顯": "xian",
"顰": "pin",
"顱": "lu",
"顲": "lan",
"顳": "nie",
"顴": "quan",
"页": "ye",
"顶": "ding",
"顷": "qing",
"顸": "han",
"项": "xiang",
"顺": "shun",
"须": "xu",
"顼": "xu",
"顽": "wan",
"顾": "gu",
"顿": "dun",
"颀": "qi",
"颁": "ban",
"颂": "song",
"颃": "hang",
"预": "yu",
"颅": "lu",
"领": "ling",
"颇": "po",
"颈": "jing",
"颉": "jie",
"颊": "jia",
"颋": "ting",
"颌": "he",
"颍": "ying",
"颎": "jiong",
"颏": "ke",
"颐": "yi",
"频": "pin",
"颒": "hui",
"颓": "tui",
"颔": "han",
"颕": "ying",
"颖": "ying",
"颗": "ke",
"题": "ti",
"颙": "yong",
"颚": "e",
"颛": "zhuan",
"颜": "yan",
"额": "e",
"颞": "nie",
"颟": "man",
"颠": "dian",
"颡": "sang",
"颢": "hao",
"颣": "lei",
"颤": "chan",
"颥": "ru",
"颦": "pin",
"颧": "quan",
"風": "feng",
"颩": "biao",
"颪": "gua",
"颫": "fu",
"颬": "xia",
"颭": "zhan",
"颮": "biao",
"颯": "sa",
"颰": "ba",
"颱": "tai",
"颲": "lie",
"颳": "gua",
"颴": "xuan",
"颵": "xiao",
"颶": "ju",
"颷": "biao",
"颸": "si",
"颹": "wei",
"颺": "yang",
"颻": "yao",
"颼": "sou",
"颽": "kai",
"颾": "sao",
"颿": "fan",
"飀": "liu",
"飁": "xi",
"飂": "liu",
"飃": "piao",
"飄": "piao",
"飅": "liu",
"飆": "biao",
"飇": "biao",
"飈": "biao",
"飉": "liao",
"飊": "biao",
"飋": "se",
"飌": "feng",
"飍": "xiu",
"风": "feng",
"飏": "yang",
"飐": "zhan",
"飑": "biao",
"飒": "sa",
"飓": "ju",
"飔": "si",
"飕": "sou",
"飖": "yao",
"飗": "liu",
"飘": "piao",
"飙": "biao",
"飚": "biao",
"飛": "fei",
"飜": "fan",
"飝": "fei",
"飞": "fei",
"食": "shi",
"飠": "shi",
"飡": "can",
"飢": "ji",
"飣": "ding",
"飤": "si",
"飥": "tuo",
"飦": "zhan",
"飧": "sun",
"飨": "xiang",
"飩": "tun",
"飪": "ren",
"飫": "yu",
"飬": "yang",
"飭": "chi",
"飮": "yin",
"飯": "fan",
"飰": "fan",
"飱": "sun",
"飲": "yin",
"飳": "zhu",
"飴": "yi",
"飵": "zuo",
"飶": "bi",
"飷": "jie",
"飸": "tao",
"飹": "bao",
"飺": "ci",
"飻": "tie",
"飼": "si",
"飽": "bao",
"飾": "shi",
"飿": "duo",
"餀": "hai",
"餁": "ren",
"餂": "tian",
"餃": "jiao",
"餄": "he",
"餅": "bing",
"餆": "yao",
"餇": "tong",
"餈": "ci",
"餉": "xiang",
"養": "yang",
"餋": "juan",
"餌": "er",
"餍": "yan",
"餎": "le",
"餏": "xi",
"餐": "can",
"餑": "bo",
"餒": "nei",
"餓": "e",
"餔": "bu",
"餕": "jun",
"餖": "dou",
"餗": "su",
"餘": "yu",
"餙": "shi",
"餚": "yao",
"餛": "hun",
"餜": "guo",
"餝": "shi",
"餞": "jian",
"餟": "chuo",
"餠": "bing",
"餡": "xian",
"餢": "bu",
"餣": "ye",
"餤": "dan",
"餥": "fei",
"餦": "zhang",
"餧": "wei",
"館": "guan",
"餩": "e",
"餪": "nuan",
"餫": "yun",
"餬": "hu",
"餭": "huang",
"餮": "tie",
"餯": "hui",
"餰": "jian",
"餱": "hou",
"餲": "ai",
"餳": "xing",
"餴": "fen",
"餵": "wei",
"餶": "gu",
"餷": "cha",
"餸": "song",
"餹": "tang",
"餺": "bo",
"餻": "gao",
"餼": "xi",
"餽": "kui",
"餾": "liu",
"餿": "sou",
"饀": "tao",
"饁": "ye",
"饂": "wen",
"饃": "mo",
"饄": "tang",
"饅": "man",
"饆": "bi",
"饇": "yu",
"饈": "xiu",
"饉": "jin",
"饊": "san",
"饋": "kui",
"饌": "zhuan",
"饍": "shan",
"饎": "xi",
"饏": "dan",
"饐": "yi",
"饑": "ji",
"饒": "rao",
"饓": "cheng",
"饔": "yong",
"饕": "tao",
"饖": "wei",
"饗": "xiang",
"饘": "zhan",
"饙": "fen",
"饚": "hai",
"饛": "meng",
"饜": "yan",
"饝": "mo",
"饞": "chan",
"饟": "xiang",
"饠": "luo",
"饡": "zan",
"饢": "nang",
"饣": "shi",
"饤": "ding",
"饥": "ji",
"饦": "tuo",
"饧": "xing",
"饨": "tun",
"饩": "xi",
"饪": "ren",
"饫": "yu",
"饬": "chi",
"饭": "fan",
"饮": "yin",
"饯": "jian",
"饰": "shi",
"饱": "bao",
"饲": "si",
"饳": "duo",
"饴": "yi",
"饵": "er",
"饶": "rao",
"饷": "xiang",
"饸": "he",
"饹": "ge",
"饺": "jiao",
"饻": "xi",
"饼": "bing",
"饽": "bo",
"饾": "dou",
"饿": "e",
"馀": "yu",
"馁": "nei",
"馂": "jun",
"馃": "guo",
"馄": "hun",
"馅": "xian",
"馆": "guan",
"馇": "cha",
"馈": "kui",
"馉": "gu",
"馊": "sou",
"馋": "chan",
"馌": "ye",
"馍": "mo",
"馎": "bo",
"馏": "liu",
"馐": "xiu",
"馑": "jin",
"馒": "man",
"馓": "san",
"馔": "zhuan",
"馕": "nang",
"首": "shou",
"馗": "kui",
"馘": "guo",
"香": "xiang",
"馚": "fen",
"馛": "bo",
"馜": "ni",
"馝": "bi",
"馞": "bo",
"馟": "tu",
"馠": "han",
"馡": "fei",
"馢": "jian",
"馣": "an",
"馤": "ai",
"馥": "fu",
"馦": "xian",
"馧": "yun",
"馨": "xin",
"馩": "fen",
"馪": "pin",
"馫": "xin",
"馬": "ma",
"馭": "yu",
"馮": "feng",
"馯": "han",
"馰": "di",
"馱": "tuo",
"馲": "tuo",
"馳": "chi",
"馴": "xun",
"馵": "zhu",
"馶": "zhi",
"馷": "pei",
"馸": "xin",
"馹": "ri",
"馺": "sa",
"馻": "yun",
"馼": "wen",
"馽": "zhi",
"馾": "dan",
"馿": "lu",
"駀": "you",
"駁": "bo",
"駂": "bao",
"駃": "jue",
"駄": "tuo",
"駅": "yi",
"駆": "qu",
"駇": "wen",
"駈": "qu",
"駉": "jiong",
"駊": "po",
"駋": "zhao",
"駌": "yuan",
"駍": "peng",
"駎": "zhou",
"駏": "ju",
"駐": "zhu",
"駑": "nu",
"駒": "ju",
"駓": "pi",
"駔": "zang",
"駕": "jia",
"駖": "ling",
"駗": "zhen",
"駘": "tai",
"駙": "fu",
"駚": "yang",
"駛": "shi",
"駜": "bi",
"駝": "tuo",
"駞": "tuo",
"駟": "si",
"駠": "liu",
"駡": "ma",
"駢": "pian",
"駣": "tao",
"駤": "zhi",
"駥": "rong",
"駦": "teng",
"駧": "dong",
"駨": "xun",
"駩": "quan",
"駪": "shen",
"駫": "jiong",
"駬": "er",
"駭": "hai",
"駮": "bo",
"駯": "zhu",
"駰": "yin",
"駱": "luo",
"駲": "zhou",
"駳": "dan",
"駴": "hai",
"駵": "liu",
"駶": "ju",
"駷": "song",
"駸": "qin",
"駹": "mang",
"駺": "liang",
"駻": "han",
"駼": "tu",
"駽": "xuan",
"駾": "tui",
"駿": "jun",
"騀": "e",
"騁": "cheng",
"騂": "xing",
"騃": "si",
"騄": "lu",
"騅": "zhui",
"騆": "zhou",
"騇": "she",
"騈": "pian",
"騉": "kun",
"騊": "tao",
"騋": "lai",
"騌": "zong",
"騍": "ke",
"騎": "qi",
"騏": "qi",
"騐": "yan",
"騑": "fei",
"騒": "sao",
"験": "yan",
"騔": "ge",
"騕": "yao",
"騖": "wu",
"騗": "pian",
"騘": "cong",
"騙": "pian",
"騚": "qian",
"騛": "fei",
"騜": "huang",
"騝": "qian",
"騞": "huo",
"騟": "yu",
"騠": "ti",
"騡": "quan",
"騢": "xia",
"騣": "zong",
"騤": "kui",
"騥": "rou",
"騦": "si",
"騧": "gua",
"騨": "tuo",
"騩": "gui",
"騪": "sou",
"騫": "qian",
"騬": "cheng",
"騭": "zhi",
"騮": "liu",
"騯": "peng",
"騰": "teng",
"騱": "xi",
"騲": "cao",
"騳": "du",
"騴": "yan",
"騵": "yuan",
"騶": "zou",
"騷": "sao",
"騸": "shan",
"騹": "qi",
"騺": "zhi",
"騻": "shuang",
"騼": "lu",
"騽": "xi",
"騾": "luo",
"騿": "zhang",
"驀": "mo",
"驁": "ao",
"驂": "can",
"驃": "piao",
"驄": "cong",
"驅": "qu",
"驆": "bi",
"驇": "zhi",
"驈": "yu",
"驉": "xu",
"驊": "hua",
"驋": "bo",
"驌": "su",
"驍": "xiao",
"驎": "lin",
"驏": "zhan",
"驐": "dun",
"驑": "liu",
"驒": "tuo",
"驓": "ceng",
"驔": "dian",
"驕": "jiao",
"驖": "tie",
"驗": "yan",
"驘": "luo",
"驙": "zhan",
"驚": "jing",
"驛": "yi",
"驜": "ye",
"驝": "tuo",
"驞": "pin",
"驟": "zhou",
"驠": "yan",
"驡": "long",
"驢": "lv",
"驣": "teng",
"驤": "xiang",
"驥": "ji",
"驦": "shuang",
"驧": "ju",
"驨": "xi",
"驩": "huan",
"驪": "li",
"驫": "biao",
"马": "ma",
"驭": "yu",
"驮": "tuo",
"驯": "xun",
"驰": "chi",
"驱": "qu",
"驲": "ri",
"驳": "bo",
"驴": "lv",
"驵": "zang",
"驶": "shi",
"驷": "si",
"驸": "fu",
"驹": "ju",
"驺": "zou",
"驻": "zhu",
"驼": "tuo",
"驽": "nu",
"驾": "jia",
"驿": "yi",
"骀": "tai",
"骁": "xiao",
"骂": "ma",
"骃": "yin",
"骄": "jiao",
"骅": "hua",
"骆": "luo",
"骇": "hai",
"骈": "pian",
"骉": "biao",
"骊": "li",
"骋": "cheng",
"验": "yan",
"骍": "xing",
"骎": "qin",
"骏": "jun",
"骐": "qi",
"骑": "qi",
"骒": "ke",
"骓": "zhui",
"骔": "zong",
"骕": "su",
"骖": "can",
"骗": "pian",
"骘": "zhi",
"骙": "kui",
"骚": "sao",
"骛": "wu",
"骜": "ao",
"骝": "liu",
"骞": "qian",
"骟": "shan",
"骠": "piao",
"骡": "luo",
"骢": "cong",
"骣": "chan",
"骤": "zhou",
"骥": "ji",
"骦": "shuang",
"骧": "xiang",
"骨": "gu",
"骩": "wei",
"骪": "wei",
"骫": "wei",
"骬": "yu",
"骭": "gan",
"骮": "yi",
"骯": "ang",
"骰": "tou",
"骱": "jie",
"骲": "bao",
"骳": "bei",
"骴": "ci",
"骵": "ti",
"骶": "di",
"骷": "ku",
"骸": "hai",
"骹": "qiao",
"骺": "hou",
"骻": "kua",
"骼": "ge",
"骽": "tui",
"骾": "geng",
"骿": "pian",
"髀": "bi",
"髁": "ke",
"髂": "qia",
"髃": "yu",
"髄": "sui",
"髅": "lou",
"髆": "bo",
"髇": "xiao",
"髈": "bang",
"髉": "bo",
"髊": "ci",
"髋": "kuan",
"髌": "bin",
"髍": "mo",
"髎": "liao",
"髏": "lou",
"髐": "xiao",
"髑": "du",
"髒": "zang",
"髓": "sui",
"體": "ti",
"髕": "bin",
"髖": "kuan",
"髗": "lu",
"高": "gao",
"髙": "gao",
"髚": "qiao",
"髛": "kao",
"髜": "qiao",
"髝": "lao",
"髞": "sao",
"髟": "biao",
"髠": "kun",
"髡": "kun",
"髢": "di",
"髣": "fang",
"髤": "xiu",
"髥": "ran",
"髦": "mao",
"髧": "dan",
"髨": "kun",
"髩": "bin",
"髪": "fa",
"髫": "tiao",
"髬": "pi",
"髭": "zi",
"髮": "fa",
"髯": "ran",
"髰": "ti",
"髱": "bao",
"髲": "bi",
"髳": "mao",
"髴": "fu",
"髵": "er",
"髶": "er",
"髷": "qu",
"髸": "gong",
"髹": "xiu",
"髺": "kuo",
"髻": "ji",
"髼": "peng",
"髽": "zhua",
"髾": "shao",
"髿": "sha",
"鬀": "ti",
"鬁": "li",
"鬂": "bin",
"鬃": "zong",
"鬄": "ti",
"鬅": "peng",
"鬆": "song",
"鬇": "zheng",
"鬈": "quan",
"鬉": "zong",
"鬊": "shun",
"鬋": "jian",
"鬌": "duo",
"鬍": "hu",
"鬎": "la",
"鬏": "jiu",
"鬐": "qi",
"鬑": "lian",
"鬒": "zhen",
"鬓": "bin",
"鬔": "peng",
"鬕": "ma",
"鬖": "san",
"鬗": "man",
"鬘": "man",
"鬙": "seng",
"鬚": "xu",
"鬛": "lie",
"鬜": "qian",
"鬝": "qian",
"鬞": "nong",
"鬟": "huan",
"鬠": "kuo",
"鬡": "ning",
"鬢": "bin",
"鬣": "lie",
"鬤": "rang",
"鬥": "dou",
"鬦": "dou",
"鬧": "nao",
"鬨": "hong",
"鬩": "xi",
"鬪": "dou",
"鬫": "kan",
"鬬": "dou",
"鬭": "dou",
"鬮": "jiu",
"鬯": "chang",
"鬰": "yu",
"鬱": "yu",
"鬲": "ge",
"鬳": "yan",
"鬴": "fu",
"鬵": "zeng",
"鬶": "gui",
"鬷": "zong",
"鬸": "liu",
"鬹": "gui",
"鬺": "shang",
"鬻": "yu",
"鬼": "gui",
"鬽": "mei",
"鬾": "ji",
"鬿": "qi",
"魀": "ga",
"魁": "kui",
"魂": "hun",
"魃": "ba",
"魄": "po",
"魅": "mei",
"魆": "xu",
"魇": "yan",
"魈": "xiao",
"魉": "liang",
"魊": "yu",
"魋": "tui",
"魌": "qi",
"魍": "wang",
"魎": "liang",
"魏": "wei",
"魐": "gan",
"魑": "chi",
"魒": "piao",
"魓": "bi",
"魔": "mo",
"魕": "ji",
"魖": "xu",
"魗": "chou",
"魘": "yan",
"魙": "zhan",
"魚": "yu",
"魛": "dao",
"魜": "ren",
"魝": "ji",
"魞": "ba",
"魟": "hong",
"魠": "tuo",
"魡": "diao",
"魢": "ji",
"魣": "yu",
"魤": "e",
"魥": "ji",
"魦": "sha",
"魧": "hang",
"魨": "tun",
"魩": "mo",
"魪": "jie",
"魫": "shen",
"魬": "ban",
"魭": "yuan",
"魮": "pi",
"魯": "lu",
"魰": "wen",
"魱": "hu",
"魲": "lu",
"魳": "za",
"魴": "fang",
"魵": "fen",
"魶": "na",
"魷": "you",
"魸": "pian",
"魹": "mo",
"魺": "he",
"魻": "xia",
"魼": "qu",
"魽": "han",
"魾": "pi",
"魿": "ling",
"鮀": "tuo",
"鮁": "ba",
"鮂": "qiu",
"鮃": "ping",
"鮄": "fu",
"鮅": "bi",
"鮆": "ci",
"鮇": "wei",
"鮈": "ju",
"鮉": "diao",
"鮊": "bo",
"鮋": "you",
"鮌": "gun",
"鮍": "pi",
"鮎": "nian",
"鮏": "xing",
"鮐": "tai",
"鮑": "bao",
"鮒": "fu",
"鮓": "zha",
"鮔": "ju",
"鮕": "gu",
"鮖": "shi",
"鮗": "dong",
"鮘": "chou",
"鮙": "ta",
"鮚": "jie",
"鮛": "shu",
"鮜": "hou",
"鮝": "xiang",
"鮞": "er",
"鮟": "an",
"鮠": "wei",
"鮡": "zhao",
"鮢": "zhu",
"鮣": "yin",
"鮤": "lie",
"鮥": "luo",
"鮦": "tong",
"鮧": "yi",
"鮨": "yi",
"鮩": "bing",
"鮪": "wei",
"鮫": "jiao",
"鮬": "ku",
"鮭": "gui",
"鮮": "xian",
"鮯": "ge",
"鮰": "hui",
"鮱": "lao",
"鮲": "fu",
"鮳": "kao",
"鮴": "xiu",
"鮵": "tuo",
"鮶": "jun",
"鮷": "ti",
"鮸": "mian",
"鮹": "shao",
"鮺": "zha",
"鮻": "suo",
"鮼": "qin",
"鮽": "yu",
"鮾": "nei",
"鮿": "zhe",
"鯀": "gun",
"鯁": "geng",
"鯂": "su",
"鯃": "wu",
"鯄": "qiu",
"鯅": "shan",
"鯆": "pu",
"鯇": "huan",
"鯈": "tiao",
"鯉": "li",
"鯊": "sha",
"鯋": "sha",
"鯌": "kao",
"鯍": "meng",
"鯎": "cheng",
"鯏": "li",
"鯐": "zou",
"鯑": "xi",
"鯒": "yong",
"鯓": "shen",
"鯔": "zi",
"鯕": "qi",
"鯖": "qing",
"鯗": "xiang",
"鯘": "nei",
"鯙": "chun",
"鯚": "ji",
"鯛": "diao",
"鯜": "qie",
"鯝": "gu",
"鯞": "zhou",
"鯟": "dong",
"鯠": "lai",
"鯡": "fei",
"鯢": "ni",
"鯣": "yi",
"鯤": "kun",
"鯥": "lu",
"鯦": "jiu",
"鯧": "chang",
"鯨": "jing",
"鯩": "lun",
"鯪": "ling",
"鯫": "zou",
"鯬": "li",
"鯭": "meng",
"鯮": "zong",
"鯯": "zhi",
"鯰": "nian",
"鯱": "hu",
"鯲": "yu",
"鯳": "di",
"鯴": "shi",
"鯵": "shen",
"鯶": "huan",
"鯷": "ti",
"鯸": "hou",
"鯹": "xing",
"鯺": "zhu",
"鯻": "la",
"鯼": "zong",
"鯽": "ji",
"鯾": "bian",
"鯿": "bian",
"鰀": "huan",
"鰁": "quan",
"鰂": "zei",
"鰃": "wei",
"鰄": "wei",
"鰅": "yu",
"鰆": "chun",
"鰇": "rou",
"鰈": "die",
"鰉": "huang",
"鰊": "lian",
"鰋": "yan",
"鰌": "qiu",
"鰍": "qiu",
"鰎": "jian",
"鰏": "bi",
"鰐": "e",
"鰑": "yang",
"鰒": "fu",
"鰓": "sai",
"鰔": "jian",
"鰕": "xia",
"鰖": "tuo",
"鰗": "hu",
"鰘": "shi",
"鰙": "ruo",
"鰚": "xuan",
"鰛": "wen",
"鰜": "jian",
"鰝": "hao",
"鰞": "wu",
"鰟": "pang",
"鰠": "sao",
"鰡": "liu",
"鰢": "ma",
"鰣": "shi",
"鰤": "shi",
"鰥": "guan",
"鰦": "zi",
"鰧": "teng",
"鰨": "ta",
"鰩": "yao",
"鰪": "e",
"鰫": "yong",
"鰬": "qian",
"鰭": "qi",
"鰮": "wen",
"鰯": "ruo",
"鰰": "shen",
"鰱": "lian",
"鰲": "ao",
"鰳": "le",
"鰴": "hui",
"鰵": "min",
"鰶": "ji",
"鰷": "tiao",
"鰸": "qu",
"鰹": "jian",
"鰺": "shen",
"鰻": "man",
"鰼": "xi",
"鰽": "qiu",
"鰾": "piao",
"鰿": "ji",
"鱀": "ji",
"鱁": "zhu",
"鱂": "jiang",
"鱃": "xiu",
"鱄": "zhuan",
"鱅": "yong",
"鱆": "zhang",
"鱇": "kang",
"鱈": "xue",
"鱉": "bie",
"鱊": "yu",
"鱋": "qu",
"鱌": "xiang",
"鱍": "bo",
"鱎": "jiao",
"鱏": "xun",
"鱐": "su",
"鱑": "huang",
"鱒": "zun",
"鱓": "shan",
"鱔": "shan",
"鱕": "fan",
"鱖": "gui",
"鱗": "lin",
"鱘": "xun",
"鱙": "yao",
"鱚": "xi",
"鱛": "zeng",
"鱜": "xiang",
"鱝": "fen",
"鱞": "guan",
"鱟": "hou",
"鱠": "kuai",
"鱡": "zei",
"鱢": "sao",
"鱣": "zhan",
"鱤": "gan",
"鱥": "gui",
"鱦": "ying",
"鱧": "li",
"鱨": "chang",
"鱩": "lei",
"鱪": "shu",
"鱫": "ai",
"鱬": "ru",
"鱭": "ji",
"鱮": "xu",
"鱯": "hu",
"鱰": "shu",
"鱱": "li",
"鱲": "lie",
"鱳": "le",
"鱴": "mie",
"鱵": "zhen",
"鱶": "xiang",
"鱷": "e",
"鱸": "lu",
"鱹": "guan",
"鱺": "li",
"鱻": "xian",
"鱼": "yu",
"鱽": "dao",
"鱾": "ji",
"鱿": "you",
"鲀": "tun",
"鲁": "lu",
"鲂": "fang",
"鲃": "ba",
"鲄": "he",
"鲅": "ba",
"鲆": "ping",
"鲇": "nian",
"鲈": "lu",
"鲉": "you",
"鲊": "zha",
"鲋": "fu",
"鲌": "bo",
"鲍": "bao",
"鲎": "hou",
"鲏": "pi",
"鲐": "tai",
"鲑": "gui",
"鲒": "jie",
"鲓": "kao",
"鲔": "wei",
"鲕": "er",
"鲖": "tong",
"鲗": "zei",
"鲘": "hou",
"鲙": "kuai",
"鲚": "ji",
"鲛": "jiao",
"鲜": "xian",
"鲝": "zha",
"鲞": "xiang",
"鲟": "xun",
"鲠": "geng",
"鲡": "li",
"鲢": "lian",
"鲣": "jian",
"鲤": "li",
"鲥": "shi",
"鲦": "tiao",
"鲧": "gun",
"鲨": "sha",
"鲩": "huan",
"鲪": "jun",
"鲫": "ji",
"鲬": "yong",
"鲭": "qing",
"鲮": "ling",
"鲯": "qi",
"鲰": "zou",
"鲱": "fei",
"鲲": "kun",
"鲳": "chang",
"鲴": "gu",
"鲵": "ni",
"鲶": "nian",
"鲷": "diao",
"鲸": "jing",
"鲹": "shen",
"鲺": "shi",
"鲻": "zi",
"鲼": "fen",
"鲽": "die",
"鲾": "bi",
"鲿": "chang",
"鳀": "ti",
"鳁": "wen",
"鳂": "wei",
"鳃": "sai",
"鳄": "e",
"鳅": "qiu",
"鳆": "fu",
"鳇": "huang",
"鳈": "quan",
"鳉": "jiang",
"鳊": "bian",
"鳋": "sao",
"鳌": "ao",
"鳍": "qi",
"鳎": "ta",
"鳏": "guan",
"鳐": "yao",
"鳑": "pang",
"鳒": "jian",
"鳓": "le",
"鳔": "biao",
"鳕": "xue",
"鳖": "bie",
"鳗": "man",
"鳘": "min",
"鳙": "yong",
"鳚": "wei",
"鳛": "xi",
"鳜": "gui",
"鳝": "shan",
"鳞": "lin",
"鳟": "zun",
"鳠": "hu",
"鳡": "gan",
"鳢": "li",
"鳣": "zhan",
"鳤": "guan",
"鳥": "niao",
"鳦": "yi",
"鳧": "fu",
"鳨": "li",
"鳩": "jiu",
"鳪": "bu",
"鳫": "yan",
"鳬": "fu",
"鳭": "diao",
"鳮": "ji",
"鳯": "feng",
"鳰": "ru",
"鳱": "gan",
"鳲": "shi",
"鳳": "feng",
"鳴": "ming",
"鳵": "bao",
"鳶": "yuan",
"鳷": "zhi",
"鳸": "hu",
"鳹": "qin",
"鳺": "fu",
"鳻": "ban",
"鳼": "wen",
"鳽": "jian",
"鳾": "shi",
"鳿": "yu",
"鴀": "fou",
"鴁": "yao",
"鴂": "jue",
"鴃": "jue",
"鴄": "pi",
"鴅": "huan",
"鴆": "zhen",
"鴇": "bao",
"鴈": "yan",
"鴉": "ya",
"鴊": "zheng",
"鴋": "fang",
"鴌": "feng",
"鴍": "wen",
"鴎": "ou",
"鴏": "dai",
"鴐": "jia",
"鴑": "ru",
"鴒": "ling",
"鴓": "mie",
"鴔": "fu",
"鴕": "tuo",
"鴖": "min",
"鴗": "li",
"鴘": "bian",
"鴙": "zhi",
"鴚": "ge",
"鴛": "yuan",
"鴜": "ci",
"鴝": "qu",
"鴞": "xiao",
"鴟": "chi",
"鴠": "dan",
"鴡": "ju",
"鴢": "yao",
"鴣": "gu",
"鴤": "zhong",
"鴥": "yu",
"鴦": "yang",
"鴧": "yu",
"鴨": "ya",
"鴩": "die",
"鴪": "yu",
"鴫": "tian",
"鴬": "ying",
"鴭": "dui",
"鴮": "wu",
"鴯": "er",
"鴰": "gua",
"鴱": "ai",
"鴲": "zhi",
"鴳": "yan",
"鴴": "heng",
"鴵": "xiao",
"鴶": "jia",
"鴷": "lie",
"鴸": "zhu",
"鴹": "yang",
"鴺": "yi",
"鴻": "hong",
"鴼": "lu",
"鴽": "ru",
"鴾": "mou",
"鴿": "ge",
"鵀": "ren",
"鵁": "jiao",
"鵂": "xiu",
"鵃": "zhou",
"鵄": "chi",
"鵅": "luo",
"鵆": "heng",
"鵇": "nian",
"鵈": "e",
"鵉": "luan",
"鵊": "jia",
"鵋": "ji",
"鵌": "tu",
"鵍": "huan",
"鵎": "tuo",
"鵏": "bu",
"鵐": "wu",
"鵑": "jian",
"鵒": "yu",
"鵓": "bo",
"鵔": "jun",
"鵕": "jun",
"鵖": "bi",
"鵗": "xi",
"鵘": "jun",
"鵙": "ju",
"鵚": "tu",
"鵛": "jing",
"鵜": "ti",
"鵝": "e",
"鵞": "e",
"鵟": "kuang",
"鵠": "hu",
"鵡": "wu",
"鵢": "shen",
"鵣": "lai",
"鵤": "zan",
"鵥": "pan",
"鵦": "lu",
"鵧": "pi",
"鵨": "shu",
"鵩": "fu",
"鵪": "an",
"鵫": "zhuo",
"鵬": "peng",
"鵭": "qin",
"鵮": "qian",
"鵯": "bei",
"鵰": "diao",
"鵱": "lu",
"鵲": "que",
"鵳": "jian",
"鵴": "ju",
"鵵": "tu",
"鵶": "ya",
"鵷": "yuan",
"鵸": "qi",
"鵹": "li",
"鵺": "ye",
"鵻": "zhui",
"鵼": "kong",
"鵽": "duo",
"鵾": "kun",
"鵿": "sheng",
"鶀": "qi",
"鶁": "jing",
"鶂": "yi",
"鶃": "yi",
"鶄": "jing",
"鶅": "zi",
"鶆": "lai",
"鶇": "dong",
"鶈": "qi",
"鶉": "chun",
"鶊": "geng",
"鶋": "ju",
"鶌": "qu",
"鶍": "yi",
"鶎": "zun",
"鶏": "ji",
"鶐": "shu",
"鶑": "ying",
"鶒": "chi",
"鶓": "miao",
"鶔": "rou",
"鶕": "an",
"鶖": "qiu",
"鶗": "ti",
"鶘": "hu",
"鶙": "ti",
"鶚": "e",
"鶛": "jie",
"鶜": "mao",
"鶝": "fu",
"鶞": "chun",
"鶟": "tu",
"鶠": "yan",
"鶡": "he",
"鶢": "yuan",
"鶣": "pian",
"鶤": "kun",
"鶥": "mei",
"鶦": "hu",
"鶧": "ying",
"鶨": "chuan",
"鶩": "wu",
"鶪": "ju",
"鶫": "dong",
"鶬": "cang",
"鶭": "fang",
"鶮": "he",
"鶯": "ying",
"鶰": "yuan",
"鶱": "xian",
"鶲": "weng",
"鶳": "shi",
"鶴": "he",
"鶵": "chu",
"鶶": "tang",
"鶷": "xia",
"鶸": "ruo",
"鶹": "liu",
"鶺": "ji",
"鶻": "gu",
"鶼": "jian",
"鶽": "sun",
"鶾": "han",
"鶿": "ci",
"鷀": "ci",
"鷁": "yi",
"鷂": "yao",
"鷃": "yan",
"鷄": "ji",
"鷅": "li",
"鷆": "tian",
"鷇": "kou",
"鷈": "ti",
"鷉": "ti",
"鷊": "yi",
"鷋": "tu",
"鷌": "ma",
"鷍": "xiao",
"鷎": "gao",
"鷏": "tian",
"鷐": "chen",
"鷑": "ji",
"鷒": "tuan",
"鷓": "zhe",
"鷔": "ao",
"鷕": "yao",
"鷖": "yi",
"鷗": "ou",
"鷘": "chi",
"鷙": "zhi",
"鷚": "liu",
"鷛": "yong",
"鷜": "lou",
"鷝": "bi",
"鷞": "shuang",
"鷟": "zhuo",
"鷠": "yu",
"鷡": "wu",
"鷢": "jue",
"鷣": "yin",
"鷤": "ti",
"鷥": "si",
"鷦": "jiao",
"鷧": "yi",
"鷨": "hua",
"鷩": "bi",
"鷪": "ying",
"鷫": "su",
"鷬": "huang",
"鷭": "fan",
"鷮": "jiao",
"鷯": "liao",
"鷰": "yan",
"鷱": "gao",
"鷲": "jiu",
"鷳": "xian",
"鷴": "xian",
"鷵": "tu",
"鷶": "mai",
"鷷": "zun",
"鷸": "yu",
"鷹": "ying",
"鷺": "lu",
"鷻": "tuan",
"鷼": "xian",
"鷽": "xue",
"鷾": "yi",
"鷿": "pi",
"鸀": "zhu",
"鸁": "luo",
"鸂": "xi",
"鸃": "yi",
"鸄": "ji",
"鸅": "ze",
"鸆": "yu",
"鸇": "zhan",
"鸈": "ye",
"鸉": "yang",
"鸊": "pi",
"鸋": "ning",
"鸌": "hu",
"鸍": "mi",
"鸎": "ying",
"鸏": "meng",
"鸐": "di",
"鸑": "yue",
"鸒": "yu",
"鸓": "lei",
"鸔": "bu",
"鸕": "lu",
"鸖": "he",
"鸗": "long",
"鸘": "shuang",
"鸙": "yue",
"鸚": "ying",
"鸛": "guan",
"鸜": "qu",
"鸝": "li",
"鸞": "luan",
"鸟": "niao",
"鸠": "jiu",
"鸡": "ji",
"鸢": "yuan",
"鸣": "ming",
"鸤": "shi",
"鸥": "ou",
"鸦": "ya",
"鸧": "cang",
"鸨": "bao",
"鸩": "zhen",
"鸪": "gu",
"鸫": "dong",
"鸬": "lu",
"鸭": "ya",
"鸮": "xiao",
"鸯": "yang",
"鸰": "ling",
"鸱": "chi",
"鸲": "qu",
"鸳": "yuan",
"鸴": "xue",
"鸵": "tuo",
"鸶": "si",
"鸷": "zhi",
"鸸": "er",
"鸹": "gua",
"鸺": "xiu",
"鸻": "heng",
"鸼": "zhou",
"鸽": "ge",
"鸾": "luan",
"鸿": "hong",
"鹀": "wu",
"鹁": "bo",
"鹂": "li",
"鹃": "juan",
"鹄": "hu",
"鹅": "e",
"鹆": "yu",
"鹇": "xian",
"鹈": "ti",
"鹉": "wu",
"鹊": "que",
"鹋": "miao",
"鹌": "an",
"鹍": "kun",
"鹎": "bei",
"鹏": "peng",
"鹐": "qian",
"鹑": "chun",
"鹒": "geng",
"鹓": "yuan",
"鹔": "su",
"鹕": "hu",
"鹖": "he",
"鹗": "e",
"鹘": "gu",
"鹙": "qiu",
"鹚": "ci",
"鹛": "mei",
"鹜": "wu",
"鹝": "yi",
"鹞": "yao",
"鹟": "weng",
"鹠": "liu",
"鹡": "ji",
"鹢": "yi",
"鹣": "jian",
"鹤": "he",
"鹥": "yi",
"鹦": "ying",
"鹧": "zhe",
"鹨": "liu",
"鹩": "liao",
"鹪": "jiao",
"鹫": "jiu",
"鹬": "yu",
"鹭": "lu",
"鹮": "huan",
"鹯": "zhan",
"鹰": "ying",
"鹱": "hu",
"鹲": "meng",
"鹳": "guan",
"鹴": "shuang",
"鹵": "lu",
"鹶": "jin",
"鹷": "ling",
"鹸": "jian",
"鹹": "xian",
"鹺": "cuo",
"鹻": "jian",
"鹼": "jian",
"鹽": "yan",
"鹾": "cuo",
"鹿": "lu",
"麀": "you",
"麁": "cu",
"麂": "ji",
"麃": "pao",
"麄": "cu",
"麅": "pao",
"麆": "zhu",
"麇": "jun",
"麈": "zhu",
"麉": "jian",
"麊": "mi",
"麋": "mi",
"麌": "yu",
"麍": "liu",
"麎": "chen",
"麏": "jun",
"麐": "lin",
"麑": "ni",
"麒": "qi",
"麓": "lu",
"麔": "jiu",
"麕": "jun",
"麖": "jing",
"麗": "li",
"麘": "xiang",
"麙": "xian",
"麚": "jia",
"麛": "mi",
"麜": "li",
"麝": "she",
"麞": "zhang",
"麟": "lin",
"麠": "jing",
"麡": "qi",
"麢": "ling",
"麣": "yan",
"麤": "cu",
"麥": "mai",
"麦": "mai",
"麧": "he",
"麨": "chao",
"麩": "fu",
"麪": "mian",
"麫": "mian",
"麬": "fu",
"麭": "pao",
"麮": "qu",
"麯": "qu",
"麰": "mou",
"麱": "fu",
"麲": "xian",
"麳": "lai",
"麴": "qu",
"麵": "mian",
"麶": "chi",
"麷": "feng",
"麸": "fu",
"麹": "qu",
"麺": "mian",
"麻": "ma",
"麼": "mo",
"麽": "mo",
"麾": "hui",
"麿": "mi",
"黀": "zou",
"黁": "nun",
"黂": "fen",
"黃": "huang",
"黄": "huang",
"黅": "jin",
"黆": "guang",
"黇": "tian",
"黈": "tou",
"黉": "hong",
"黊": "hua",
"黋": "kuang",
"黌": "hong",
"黍": "shu",
"黎": "li",
"黏": "nian",
"黐": "chi",
"黑": "hei",
"黒": "hei",
"黓": "yi",
"黔": "qian",
"黕": "dan",
"黖": "xi",
"黗": "tun",
"默": "mo",
"黙": "mo",
"黚": "qian",
"黛": "dai",
"黜": "chu",
"黝": "you",
"點": "dian",
"黟": "yi",
"黠": "xia",
"黡": "yan",
"黢": "qu",
"黣": "mei",
"黤": "yan",
"黥": "qing",
"黦": "yue",
"黧": "li",
"黨": "dang",
"黩": "du",
"黪": "can",
"黫": "yan",
"黬": "yan",
"黭": "yan",
"黮": "dan",
"黯": "an",
"黰": "zhen",
"黱": "dai",
"黲": "can",
"黳": "yi",
"黴": "mei",
"黵": "dan",
"黶": "yan",
"黷": "du",
"黸": "lu",
"黹": "zhi",
"黺": "fen",
"黻": "fu",
"黼": "fu",
"黽": "min",
"黾": "min",
"黿": "yuan",
"鼀": "cu",
"鼁": "qu",
"鼂": "chao",
"鼃": "wa",
"鼄": "zhu",
"鼅": "zhi",
"鼆": "meng",
"鼇": "ao",
"鼈": "bie",
"鼉": "tuo",
"鼊": "bi",
"鼋": "yuan",
"鼌": "chao",
"鼍": "tuo",
"鼎": "ding",
"鼏": "mi",
"鼐": "nai",
"鼑": "ding",
"鼒": "zi",
"鼓": "gu",
"鼔": "gu",
"鼕": "dong",
"鼖": "fen",
"鼗": "tao",
"鼘": "yuan",
"鼙": "pi",
"鼚": "chang",
"鼛": "gao",
"鼜": "cao",
"鼝": "yuan",
"鼞": "tang",
"鼟": "teng",
"鼠": "shu",
"鼡": "shu",
"鼢": "fen",
"鼣": "fei",
"鼤": "wen",
"鼥": "ba",
"鼦": "diao",
"鼧": "tuo",
"鼨": "zhong",
"鼩": "qu",
"鼪": "sheng",
"鼫": "shi",
"鼬": "you",
"鼭": "shi",
"鼮": "ting",
"鼯": "wu",
"鼰": "ju",
"鼱": "jing",
"鼲": "hun",
"鼳": "ju",
"鼴": "yan",
"鼵": "tu",
"鼶": "si",
"鼷": "xi",
"鼸": "xian",
"鼹": "yan",
"鼺": "lei",
"鼻": "bi",
"鼼": "yao",
"鼽": "qiu",
"鼾": "han",
"鼿": "wu",
"齀": "wu",
"齁": "hou",
"齂": "xie",
"齃": "e",
"齄": "zha",
"齅": "xiu",
"齆": "weng",
"齇": "zha",
"齈": "nong",
"齉": "nang",
"齊": "qi",
"齋": "zhai",
"齌": "ji",
"齍": "zi",
"齎": "ji",
"齏": "ji",
"齐": "qi",
"齑": "ji",
"齒": "chi",
"齓": "chen",
"齔": "chen",
"齕": "he",
"齖": "ya",
"齗": "yin",
"齘": "xie",
"齙": "bao",
"齚": "ze",
"齛": "xie",
"齜": "zi",
"齝": "chi",
"齞": "yan",
"齟": "ju",
"齠": "tiao",
"齡": "ling",
"齢": "ling",
"齣": "chu",
"齤": "quan",
"齥": "xie",
"齦": "yin",
"齧": "nie",
"齨": "jiu",
"齩": "yao",
"齪": "chuo",
"齫": "yun",
"齬": "yu",
"齭": "chu",
"齮": "yi",
"齯": "ni",
"齰": "ze",
"齱": "zou",
"齲": "qu",
"齳": "yun",
"齴": "yan",
"齵": "yu",
"齶": "e",
"齷": "wo",
"齸": "yi",
"齹": "ci",
"齺": "zou",
"齻": "dian",
"齼": "chu",
"齽": "jin",
"齾": "ya",
"齿": "chi",
"龀": "chen",
"龁": "he",
"龂": "yin",
"龃": "ju",
"龄": "ling",
"龅": "bao",
"龆": "tiao",
"龇": "zi",
"龈": "yin",
"龉": "yu",
"龊": "chuo",
"龋": "qu",
"龌": "wo",
"龍": "long",
"龎": "pang",
"龏": "gong",
"龐": "pang",
"龑": "yan",
"龒": "long",
"龓": "long",
"龔": "gong",
"龕": "kan",
"龖": "da",
"龗": "ling",
"龘": "da",
"龙": "long",
"龚": "gong",
"龛": "kan",
"龜": "gui",
"龝": "qiu",
"龞": "bie",
"龟": "gui",
"龠": "yue",
"龡": "chui",
"龢": "he",
"龣": "jiao",
"龤": "xie",
"龦": "chang",
"龧": "shu",
"龨": "huai",
"龪": "zhan",
"龫": "gan",
"龮": "ji",
"龯": "yue",
"龰": "zou",
"龱": "si",
"龲": "ku",
"龵": "shou",
"龷": "gong",
"龹": "juan",
"龺": "zhuo",
"龻": "luan",
"鿃": "shan",
"鿄": "liang",
"鿌": "liang",
"羽": "yu",
"﨤": "ji",
"﨩": "dao",
}
emoji = {
"chuizi": "🔨",
"xiao": "😄",
"weixiao": "😊",
"kaixin": "😃",
"zhayan": "😉",
"xihuan": "😍",
"feiwen": "😘",
"qin": "😚",
"tiaopi": "😜",
"han": "😓",
"nanguo": "😔",
"xiaren": "😥",
"nanshou": "😰",
"ku": "😭",
"xiao": "😂",
"emo": "👿",
"waixingren": "👽",
"xindong": "💗",
"lv": "💚",
"ai": "❤",
"xinsui": "💔",
"xindong": "💓",
"xing": "✨",
"xing": "🌟",
"shengqi": "💢",
"!": "❕",
"?": "¿",
"!": "❕",
"?": "¿",
"shuijiao": "💤",
"fangpi": "💨",
"di": "💦",
"shui": "💦",
"yinyue": "🎶",
"huo": "🔥",
"shit": "💩",
"shi": "💩",
"dabian": "💩",
"damuzhi": "👍",
"bang": "👍",
"cai": "👎",
"ok": "👌",
"quan": "👊",
"shou": "✊",
"yeah": "✌",
"bie": "👋",
"buxing": "👋",
"zhang": "✋",
"ting": "👋",
"shuangshou": "👐",
"chuo": "👆",
"shangmian": "👆",
"youmian": "👉",
"zuomian": "👈",
"shuangshou": "🙌",
"zhufu": "🙏",
"baoyou": "🙏",
"yi": "☝",
"paishou": "👏",
"guzhang": "👏",
"gebo": "💪",
"li": "💪",
"zoulu": "🚶",
"paobu": "🏃",
"qianshou": "👫",
"baotou": "🙆",
"buxing": "🙅",
"lingguangyixian": "🙇",
"xianglian": "💏",
"xianglian": "💑",
"jianfa": "💇",
"nv": "👧",
"didi": "👶",
"xiaohai": "👶",
"nainai": "👵",
"yeye": "👴",
"ye": "👴",
"baba": "👱",
"jingcha": "👮",
"tianshi": "👼",
"nvwang": "👸",
"si": "💀",
"wen": "💋",
"qinwen": "💋",
"zui": "👄",
"zuichun": "👄",
"er": "👂",
"erduo": "👂",
"kan": "👀",
"bi": "👃",
"shubao": "🎒",
"nangua": "🎃",
"gui": "👻",
"shengdanshu": "🎄",
"liwu": "🎁",
"lingdang": "🔔",
"gongxi": "🎉",
"qiqiu": "🎈",
"guangpan": "💿",
"guangpan": "📀",
"xiangji": "📷",
"shengxiangji": "🎥",
"diannao": "💻",
"dianshi": "📺",
"shouji": "📱",
"dianhua": "📠",
"dianhua": "☎",
"laba": "🔊",
"laba": "📢",
"laba": "📣",
"shouyinji": "📻",
"leida": "📡",
"fangda": "🔍",
"jiesuo": "🔓",
"suo": "🔒",
"yaochi": "🔑",
"yaoshi": "🔑",
"jiandao": "✂",
"dingyin": "🔨",
"liang": "💡",
"shouxin": "📩",
"xinxiang": "📫",
"paozao": "🛀",
"cesuo": "🚽",
"qian": "💰",
"xiyan": "🚬",
"zhadan": "💣",
"qiang": "🔫",
"yao": "💊",
"zhen": "💉",
"ganlanqiu": "🏈",
"qiu": "🏀",
"lanqiu": "🏀",
"zuqiu": "⚽",
"bangqiu": "⚾",
"wangqiu": "🎾",
"ba": "🎱",
"taiqiu": "🎱",
"youyong": "🏊",
"chonglang": "🏄",
"huaxue": "🎿",
"heitao": "♠",
"hongtao": "♥",
"meihua": "♣",
"fangpian": "♦",
"guanjun": "🏆",
"mingzhong": "🎯",
"zhong": "🀄",
"jilu": "📝",
"shu": "📖",
"yanse": "🎨",
"changge": "🎤",
"tingyinyue": "🎧",
"jita": "🎸",
"xiezi": "👟",
"gaogenxie": "👠",
"xuezi": "👢",
"yifu": "👕",
"qunzi": "👗",
"hanfu": "👘",
"bijini": "👙",
"hudiejie": "🎀",
"gaomao": "🎩",
"huangguan": "👑",
"caomao": "👒",
"yusan": "🌂",
"shoutidai": "💼",
"shoutidai": "👜",
"kouhong": "💄",
"jiezhi": "💍",
"zuanshi": "💎",
"cha": "☕",
"pi": "🍺",
"ganbei": "🍻",
"chazi": "🍴",
"hanbaobao": "🍔",
"shutiao": "🍟",
"miantiao": "🍝",
"mifan": "🍚",
"miantiao": "🍜",
"mianbao": "🍞",
"jiandan": "🍳",
"bingjilin": "🍦",
"shengdai": "🍧",
"dangao": "🎂",
"yikuaidangao": "🍰",
"pingguo": "🍎",
"juzi": "🍊",
"xigua": "🍉",
"caomei": "🍓",
"qiezi": "🍆",
"xihongshi": "🍅",
"taiyang": "☀",
"xiayu": "☔",
"yintian": "☁",
"xueren": "⛄",
"yueliang": "🌙",
"shandian": "⚡",
"lang": "🌊",
"miao": "🐱",
"ao": "🐱",
"gou": "🐶",
"shu": "🐭",
"tuzi": "🐰",
"qinghua": "🐸",
"laohu": "🐯",
"kaola": "🐨",
"zhu": "🐷",
"niu": "🐮",
"zhu": "🐗",
"hao": "🐵",
"hou": "🐒",
"ma": "🐴",
"ma": "🐎",
"loutuo": "🐫",
"yang": "🐑",
"xiang": "🐘",
"she": "🐍",
"niao": "🐦",
"xiaoji": "🐤",
"ji": "🐔",
"chong": "🐛",
"zhangyu": "🐙",
"hua": "🌸",
"meigui": "🌹",
"xiangrikui": "🌻",
"fengye": "🍁",
"xianrenzhang": "🌵",
"hailuo": "🐚",
"one": "1⃣",
"1": "1⃣",
"yi": "1⃣",
"two": "2⃣",
"2": "2⃣",
"er": "2⃣",
"three": "3⃣",
"3": "3⃣",
"san": "3⃣",
"four": "4⃣",
"4": "4⃣",
"si": "4⃣",
"five": "5⃣",
"5": "5⃣",
"wu": "5⃣",
"six": "6⃣",
"6": "6⃣",
"liu": "6⃣",
"seven": "7⃣",
"7": "7⃣",
"qi": "7⃣",
"eight": "8⃣",
"8": "8⃣",
"ba": "8⃣",
"nine": "9⃣",
"9": "9⃣",
"jiu": "9⃣",
"ten": "0⃣",
"0": "0⃣",
"ling": "0⃣",
"shang": "⬆",
"xia": "⬇",
"zuo": "⬅",
"you": "➡",
"youshang": "↗",
"zuoshang": "↖",
"youxia": "↘",
"zuoxia": "↙",
"houtui": "⏪",
"qianjin": "⏩",
"ok": "🆗",
"new": "🆕",
"top": "🔝",
"up": "🆙",
"xinhao": "📶",
"man": "🈵",
"kong": "🈳",
"de": "🉐",
"ge": "🈹",
"zhi": "🈯",
"gong": "🈺",
"you": "🈶",
"wu": "🈚",
"yue": "🈷",
"shen": "🈸",
"wc": "🚾",
"mi": "㊙",
"zhu": "㊗",
"shenfen": "🆔",
"jiaoliang": "🆚",
"guanji": "📴",
"gupiao": "💹",
"huilv": "💱",
"a": "🅰",
"b": "🅱",
"ab": "🆎",
"ou": "🅾",
"hei": "🔲",
"lingdian": "🕛",
"yidian": "🕐",
"erdian": "🕑",
"sandian": "🕒",
"sidian": "🕓",
"wudian": "🕔",
"liudian": "🕕",
"qidian": "🕖",
"badian": "🕗",
"jiudian": "🕘",
"shidian": "🕙",
"shiyidian": "🕚",
"tema": "™",
"tama": "™",
"fangzi": "🏠",
"yiyuan": "🏥",
"yinhang": "🏦",
"jiudian": "🏪",
"yiyuan": "🏩",
"yiyuan": "🏨",
"jiaotang": "💒",
"qukuanji": "🏧",
"caihong": "🌈",
"motianlun": "🎡",
"guoshanche": "🎢",
"youlun": "🚢",
"feiji": "✈",
"zixingche": "🚲",
"qiche": "🚙",
"jiaoche": "🚗",
"chuzuche": "🚕",
"gongjiaoche": "🚌",
"jingche": "🚓",
"xiaofangche": "🚒",
"jiuhuche": "🚑",
"huoche": "🚚",
"huochezhan": "🚉",
"gaotie": "🚄",
"dongche": "🚅",
"jiayouzhan": "⛽",
"houlvdeng": "🚥",
"jinggao": "⚠",
"java": "♨",
"riben": "🇯🇵",
"hanguo": "🇰🇷",
"zhongguo": "🇨🇳",
"meiguo": "🇺🇸",
"yingguo": "🇬🇧",
"na": "🌶",
"la": "🌶",
"nan": "♂",
"nv": "♀",
"yan": "👁",
"jiao": "👣",
"zhuang": "☄",
"que": "🍆",
"bu": "🧣",
"cuo": "👏",
"tang": "🍬",
"ban": "🍝",
"pa": "爪巴",
"jiang": "🥇",
"ming": "💗",
"xin": "💚",
"feng": "💨",
"dai": "⛑",
"tou": "🧑",
"wo": "🤝",
"mo": "👻",
"ri": "🌞",
"tie": "🛠",
"dao": "🗾",
"pao": "🏃",
"zhe": "🌤",
"wei": "🥀",
"peng": "💥",
"mai": "💸",
"pian": "🃏",
"cheng": "🆗",
"zei": "🤑",
"ni": "👇",
"qing": "🎈",
"lan": "🚫",
"kuai": "🚀",
"chou": "🤡",
"kai": "▶",
"che": "🚙",
"kaiche": "🔞",
"shun": "💹",
"hai": "🚢",
"jie": "💳",
"xiong": "🐻",
"jin": "🏆",
"tu": "🐇",
"gao": "🗻",
"du": "☠",
"dui": "✅",
"xian": "➰",
"ren": "👤",
"ha": "🐸",
"bin": "🍦",
"chang": "📏",
"guo": "🥘",
"jia": "➕",
"zai": "📥",
"ya": "🦆",
"chen": "🌅",
"xu": "⏭",
"meng": "💭",
"qu": "🎼",
"xi": "🎊",
"zao": "⛏",
"xizao": "🚿",
"da": "🤜😫🤛",
"hui": "🌠",
"lai": "🍼",
"le": "😆",
"dong": "🍩",
"ben": "🤯",
"gu": "🍄",
"tao": "🍑",
"bao": "🐆",
"lao": "👴",
"po": "🎢",
"shan": "🏔",
}
|
PypiClean
|
/revolt-sdk-1.0.1.tar.gz/revolt-sdk-1.0.1/revoltsdk/__init__.py
|
__version__ = '1.0.1'
__all__ = ['Revolt']
from concurrent.futures import ThreadPoolExecutor
from urllib.request import urlopen, Request
from datetime import datetime, timezone, timedelta
from multiprocessing import RLock
from threading import Timer
from uuid import uuid4
import locale
import time
import json
from typing import Any, Optional, List
from .config import _Config
from .event import _Event
if __debug__:
import logging
log = logging.getLogger(name='Revolt').debug
class Revolt:
"""
Revolt analytics client.
Use ``send_event`` to send analytics events.
The current implementation ignores errors occurring when sending events.
Errors will be shown in logs but there will be no retries.
Parameters
----------
tracking_id : str
Your unique client tracking ID. If you don't have a tracking_id, please contact the Revolt Team.
secret_key : str
Client authorization secret for your tracking ID
app_code : str
Your application name/identifier i.e. 'com.company.myapp'
app_version : str
Your application version i.e. `1.12.4`
app_instance_id : Optional[str]
Unique application instance ID. It will be randomly generated each time if None is provided.
It should be reused for the same device/application instance.
timezone : datetime.timezone
Timezone used to send events. Default is current timezone provided by system.
revolt_host : str
Host name of Revolt server
batch_size : int
Maximum size of event batches sent to the server. Events will be sent automatically when the queue
reaches batch_size. Default is 20.
auto_flush_delay : Optional[int]
Delay in milliseconds before automatically sending an events batch if batch_size was not reached.
Calling ``send_event`` will reset the timer. Auto flush will be disabled if you pass None instead.
If auto flush is disabled, events will be flushed only when the queue reaches batch_size or when flushed manually.
Default is 5 seconds (5000).
""" # noqa: E501
def __init__(self,
tracking_id: str,
secret_key: str,
app_code: str,
app_version: str,
app_instance_id: Optional[str] = None,
timezone: timezone = timezone(timedelta(seconds=time.localtime().tm_gmtoff)), # noqa: E501
revolt_host: str = 'api.revolt.rocks',
batch_size: int = 20,
auto_flush_delay: Optional[int] = 5000):
if __debug__:
log(f'Initializing Revolt {__version__}') # noqa: E501
self._config: _Config = _Config(
app_code=app_code,
app_version=app_version,
revolt_host=revolt_host,
tracking_id=tracking_id,
secret_key=secret_key,
app_instance_id=app_instance_id,
timezone=timezone,
batch_size=batch_size,
auto_flush_delay=auto_flush_delay
)
self._lock: RLock = RLock()
self._executor = ThreadPoolExecutor( # TODO: compare with ProcessPoolExecutor for performance # noqa: E501
max_workers=1,
)
self._event_queue: List[_Event] = []
if self._config.auto_flush_delay:
self._delay_timer = Timer(
self._config.auto_flush_delay / 1000.0,
self._timed_flush
)
elif __debug__:
log('Auto flush delay disabled')
if __debug__:
log(f'Initialized Revolt client with tracking_id: {tracking_id} and app_instance_id: {app_instance_id}') # noqa: E501
self._send_initial_event()
def send_event(self, type: str, data: Optional[Any] = None, flush: bool = False): # noqa: E501
"""
Adds analytics event to queue. Event timestamp will be assigned when this method is called.
Events are collected before sending and are sent when forced by flush, when the queue reaches the
batch size defined when initializing the client, or, if auto flush is enabled, after the defined
period of inactivity. Calling this method resets the auto flush timer.
Parameters
----------
type : str
Event type name, max 32 characters. It should use dots for separating domain,
object, and event (example: ui.activity.started); the last element of the name should be a verb
expressing what happened, e.g. signedIn, started, deleted.
Type of event determines format of event data.
data : Optional[Any]
Event data. Must be json serializable or None
flush : bool
Forces to send this and all queued events immediately if set to True. Default is False
Returns
-------
None
""" # noqa: E501
assert type # nosec
assert len(type) <= 32 # nosec
self._send(
_Event(
id=str(uuid4()),
type=type,
data=data,
timestamp=_timestamp()
),
flush
)
# private
def _send(self, event: _Event, flush: bool):
if __debug__:
log(f'Enqueueing event [{event.id}] of type: {event.type} with data: {event.data}') # noqa: E501
with self._lock:
self._event_queue.append(event)
if flush or len(self._event_queue) >= self._config.batch_size:
self._async_flush(self._event_queue)
self._event_queue = []
if self._config.auto_flush_delay:
self._stop_timer()
elif self._config.auto_flush_delay:
self._reset_timer()
else:
pass # keep waiting
def _send_initial_event(self):
self._send(
_Event(
id=str(uuid4()),
type='system.appInstanceData',
data={
'app': {
'type': 'backend', # no reason to add more types for now # noqa: E501
'code': self._config.app_code,
'version': self._config.app_version,
'sdkVersion': __version__
},
'device': {
'language': locale.getlocale()[0],
'zoneOffset': self._config.timezone_offset
}
},
timestamp=_timestamp()
),
flush=False
)
def _timed_flush(self):
with self._lock:
assert len(self._event_queue) >= 0 # nosec
if __debug__:
log(f'Flushing after {self._config.auto_flush_delay} milliseconds of inactivity') # noqa: E501
self._async_flush(self._event_queue)
self._event_queue = []
def _async_flush(self, events: List[_Event]):
def flush():
try:
response = urlopen(self._prepare_request(events)) # nosec
except Exception as e:
log(f'Error when sending events {e}')
return
try:
result = json.loads(response.read())
log(f'Successfully sent {result["eventsAccepted"]} events')
error = result.get('eventError')
if error:
message = error.get('errorMessage')
if message:
log(f'There was an error while sending events (code: {error["errorCode"]}): {message}') # noqa: E501
else:
log(f'There was an unknown error while sending events, code: {error["errorCode"]}') # noqa: E501
except Exception as e:
log(f'Error when decoding response {e}')
if __debug__:
log(f'Flushing {len(events)} events')
self._executor.submit(flush)
def _prepare_request(self, events: List[_Event]):
assert events # nosec
return Request(
url=self._config.endpoint_url,
method='POST',
headers={
'Authorization': 'Basic ' + self._config.credential,
'Content-Type': 'application/json; charset=utf-8;',
'User-Agent': f'Revolt Python SDK {__version__}'
},
data=self._prepare_request_body(events)
)
def _prepare_request_body(self, events: List[_Event]):
return json.dumps(
[
{
'meta': {
'id': event.id,
'type': event.type,
'timestamp': event.timestamp
},
"data": event.data
}
for event in events
]
).encode('utf-8')
def _stop_timer(self):
assert self._config.auto_flush_delay # nosec
self._delay_timer.cancel()
def _reset_timer(self):
assert self._config.auto_flush_delay # nosec
self._delay_timer.cancel()
self._delay_timer = Timer(
self._config.auto_flush_delay / 1000.0,
self._timed_flush
)
self._delay_timer.start()
def _timestamp() -> int:
return int(datetime.now(timezone.utc).timestamp() * 1000)  # aware UTC datetime; utcnow().timestamp() would shift the value by the local UTC offset
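# --- Usage sketch (added for illustration; not part of the published SDK, and the
# credentials and event names below are placeholders) ---
# The client defined above is driven by constructing it once and enqueueing events
# with send_event(); queued events go out when batch_size is reached, after the
# auto flush delay, or immediately when flush=True is passed.
if __name__ == "__main__":
    client = Revolt(
        tracking_id="YOUR_TRACKING_ID",   # placeholder, obtain from the Revolt Team
        secret_key="YOUR_SECRET_KEY",     # placeholder
        app_code="com.example.demo",
        app_version="0.1.0",
    )
    client.send_event("ui.activity.started", data={"screen": "home"})
    # flush=True forces this event and everything still queued to be sent now.
    client.send_event("ui.activity.stopped", data={"screen": "home"}, flush=True)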
|
PypiClean
|
/HsOpenAPIDeviceBind-1.3.3.tar.gz/HsOpenAPIDeviceBind-1.3.3/hsbind/network_utils.py
|
import netifaces
import psutil
from hsbind.common_utils import get_logger
logging = get_logger(__name__)
invalid_macs_start_str = (
'00-05-69',
'00-1c-14',
'00-0c-29',
'00-50-56',
'08-00-27',
'0a-00-27',
'00-03-ff',
'00-15-5d'
)
def get_psutil_mac_address_list() -> dict:
mac_addresses = {}
addrs_info = psutil.net_if_addrs()
stats_info = psutil.net_if_stats()
for adapter in addrs_info:
snicaddr_list = addrs_info[adapter]
snicstats = stats_info[adapter]
for snicaddr in snicaddr_list:
if snicstats.isup and snicaddr.family.name in {'AF_LINK'}:
mac = snicaddr.address
if '-' in mac or ':' in mac:
if len(mac) == 17 and mac != '00:00:00:00:00:00' and mac != '00-00-00-00-00-00':
mac = mac.replace(':', '-').lower()
if not mac.startswith(invalid_macs_start_str):
mac_addresses[mac] = True
return mac_addresses
def get_routing_nic_names() -> set:
netifaces_gateways = netifaces.gateways()
gateways = netifaces_gateways[netifaces.AF_INET]
routing_nic_names = set()
for gateway in gateways:
routing_nic_names.add(gateway[1])
return routing_nic_names
def get_netifaces_mac_address_list() -> list:
mac_addresses = []
routing_nic_names = get_routing_nic_names()
for interface in netifaces.interfaces():
if interface in routing_nic_names:
routing_nic_mac_addr = netifaces.ifaddresses(interface)[netifaces.AF_LINK][0]['addr']
if len(routing_nic_mac_addr) == 17 and routing_nic_mac_addr != '00:00:00:00:00:00' and routing_nic_mac_addr != '00-00-00-00-00-00':
routing_nic_mac_addr = routing_nic_mac_addr.replace(':', '-').lower()
mac_addresses.append(routing_nic_mac_addr)
mac_addresses.sort()
return mac_addresses
def get_mac_address() -> str:
psutil_mac_dict = get_psutil_mac_address_list()
netifaces_mac_address_list = get_netifaces_mac_address_list()
result_list = []
if len(netifaces_mac_address_list) == 0:
for mac in psutil_mac_dict.keys():
result_list.append(mac)
elif len(psutil_mac_dict) == 0:
for mac in netifaces_mac_address_list:
result_list.append(mac)
else:
for mac in netifaces_mac_address_list:
if mac in psutil_mac_dict and psutil_mac_dict[mac] == True:
result_list.append(mac)
if len(result_list) == 0:  # no MAC address was obtained from either source
raise Exception('get mac address error, length 0')
result_list.sort()
return result_list[0]
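# --- Usage sketch (illustrative only; not part of the original module) ---
# get_mac_address() intersects the MAC addresses reported by psutil (link-layer
# addresses of interfaces that are up, with common virtual-adapter prefixes
# filtered out) with the MACs of interfaces that carry a default route according
# to netifaces, then returns the lexicographically smallest match.
if __name__ == "__main__":
    print("routing interfaces:", get_routing_nic_names())
    print("device mac address:", get_mac_address())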
|
PypiClean
|
/mowl_borg-0.2.0-py3-none-any.whl/mowl/kge/model.py
|
from pykeen.triples.triples_factory import TriplesFactory
from pykeen.models import ERModel
from pykeen.training import SLCWATrainingLoop
import tempfile
import torch as th
from torch.optim import Adam, Optimizer
import os
import mowl.error as err
import logging
logging.basicConfig(level=logging.INFO)
class KGEModel():
'''
:param triples_factory: PyKEEN triples factory.
:type triples_factory: :class:`pykeen.triples.triples_factory.TriplesFactory`
:param model: Initialized PyKEEN model
:type model: Initialized model of the type :class:`EntityRelationEmbeddingModel \
<pykeen.models.base.EntityRelationEmbeddingModel>` or \
:class:`ERModel <pykeen.models.nbase.ERModel>`.
:param epochs: Number of epochs.
:type epochs: int
:param batch_size: Number of data samples in each batch. Defaults to 32.
:type batch_size: int, optional
:param optimizer: Optimizer to be used while training the model. Defaults to \
:class:`torch.optim.Adam`.
:type optimizer: subclass of :class:`torch.optim.Optimizer`, optional
:param lr: Learning rate. Defaults to 1e-3.
:type lr: float, optional
:param device: Device to run the model. Defaults to `cpu`.
:type device: str
:param model_filepath: Path for saving the model. Defaults to \
:class:`tempfile.NamedTemporaryFile`
:type model_filepath: str, optional
'''
def __init__(self,
triples_factory,
pykeen_model,
epochs,
batch_size=32,
optimizer=Adam,
lr=1e-3,
device="cpu",
model_filepath=None,
):
if not isinstance(triples_factory, TriplesFactory):
raise TypeError(
"Parameter triples_factory must be of type or subtype of \
pykeen.triples.triples_factory.TriplesFactory.")
if not isinstance(pykeen_model, ERModel):
raise TypeError(
"Parameter pykeen_model must be of type or subtype of pykeen.models.ERModel.")
if not isinstance(epochs, int):
raise TypeError("Parameter epochs must be of type int.")
if not isinstance(batch_size, int):
raise TypeError("Optional parameter batch_size must be of type int.")
try:
optimizer(params=[th.empty(1)])
except Exception:
raise TypeError(
"Optional parameter optimizer must be a subtype of torch.optim.Optimizer.")
if not isinstance(lr, float):
raise TypeError("Optional parameter lr must be of type float.")
if not isinstance(device, str):
raise TypeError("Optional parameter device must be of type str.")
if not isinstance(model_filepath, str) and model_filepath is not None:
raise TypeError("Optional parameter model_filepath must be of type str or None.")
self.triples_factory = triples_factory
self.device = device
self.model = pykeen_model.to(self.device)
self.epochs = epochs
self.batch_size = batch_size
self.optimizer = optimizer
self.lr = lr
if model_filepath is None:
model_filepath = tempfile.NamedTemporaryFile()
model_filepath = model_filepath.name
self.model_filepath = model_filepath
self._trained = False
self._data_loaded = False
self._class_index_dict = None
self._class_embeddings_dict = None
self._object_property_index_dict = None
self._object_property_embeddings_dict = None
@property
def class_index_dict(self):
"""This returns a dictionary of the form class_name -> class_index. This equivalent to \
the method triples_factory.entity_to_id from PyKEEN."""
if self._class_index_dict is None:
self._class_index_dict = self.triples_factory.entity_to_id
return self._class_index_dict
@property
def object_property_index_dict(self):
"""This returns a dictionary of the form object_property_name -> object_property_index. \
This is equivalent to the method triples_factory.relation_to_id from PyKEEN."""
if self._object_property_index_dict is None:
self._object_property_index_dict = self.triples_factory.relation_to_id
return self._object_property_index_dict
@property
def class_embeddings_dict(self):
if self._class_embeddings_dict is None:
try:
self._get_embeddings()
except FileNotFoundError:
raise AttributeError(err.EMBEDDINGS_NOT_FOUND_MODEL_NOT_TRAINED)
return self._class_embeddings_dict
@property
def object_property_embeddings_dict(self):
if self._object_property_embeddings_dict is None:
try:
self._get_embeddings()
except FileNotFoundError:
raise AttributeError(err.EMBEDDINGS_NOT_FOUND_MODEL_NOT_TRAINED)
return self._object_property_embeddings_dict
def load_best_model(self):
if not os.path.exists(self.model_filepath):
raise FileNotFoundError(
"Loading best model failed because file was not found at the given path. \
Please train the model first.")
self.model.load_state_dict(th.load(self.model_filepath))
self.model.eval()
def train(self):
optimizer = self.optimizer(params=self.model.get_grad_params(), lr=self.lr)
training_loop = SLCWATrainingLoop(model=self.model, triples_factory=self.triples_factory,
optimizer=optimizer)
_ = training_loop.train(triples_factory=self.triples_factory, num_epochs=self.epochs,
batch_size=self.batch_size)
th.save(self.model.state_dict(), self.model_filepath)
self._trained = True
def _get_embeddings(self, load_best_model=True):
if load_best_model:
self.load_best_model()
cls_embeddings = self.model.entity_representations[0](indices=None).cpu().detach().numpy()
cls_ids = {item[0]: item[1] for item in self.triples_factory.entity_to_id.items()}
cls_embeddings = {item[0]: cls_embeddings[item[1]] for item in
self.triples_factory.entity_to_id.items()}
self._class_index_dict = cls_ids
self._class_embeddings_dict = cls_embeddings
rel_embeddings = self.model.relation_representations[0](indices=None)
rel_embeddings = rel_embeddings.cpu().detach().numpy()
rel_ids = {item[0]: item[1] for item in self.triples_factory.relation_to_id.items()}
rel_embeddings = {item[0]: rel_embeddings[item[1]] for item in
self.triples_factory.relation_to_id.items()}
self._object_property_index_dict = rel_ids
self._object_property_embeddings_dict = rel_embeddings
@th.no_grad()
def score_method_point(self, point):
"""Receives the embedding of a point and returns its score."""
self.model.eval()
# TODO implement code that checks dimensionality
point = self.point_to_tensor(point)
return self.model.predict_hrt(point)
@th.no_grad()
def score_method_tensor(self, data):
self.model.eval()
return self.model.predict_hrt(data)
def point_to_tensor(self, point):
point = [list(point)]
point = th.tensor(point).to(self.device)
return point
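# --- Usage sketch (illustrative; assumes a recent PyKEEN in which TransE subclasses
# ERModel and TriplesFactory.from_labeled_triples is available; the URIs are
# placeholders) ---
# KGEModel wraps an initialized PyKEEN model plus a TriplesFactory, trains it with
# the SLCWA loop, and exposes the learned embeddings as label-keyed dictionaries.
if __name__ == "__main__":
    import numpy as np
    from pykeen.models import TransE

    triples = np.array([
        ["http://example.org/A", "http://example.org/rel", "http://example.org/B"],
        ["http://example.org/B", "http://example.org/rel", "http://example.org/C"],
    ])
    factory = TriplesFactory.from_labeled_triples(triples)
    model = KGEModel(factory, TransE(triples_factory=factory), epochs=2, batch_size=2)
    model.train()
    # After training, embeddings are available keyed by entity/relation label.
    print(list(model.class_embeddings_dict.items())[:1])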
|
PypiClean
|
/google-cloud-bare-metal-solution-1.4.2.tar.gz/google-cloud-bare-metal-solution-1.4.2/google/cloud/bare_metal_solution/__init__.py
|
from google.cloud.bare_metal_solution import gapic_version as package_version
__version__ = package_version.__version__
from google.cloud.bare_metal_solution_v2.services.bare_metal_solution.async_client import (
BareMetalSolutionAsyncClient,
)
from google.cloud.bare_metal_solution_v2.services.bare_metal_solution.client import (
BareMetalSolutionClient,
)
from google.cloud.bare_metal_solution_v2.types.baremetalsolution import (
OperationMetadata,
ResetInstanceResponse,
)
from google.cloud.bare_metal_solution_v2.types.instance import (
DetachLunRequest,
GetInstanceRequest,
Instance,
ListInstancesRequest,
ListInstancesResponse,
ResetInstanceRequest,
ServerNetworkTemplate,
StartInstanceRequest,
StartInstanceResponse,
StopInstanceRequest,
StopInstanceResponse,
UpdateInstanceRequest,
)
from google.cloud.bare_metal_solution_v2.types.lun import (
GetLunRequest,
ListLunsRequest,
ListLunsResponse,
Lun,
)
from google.cloud.bare_metal_solution_v2.types.network import (
VRF,
GetNetworkRequest,
ListNetworksRequest,
ListNetworksResponse,
ListNetworkUsageRequest,
ListNetworkUsageResponse,
LogicalInterface,
Network,
NetworkAddressReservation,
NetworkUsage,
UpdateNetworkRequest,
)
from google.cloud.bare_metal_solution_v2.types.nfs_share import (
GetNfsShareRequest,
ListNfsSharesRequest,
ListNfsSharesResponse,
NfsShare,
UpdateNfsShareRequest,
)
from google.cloud.bare_metal_solution_v2.types.volume import (
GetVolumeRequest,
ListVolumesRequest,
ListVolumesResponse,
ResizeVolumeRequest,
UpdateVolumeRequest,
Volume,
)
__all__ = (
"BareMetalSolutionClient",
"BareMetalSolutionAsyncClient",
"OperationMetadata",
"ResetInstanceResponse",
"DetachLunRequest",
"GetInstanceRequest",
"Instance",
"ListInstancesRequest",
"ListInstancesResponse",
"ResetInstanceRequest",
"ServerNetworkTemplate",
"StartInstanceRequest",
"StartInstanceResponse",
"StopInstanceRequest",
"StopInstanceResponse",
"UpdateInstanceRequest",
"GetLunRequest",
"ListLunsRequest",
"ListLunsResponse",
"Lun",
"GetNetworkRequest",
"ListNetworksRequest",
"ListNetworksResponse",
"ListNetworkUsageRequest",
"ListNetworkUsageResponse",
"LogicalInterface",
"Network",
"NetworkAddressReservation",
"NetworkUsage",
"UpdateNetworkRequest",
"VRF",
"GetNfsShareRequest",
"ListNfsSharesRequest",
"ListNfsSharesResponse",
"NfsShare",
"UpdateNfsShareRequest",
"GetVolumeRequest",
"ListVolumesRequest",
"ListVolumesResponse",
"ResizeVolumeRequest",
"UpdateVolumeRequest",
"Volume",
)
|
PypiClean
|
/az-iranian-bank-gateways-1.9.0.tar.gz/az-iranian-bank-gateways-1.9.0/azbankgateways/banks/idpay.py
|
import json
import logging
import requests
from azbankgateways.banks import BaseBank
from azbankgateways.exceptions import BankGatewayConnectionError, SettingDoesNotExist
from azbankgateways.exceptions.exceptions import BankGatewayRejectPayment
from azbankgateways.models import BankType, CurrencyEnum, PaymentStatus
from azbankgateways.utils import get_json, split_to_dict_querystring
class IDPay(BaseBank):
_merchant_code = None
_method = None
_x_sandbox = None
_payment_url = None
_params = {}
def __init__(self, **kwargs):
super(IDPay, self).__init__(**kwargs)
self.set_gateway_currency(CurrencyEnum.IRR)
self._token_api_url = "https://api.idpay.ir/v1.1/payment"
self._verify_api_url = "https://api.idpay.ir/v1.1/payment/verify"
def get_bank_type(self):
return BankType.IDPAY
def set_default_settings(self):
for item in ["MERCHANT_CODE", "METHOD", "X_SANDBOX"]:
if item not in self.default_setting_kwargs:
raise SettingDoesNotExist()
setattr(self, f"_{item.lower()}", self.default_setting_kwargs[item])
self._x_sandbox = str(self._x_sandbox)
"""
gateway
"""
def _get_gateway_payment_url_parameter(self):
return self._payment_url
def _get_gateway_payment_parameter(self):
params = {}
params.update(self._params)
return params
def _get_gateway_payment_method_parameter(self):
return "GET"
"""
pay
"""
def get_pay_data(self):
data = {
"order_id": self.get_tracking_code(),
"amount": self.get_gateway_amount(),
"phone": self.get_mobile_number(),
"callback": self._get_gateway_callback_url(),
}
return data
def prepare_pay(self):
super(IDPay, self).prepare_pay()
def pay(self):
super(IDPay, self).pay()
data = self.get_pay_data()
response_json = self._send_data(self._token_api_url, data)
if "id" in response_json and "link" in response_json and response_json["link"] and response_json["id"]:
token = response_json["id"]
self._payment_url, self._params = split_to_dict_querystring(response_json["link"])
self._set_reference_number(token)
else:
logging.critical("IDPay gateway reject payment")
raise BankGatewayRejectPayment(self.get_transaction_status_text())
"""
verify gateway
"""
def prepare_verify_from_gateway(self):
super(IDPay, self).prepare_verify_from_gateway()
for method in ["GET", "POST", "data"]:
token = getattr(self.get_request(), method).get("id", None)
if token:
self._set_reference_number(token)
self._set_bank_record()
break
def verify_from_gateway(self, request):
super(IDPay, self).verify_from_gateway(request)
"""
verify
"""
def get_verify_data(self):
super(IDPay, self).get_verify_data()
data = {
"id": self.get_reference_number(),
"order_id": self.get_tracking_code(),
}
return data
def prepare_verify(self, tracking_code):
super(IDPay, self).prepare_verify(tracking_code)
def verify(self, transaction_code):
super(IDPay, self).verify(transaction_code)
data = self.get_verify_data()
response_json = self._send_data(self._verify_api_url, data, timeout=10)
if response_json.get("verify", {}).get("date", None):
self._set_payment_status(PaymentStatus.COMPLETE)
extra_information = json.dumps(response_json)
self._bank.extra_information = extra_information
self._bank.save()
else:
self._set_payment_status(PaymentStatus.CANCEL_BY_USER)
logging.debug("IDPay gateway unapprove payment")
def _send_data(self, api, data, timeout=5):
headers = {
"X-API-KEY": self._merchant_code,
"X-SANDBOX": self._x_sandbox,
}
try:
response = requests.post(api, headers=headers, json=data, timeout=timeout)
except requests.Timeout:
logging.exception("IDPay time out gateway {}".format(data))
raise BankGatewayConnectionError()
except requests.ConnectionError:
logging.exception("IDPay time out gateway {}".format(data))
raise BankGatewayConnectionError()
response_json = get_json(response)
if "error_message" in response_json:
self._set_transaction_status_text(response_json["error_message"])
return response_json
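# --- Configuration sketch (illustrative; only the MERCHANT_CODE / METHOD / X_SANDBOX
# keys are taken from set_default_settings above, the surrounding settings layout is
# an assumption) ---
# IDPay follows the BaseBank flow of this package: get_pay_data()/pay() request a
# payment link from api.idpay.ir, the user is redirected to that link, and after the
# callback prepare_verify_from_gateway()/verify() confirm the payment against the
# verify endpoint. The gateway expects settings shaped roughly like:
#
#     "IDPAY": {
#         "MERCHANT_CODE": "<your IDPay API key>",
#         "METHOD": "GET",
#         "X_SANDBOX": 1,   # 1 selects the IDPay sandbox, 0 selects production
#     }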
|
PypiClean
|
/pov-fabric-helpers-0.3.tar.gz/pov-fabric-helpers-0.3/pov_fabric.py
|
import hashlib
import os
import posixpath
import subprocess
import string
import sys
import tempfile
import textwrap
import urlparse
from StringIO import StringIO
from contextlib import closing
from pipes import quote # TBD: use shlex.quote on Python 3.2+
from fabric.api import (
run, sudo, quiet, settings, cd, env, abort, task, with_settings,
)
from fabric.contrib.files import exists, append
from fabric.utils import apply_lcwd
from fabric.sftp import SFTP
#
# Constants
#
# Produced by 'ssh-keyscan github.com'
GITHUB_SSH_HOST_KEY = "github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ=="
# Fingerprint from https://help.github.com/articles/what-are-github-s-ssh-key-fingerprints/
GITHUB_SSH_HOST_KEY_FINGERPRINT = "16:27:ac:a5:76:28:2d:36:63:1b:56:4d:eb:df:a6:48"
GITHUB_SSH_HOST_KEY_FINGERPRINT_SHA256 = "SHA256:nThbg6kXUpJWGl7E1IGOCspRomTxdCARLviKw6E5SY8"
# Known SSH host keys to be added to ~/.ssh/known_hosts if needed
KNOWN_HOSTS = {
"github.com": GITHUB_SSH_HOST_KEY,
}
#
# Command-line parsing
#
def asbool(v):
"""Convert value to boolean."""
if isinstance(v, basestring):
return v.lower() in ('yes', 'true', 'on', '1')
else:
return bool(v)
def aslist(v):
"""Convert value to list."""
if isinstance(v, basestring):
return v.split()
else:
return list(v)
#
# System management helpers
#
def assert_shell_safe(*args, **kw):
"""Check that each argument can be passed to shell safely.
This is ultra-paranoid mode: only a small set of whitelisted characters are
allowed. No spaces, no leading dashes, no glob wildcards, no quotes, no
backticks, no dollar signs, no history expansion, no brace expansion.
Tilde expansion is allowed.
It might be too strict. Therefore you can supply a keyword-only argument
``extra_allow`` that lists additional characters to be allowed.
"""
extra_allow = kw.pop('extra_allow', '')
if kw:
raise TypeError('unexpected keyword arguments: {}'
.format(', '.join(sorted(kw))))
allowed_chars = set(string.letters + string.digits + '/-._~')
allowed_chars.update(extra_allow)
for arg in args:
if not set(arg) <= allowed_chars or arg.startswith('-'):
raise ValueError('{} is not safe for shell'.format(arg))
def ensure_apt_not_outdated():
"""Make sure apt-get update was run within the last day."""
if not run("find /var/lib/apt/lists -maxdepth 0 -mtime -1", quiet=True):
sudo("apt-get update -qq")
def package_available(package):
"""See if a package is available for installation."""
assert_shell_safe(package)
with quiet():
output = run('apt-cache madison {}'.format(package))
# The terrible part: apt-cache always returns status code 0 and never prints
# to stderr, no matter if the package is or isn't available. The error
# message is translated. If the package is available, the output is
# a multi-line list with |-separated columns that contain package
# names, versions, and the repository info.
return '|' in output
def package_installed(package):
"""Check if the specified packages is installed."""
assert_shell_safe(package)
# XXX: doing this in a loop is slow :(
with quiet():
# XXX idea: return exists('/var/lib/dpkg/info/{}.list'.format(package))
# caveats: libnss-myhostname:amd64.list :/
status = run("dpkg-query -W --showformat='${Status}' %s" % package)
return status == "install ok installed"
def install_packages(*packages, **kw):
"""Install system packages.
You can use any of these styles::
install_packages('foo bar')
install_packages('foo', 'bar')
install_packages(['foo', 'bar'])
Keyword arguments:
- ``missing_only`` (default: False) -- apt-get install only the missing
packages. This can be slower than just letting apt figure it out.
- ``interactive`` (default: False) -- allow interactive prompts during
package installation.
- ``changelog`` (default: False) -- record installed packages in
/root/Changelog
"""
missing_only = kw.pop('missing_only', False)
interactive = kw.pop('interactive', False)
changelog = kw.pop('changelog', False)
if kw:
raise TypeError('unexpected keyword arguments: {}'
.format(', '.join(sorted(kw))))
if len(packages) == 1 and not isinstance(packages[0], str):
# handle lists and tuples
packages = packages[0]
packages = " ".join(packages).split()
if missing_only:
packages = [p for p in packages if not package_installed(p)]
if not packages:
return
ensure_apt_not_outdated()
for package in packages:
assert_shell_safe(package)
command = "apt-get install -qq -y %s" % " ".join(packages)
if not interactive:
command = "DEBIAN_FRONTEND=noninteractive " + command
if changelog:
changelog_append("apt-get install %s" % " ".join(packages))
sudo(command)
def install_missing_packages(*packages, **kw):
"""Install missing system packages.
Alias for install_packages(*packages, missing_only=True, **kw).
"""
kw.setdefault('missing_only', True)
install_packages(*packages, **kw)
def ssh_key_fingerprint(host_key, force_md5=False):
"""Compute the fingerprint of a public key.
    Can return a SHA256 or an MD5 fingerprint, depending on your OpenSSH
version. You can insist on MD5 if you want.
"""
if not host_key.startswith('ssh-'):
host_key = host_key.split(None, 1)[1]
with tempfile.NamedTemporaryFile(prefix='pov-fabric-') as f:
f.write(host_key)
f.flush()
output = subprocess.check_output(['ssh-keygen', '-l', '-f', f.name])
# Example output (old ssh):
# "2048 16:27:ac:a5:76:28:2d:36:63:1b:56:4d:eb:df:a6:48 /tmp/github_rsa.pub (RSA)\n"
# Example output (new ssh):
# "2048 SHA256:nThbg6kXUpJWGl7E1IGOCspRomTxdCARLviKw6E5SY8 no comment (RSA)\n"
# Example output (new ssh with -E md5):
# "2048 MD5:16:27:ac:a5:76:28:2d:36:63:1b:56:4d:eb:df:a6:48 no comment (RSA)\n"
fingerprint = output.split()[1]
if fingerprint.startswith('SHA256') and force_md5:
# we want MD5 still, for backwards-compat, but we should stop doing
# that eventually
output = subprocess.check_output(['ssh-keygen', '-l', '-f', f.name,
'-E', 'md5'])
fingerprint = output.split()[1].replace('MD5:', '')
return fingerprint
def register_host_key(host_key, fingerprint=None, fingerprints=None, force_md5=False):
"""Register a known host key.
This will be used by git_clone() and such to add the host key automatically
if you're cloning from the host.
"""
if fingerprint is not None:
if fingerprints is not None:
raise ValueError('Please provide either fingerprint or fingerprints, but not both')
fingerprints = [fingerprint]
force_md5 = True
hostname = host_key.split()[0]
if hostname in KNOWN_HOSTS and KNOWN_HOSTS[hostname] != host_key:
abort("There's a different host key already registered for {}".format(hostname))
    if fingerprints:
if ssh_key_fingerprint(host_key, force_md5=force_md5) not in fingerprints:
abort("SSH host key doesn't match fingerprint")
KNOWN_HOSTS[hostname] = host_key
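# Illustrative usage (editor's sketch; the host key below is a placeholder,
# not a real key): registering an extra known host so git_clone()/git_update()
# can add it to known_hosts when cloning over SSH.  Passing fingerprint= (or
# fingerprints=) additionally verifies the key before registering it.
#   register_host_key("git.example.com ssh-rsa AAAAB3Nza...placeholder...")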
def ensure_known_host(host_key, known_hosts='/root/.ssh/known_hosts'):
"""Make sure a host key exists in the known_hosts file.
This is idempotent: running it again won't add the same key again.
"""
assert_shell_safe(known_hosts)
if not exists(known_hosts, use_sudo=True):
ensure_directory(posixpath.dirname(known_hosts), mode=0o700)
sudo('touch %s' % known_hosts)
# Must use shell=True to work around Fabric bug, where it would fall
# flat in contains() with an error ("sudo: export: command not
# found") that is silently suppressed, resulting in always appending
# the ssh key to /root/.ssh/known_hosts. Probably because I use
# `with shell_env(LC_ALL='C.UTF-8'):`.
append(known_hosts, host_key, use_sudo=True, shell=True)
def ensure_user(user, shell=None, home=None, changelog=False, create_home=True):
"""Create a system user if it doesn't exist already.
This is idempotent: running it again won't add the same user again.
"""
assert_shell_safe(user, shell or '', home or '')
with quiet():
if run("id {user}".format(user=user)).succeeded:
# XXX: check if shell matches what we asked, and run chsh if not?
return
doit = run_and_changelog if changelog else sudo
with settings(sudo_user="root"):
command = ['adduser --quiet --system --group --disabled-password']
if shell:
command.append('--shell={}'.format(shell))
if home:
command.append('--home={}'.format(home))
if not create_home:
command.append('--no-create-home')
command.append(user)
doit(" ".join(command))
def ensure_locales(*languages):
"""Make sure locales are generated.
Example::
ensure_locales('en', 'lt')
"""
assert_shell_safe(*languages)
supported_locales = run("locale -a", quiet=True).splitlines()
# as a shortcut we'll assume that if one xx_... locale is supported
# then all of them are supported
supported_languages = set(locale.partition('.')[0].partition('_')[0]
for locale in supported_locales)
for language in languages:
if language not in supported_languages:
sudo("locale-gen {language}".format(language=language))
def ensure_directory(pathname, mode=None):
"""Make sure directory exists.
Returns True if it had to create the directory, False if the directory
already existed.
"""
if isinstance(mode, int):
mode = '{:o}'.format(mode)
assert_shell_safe(pathname, mode or '')
if not exists(pathname, use_sudo=True):
command = ['install -d']
if mode:
command.append('-m{}'.format(mode))
command.append(pathname)
sudo(' '.join(command))
return True
else:
return False
def upload_file(local_file, remote_path, mode=0o644, owner="root:root",
temp_dir="", changelog=False):
"""Upload a file to a remote host.
``local_file`` can be a filename or a seekable file-like object. Globbing
is not supported.
    ``remote_path`` should be a full filename, not just the directory.
``mode`` can be an integer (e.g. 0o755).
``changelog``, if True, adds a changelog message of the form "uploaded
{filename}".
Bug: doesn't handle ``with cd(...):`` or ``with lcd(...):``. Probably.
Bug: doesn't set mode/ownership if the file exists and has the same content
but different mode/ownership.
Warning: is not suitable for uploading secrets (changes the mode after
uploading the file), unless you take care to specify ``temp_dir`` to point
to a non-world-readable area.
Undocumented features that are subject to change without notice:
``mode`` can be a string or None; ``owner`` can be None.
"""
if isinstance(mode, int):
mode = '{:o}'.format(mode)
assert_shell_safe(remote_path, mode or '', temp_dir)
assert_shell_safe(owner or '', extra_allow=':')
local_is_path = not callable(getattr(local_file, 'read', None))
if isinstance(local_file, StringIO) and not getattr(local_file, 'name', None):
local_file.name = os.path.basename(remote_path)
with closing(SFTP(env.host_string)) as ftp:
if env.get('cwd'):
home = ftp.normalize('.')
temp_dir = posixpath.join(home, temp_dir)
tmp_path = posixpath.join(
temp_dir, hashlib.sha1(env.host_string + remote_path).hexdigest())
assert_shell_safe(tmp_path)
ftp.put(local_file, tmp_path, use_sudo=False, mirror_local_mode=False,
mode=None, local_is_path=local_is_path, temp_dir="")
with quiet():
same = sudo("test -f {realfile} && cmp -s {tempfile} {realfile}".format(
tempfile=tmp_path, realfile=remote_path)).succeeded
if same:
sudo("rm {tempfile}".format(tempfile=tmp_path))
return False
else:
if mode is not None:
sudo('chmod {mode} {tempfile}'.format(mode=mode, tempfile=tmp_path))
if owner:
sudo('chown {owner} {tempfile}'.format(owner=owner, tempfile=tmp_path))
sudo("mv {tempfile} {realfile}".format(tempfile=tmp_path,
realfile=remote_path))
if changelog:
changelog_append("# updated {}".format(remote_path))
return True
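# Illustrative usage (editor's sketch, not from the original module): uploading
# in-memory content; upload_file() returns True only when the remote file
# actually changed, so callers can chain changelog or service-reload logic on
# it.  The filename and follow-up command are placeholders.
#   if upload_file(StringIO("net.ipv4.ip_forward=1\n"),
#                  "/etc/sysctl.d/99-forwarding.conf"):
#       run_and_changelog("sysctl --system")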
def render_jinja2(template_filename, context, template_dir=None):
"""Render a Jinja2 template.
Based on fabric.contrib.files.upload_template.
Differences: adds back the trailing newline that Jinja2 eats for some
reason.
"""
from jinja2 import Environment, FileSystemLoader
template_dir = template_dir or os.getcwd()
template_dir = apply_lcwd(template_dir, env)
jenv = Environment(loader=FileSystemLoader(template_dir))
text = jenv.get_template(template_filename).render(**context or {})
return text.encode('UTF-8') + '\n'
def render_sinterp(filename, context=None, template_dir=None):
"""Render a Python 2 string template.
Based on fabric.contrib.files.upload_template.
"""
if template_dir:
filename = os.path.join(template_dir, filename)
filename = apply_lcwd(filename, env)
with open(os.path.expanduser(filename)) as inputfile:
text = inputfile.read()
if context:
text = text % context
return text
def generate_file(template, filename, context=None, use_jinja=False,
mode=0o644, owner="root:root", changelog_append=True):
"""Generate a file from a template
Generates ``filename`` on the remote server using ``template`` as a source.
The syntax depends on ``use_jinja``: either Jinja2 (if True) or Python's
builtin string formatting (of the older, ``%(name)s`` variety).
``context`` should be a dict containing variables for interpolation.
Changes the file ownership and mode.
Creates the parent directory automatically if it doesn't exist (owned by
root, mode 0755).
If ``changelog_append`` is True, calls changelog_append() to note that
``filename`` was generated.
Returns True if it had to replace the file, False if the file already
existed with the right content.
"""
assert_shell_safe(filename)
ensure_directory(posixpath.dirname(filename))
if use_jinja:
text = render_jinja2(template, context)
else:
text = render_sinterp(template, context)
if upload_file(StringIO(text), filename, mode=mode, owner=owner):
changelog('# generated {filename}'.format(filename=filename),
append=changelog_append)
return True
else:
return False
def download_file(filename, url):
"""Download a file from a given URL."""
assert_shell_safe(filename)
assert_shell_safe(url, extra_allow=':')
run_and_changelog('wget {url} -O {filename}'.format(url=url, filename=filename))
#
# Git
#
def parse_git_repo(git_repo):
"""Parse a git repository URL.
git-clone(1) lists these as examples of supported URLs:
- ssh://[user@]host.xz[:port]/path/to/repo.git/
- git://host.xz[:port]/path/to/repo.git/
- http[s]://host.xz[:port]/path/to/repo.git/
- ftp[s]://host.xz[:port]/path/to/repo.git/
- rsync://host.xz/path/to/repo.git/
- [user@]host.xz:path/to/repo.git/
- ssh://[user@]host.xz[:port]/~[user]/path/to/repo.git/
- git://host.xz[:port]/~[user]/path/to/repo.git/
- [user@]host.xz:/~[user]/path/to/repo.git/
- /path/to/repo.git/
- file:///path/to/repo.git/
This function doesn't support the <transport>::<address> syntax, and it
doesn't understand insteadOf shortcuts from ~/.gitconfig.
"""
if '://' in git_repo:
return urlparse.urlparse(git_repo)
if ':' in git_repo:
netloc, colon, path = git_repo.partition(':')
return urlparse.ParseResult('ssh', netloc, path, '', '', '')
else:
return urlparse.ParseResult('file', '', git_repo, '', '', '')
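# Editor's note -- examples of what parse_git_repo() returns:
#   parse_git_repo('[email protected]:owner/repo.git').scheme    -> 'ssh'
#   parse_git_repo('[email protected]:owner/repo.git').hostname  -> 'github.com'
#   parse_git_repo('/srv/git/repo.git').scheme               -> 'file'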
@with_settings(sudo_user='root')
def git_clone(git_repo, work_dir, branch='master', force=False,
changelog=False):
"""Clone a specified branch of the git repository into work_dir.
If work_dir exists and force is False (default), aborts.
If work_dir exists and force is True, performs a 'git fetch' followed by
'git reset --hard origin/{branch}'.
Takes care to allow SSH agent forwarding to be used for authentication,
if you use SSH.
Takes care to add the SSH host key to /root/.ssh/known_hosts, if you're
cloning from a host in KNOWN_HOSTS.
Returns the commit hash of the version cloned.
"""
assert_shell_safe(git_repo, extra_allow='@:')
assert_shell_safe(work_dir, branch)
env = {}
url = parse_git_repo(git_repo)
if url.scheme == 'ssh':
host_key = KNOWN_HOSTS.get(url.hostname)
if host_key:
ensure_known_host(host_key)
# sudo removes SSH_AUTH_SOCK from the environment, so we can't make use
# of the ssh agent forwarding unless we cunningly preserve the envvar
# and sudo to root (because only root and the original user will be
# able to access the socket)
env['SSH_AUTH_SOCK'] = run("echo $SSH_AUTH_SOCK", quiet=True)
if exists(posixpath.join(work_dir, '.git')):
return git_update(work_dir, branch=branch, force=force,
changelog=changelog, verify_remote_url=git_repo)
doit = run_and_changelog if changelog else sudo
with settings(shell_env=env):
doit("git clone -b {branch} {git_repo} {work_dir}".format(
branch=branch,
git_repo=git_repo,
work_dir=work_dir))
with cd(work_dir):
got_commit = sudo("git describe --always --dirty", quiet=True).strip()
if changelog:
changelog_append('# got commit {sha}'.format(sha=got_commit))
return got_commit
@with_settings(sudo_user='root')
def git_update(work_dir, branch='master', force=False, changelog=False,
verify_remote_url=None):
"""Update a specified git checkout.
Aborts if the checkout cannot be fast-forwarded to the specified branch,
unless force is specified.
Discards all local changes (committed or not) if force is True, so use with
care!
Returns the commit hash of the version fetched.
"""
assert_shell_safe(work_dir, branch)
env = {}
with cd(work_dir):
with quiet():
tracking_branch = run("git rev-parse --symbolic-full-name 'HEAD@{u}'")
if not tracking_branch.startswith("refs/remotes/origin/"):
abort("{} is not tracking a branch from remote 'origin'".format(work_dir))
tracking_branch = tracking_branch[len("refs/remotes/origin/"):]
if force and tracking_branch != branch:
changelog_append('cd {work_dir} && git checkout {branch}'.format(
work_dir=work_dir, branch=branch))
sudo("git checkout {branch}".format(branch=branch))
with quiet():
tracking_branch = run("git rev-parse --symbolic-full-name 'HEAD@{u}'")
if not tracking_branch.startswith("refs/remotes/origin/"):
abort("{} is not tracking a branch from remote 'origin'".format(work_dir))
tracking_branch = tracking_branch[len("refs/remotes/origin/"):]
if tracking_branch != branch:
abort("{} is not tracking branch {} (it's tracking {})".format(
work_dir, branch, tracking_branch))
git_repo = run("git config --get remote.origin.url", quiet=True)
if verify_remote_url and git_repo != verify_remote_url:
abort("{} is not tracking the right remote {} (it's tracking {})".format(
work_dir, verify_remote_url, git_repo))
url = parse_git_repo(git_repo)
if url.scheme == 'ssh':
host_key = KNOWN_HOSTS.get(url.hostname)
if host_key:
ensure_known_host(host_key)
# sudo removes SSH_AUTH_SOCK from the environment, so we can't make use
# of the ssh agent forwarding unless we cunningly preserve the envvar
# and sudo to root (because only root and the original user will be
# able to access the socket)
env['SSH_AUTH_SOCK'] = run("echo $SSH_AUTH_SOCK", quiet=True)
with cd(work_dir):
with settings(shell_env=env):
sudo("git fetch")
old_commit = sudo("git describe --always --dirty", quiet=True).strip()
if force:
changelog_append('cd {work_dir}\n git fetch && git reset --hard origin/{branch}'.format(
work_dir=work_dir, branch=branch))
sudo("git reset --hard origin/{branch}".format(branch=branch))
else:
changelog_append('cd {work_dir}\n git pull --ff-only'.format(work_dir=work_dir))
sudo("git merge --ff-only origin/{branch}".format(branch=branch))
got_commit = sudo("git describe --always --dirty", quiet=True).strip()
if changelog:
if old_commit == got_commit:
changelog_append(' # no changes')
else:
changelog_append(' # update {oldsha}..{sha}'.format(oldsha=old_commit, sha=got_commit))
return got_commit
#
# PostgreSQL helper
#
def postgresql_user_exists(user):
"""Check if a postgresql user already exists."""
assert_shell_safe(user)
out = sudo("psql -tAc \"SELECT 1 FROM pg_roles WHERE rolname = '%s'\"" % user,
user='postgres', quiet=True)
return bool(out)
def ensure_postgresql_user(user):
"""Create a PostgreSQL user if it doesn't exist already.
This is idempotent: running it again won't add the same user again.
"""
assert_shell_safe(user)
if not postgresql_user_exists(user):
sudo("LC_ALL=C.UTF-8 createuser -DRS %s" % user, user='postgres')
def postgresql_db_exists(dbname):
"""Check if a PostgreSQL database already exists."""
assert_shell_safe(dbname)
out = sudo("psql -tAc \"SELECT 1 FROM pg_database WHERE datname = '%s'\"" % dbname,
user='postgres', quiet=True)
return bool(out)
def ensure_postgresql_db(dbname, owner):
"""Create a PostgreSQL database if it doesn't exist already.
This is idempotent: running it again won't create the database again.
"""
assert_shell_safe(dbname)
if not postgresql_db_exists(dbname):
sudo("LC_ALL=C.UTF-8 createdb -E utf-8 -l en_US.UTF-8 -T template0 -O %s %s" % (owner, dbname),
user='postgres')
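# Illustrative pattern (editor's sketch; 'myapp' is a placeholder): the
# PostgreSQL helpers are idempotent, so a provisioning task can call them
# unconditionally.
#   ensure_postgresql_user('myapp')
#   ensure_postgresql_db('myapp', owner='myapp')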
#
# Apache
#
def install_apache_website(apache_conf_template, domain, context=None,
use_jinja=False, modules=[], reload_apache=True):
"""Upload Apache config for a website and enable it.
Takes care of
- generating an apache config file template from ``apache_conf_template``
- uploading it to /etc/apache2/sites-available/{domain}.conf
- file permissions and ownership (0644, root:root)
- creating a directory for logs (/var/log/apache2/{domain})
- enabling the website with a2ensite
- reloading apache
Caveats:
- assumes the Apache template configures logs in /var/log/apache2/{domain}
- assumes any other files (such as SSL certificates and keys) required for
the Apache config to work are already uploaded
"""
modules = aslist(modules)
changed = generate_file(apache_conf_template,
'/etc/apache2/sites-available/{}.conf'.format(domain),
context=context, use_jinja=use_jinja)
ensure_directory('/var/log/apache2/{}'.format(domain))
modules = [m for m in modules
if not exists('/etc/apache2/mods-enabled/{}.load'.format(m))]
if modules:
run_and_changelog("a2enmod {}".format(' '.join(modules)))
changed = True
if not exists('/etc/apache2/sites-enabled/{}.conf'.format(domain)):
run_and_changelog("a2ensite {}.conf".format(domain))
changed = True
if reload_apache and changed:
run_and_changelog("service apache2 reload")
#
# OpenSSL
#
STARTSSL_INTERMEDIATE_URL = 'https://www.startssl.com/certs/sub.class1.server.ca.pem'
# A test on 2015-04-29 shows that STARTSSL_INTERMEDIATE_URL gives you the same
# certificate as STARTSSL_INTERMEDIATE_SHA2_URL.
STARTSSL_INTERMEDIATE_SHA1_URL = 'https://www.startssl.com/certs/class1/sha1/pem/sub.class1.server.sha1.ca.pem'
STARTSSL_INTERMEDIATE_SHA2_URL = 'https://www.startssl.com/certs/class1/sha2/pem/sub.class1.server.sha2.ca.pem'
def ensure_ssl_key(ssl_key, ssl_csr, ssl_conf, ssl_cert, ssl_intermediate_cert,
ssl_intermediate_cert_url, ssl_options):
"""Make sure an SSL certificate exists.
- ``ssl_key``: filename of the private key (will be generated if missing)
- ``ssl_csr``: filename of the certificate signing request (will be
generated if needed)
- ``ssl_conf``: filename of the ssl configuration file (will be generated
using ``ssl_options`` if needed and missing)
- ``ssl_cert``: filename of the SSL certificate (alas this one cannot be
generated automatically)
- ``ssl_intermediate_cert``: filename of the SSL intermediate certificate
(will be downloaded if missing)
- ``ssl_intermediate_cert_url``: URL for downloading the intermediate
certificate (e.g. STARTSSL_INTERMEDIATE_URL)
- ``ssl_options``: a dictionary defining SSL certificate signing request
generation options, specifically, the arguments to be passed to
``generate_ssl_config()`` -- country, state, locality, organization,
organizational_unit, common_name, and email.
"""
if not exists(ssl_key, use_sudo=True):
if not exists(ssl_conf):
generate_ssl_config(ssl_conf, **ssl_options)
generate_ssl_key(ssl_key, ssl_csr, ssl_conf)
if not exists(ssl_csr):
if not exists(ssl_conf):
generate_ssl_config(ssl_conf, **ssl_options)
generate_ssl_csr(ssl_key, ssl_csr, ssl_conf)
if not exists(ssl_intermediate_cert):
download_file(ssl_intermediate_cert, ssl_intermediate_cert_url)
if not exists(ssl_cert):
changelog_append('# aborting: {ssl_cert} is missing'.format(ssl_cert=ssl_cert))
with quiet():
csr = run('cat {ssl_csr}'.format(ssl_csr=ssl_csr))
abort("{ssl_cert} is missing, please generate it using {ssl_csr}:\n\n{csr}".format(
ssl_cert=ssl_cert, ssl_csr=ssl_csr, csr=csr))
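# Illustrative usage (editor's sketch; all filenames and subject values are
# placeholders):
#   ensure_ssl_key(
#       ssl_key='/etc/ssl/private/www.example.com.key',
#       ssl_csr='/etc/ssl/private/www.example.com.csr',
#       ssl_conf='/etc/pov/sslreq.conf',
#       ssl_cert='/etc/ssl/certs/www.example.com.pem',
#       ssl_intermediate_cert='/etc/ssl/certs/sub.class1.server.ca.pem',
#       ssl_intermediate_cert_url=STARTSSL_INTERMEDIATE_URL,
#       ssl_options=dict(country='LT', state='.', locality='Vilnius',
#                        organization='Example Org', organizational_unit='.',
#                        common_name='www.example.com',
#                        email='[email protected]'))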
def generate_ssl_config(conffile, country, state, locality, organization,
organizational_unit, common_name, email):
"""Generate a config file for SSL certificates.
Example::
generate_ssl_config('/etc/pov/sslreq.conf',
country='LT', state='.', locality='Vilnius',
organization='POV', organizational_unit='.',
common_name='www.example.com',
email='[email protected]')
"""
assert_shell_safe(conffile)
config = textwrap.dedent("""\
[ req ]
default_bits = 2048
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
prompt = no
[ req_distinguished_name ]
countryName = {country}
stateOrProvinceName = {state}
localityName = {locality}
organizationName = {organization}
organizationalUnitName = {organizational_unit}
commonName = {common_name}
emailAddress = {email}
""").format(country=country, state=state, locality=locality,
organization=organization,
organizational_unit=organizational_unit,
common_name=common_name, email=email)
if upload_file(StringIO(config), conffile):
changelog_append("# generated {conffile}".format(conffile=conffile))
def generate_ssl_key(keyfile, csrfile, conffile):
"""Generate a new private SSL key and certificate signing request.
Uses modern defaults for 2015: 2048-bit RSA, SHA-256 signature.
"""
assert_shell_safe(keyfile, csrfile, conffile)
changelog_append("# generated {keyfile} and {csrfile}".format(
keyfile=keyfile, csrfile=csrfile))
sudo("openssl req -config {conffile} -newkey rsa:2048 -nodes"
" -keyout {keyfile} -sha256 -out {csrfile}".format(
keyfile=keyfile, csrfile=csrfile, conffile=conffile))
def generate_ssl_csr(keyfile, csrfile, conffile):
"""Generate a new certificate signing request for a given SSL private key.
Uses modern defaults for 2015: SHA-256 signature.
"""
assert_shell_safe(keyfile, csrfile, conffile)
changelog_append("# generated {csrfile}".format(csrfile=csrfile))
sudo("openssl req -config {conffile} -new -key {keyfile} -sha256"
" -out {csrfile}".format(
keyfile=keyfile, csrfile=csrfile, conffile=conffile))
#
# Postfix
#
def install_postfix_virtual_table(local, remote, changelog_append=True):
"""Upload a Postfix virtual table and install it.
Takes care of
- uploading the local file to remote
- file permissions and ownership (0644, root:root)
- running postmap
- adding the table to /etc/postfix/main.cf virtual_maps
- making sure that postfix accepts outside connections
(inet_interfaces != loopback-only)
- changelog updates for all of the above
    If ``changelog_append`` is False, this creates a new timestamped changelog
    header; if it's True, it appends to the current message.
"""
assert_shell_safe(remote)
if upload_file(local, remote):
changelog('# updated {remote}'.format(remote=remote),
append=changelog_append)
run_and_changelog("postmap {remote}".format(remote=remote))
# consider running postmap if the file exists and hasn't changed but the
# corresponding .map file is missing or outdated
add_postfix_virtual_map('hash:' + remote)
make_postfix_public()
def get_postfix_setting(setting):
"""Get the current value of a postfix setting"""
assert_shell_safe(setting)
with quiet():
current_setting = run("postconf -h {setting}".format(setting=setting))
if current_setting.startswith('postconf: warning:'):
# assume "postconf: warning: {setting}: unknown parameter"
current_setting = ''
return current_setting
def parse_postfix_setting(current_setting):
"""Parse a comma-separated postfix setting.
Returns a list of (non-empty) strings.
"""
return filter(None, map(str.strip, current_setting.split(',')))
def add_postfix_virtual_map(entry):
"""Add an entry to postfix's virtual_maps.
Takes care to
- preserve preexisting virtual maps
    - reload postfix's configuration after changing it
- document all the changes in the changelog
Idempotent: does nothing if entry is already included in virtual_maps.
"""
assert_shell_safe(entry, extra_allow=':')
current_setting = get_postfix_setting('virtual_alias_maps')
if current_setting != '$virtual_maps':
# TBH maybe we should ignore the legacy $virtual_maps and instead
# just use $virtual_alias_maps?
abort("Unexpected virtual_alias_maps setting ({})".format(current_setting))
add_postfix_setting('virtual_maps', entry)
def add_postfix_setting(setting, entry, reload_postfix=True):
"""Add an entry to a comma-separated postfix setting.
Takes care to
- preserve preexisting values
- reload postfix's configuration after changing it
- document all the changes in the changelog
Idempotent: does nothing if entry is already included in the setting.
Returns True if the setting was modified, False if it was untouched.
"""
assert_shell_safe(setting)
assert_shell_safe(entry, extra_allow=':')
old_value = get_postfix_setting(setting)
items = parse_postfix_setting(old_value)
if entry in items:
return False
else:
items.append(entry)
new_value = ', '.join(items)
if "'" in new_value:
abort("Cannot handle apostrophes in {setting} setting ({old_value}),"
" not touching anything!".format(setting=setting,
old_value=old_value))
changelog_append('# adding {entry} to {setting} in /etc/postfix/main.cf'.format(
entry=entry, setting=setting))
res = run_and_changelog("postconf {setting}='{new_value}'".format(
setting=setting, new_value=new_value))
if res.startswith("postconf: warning:"):
# Uhh on Ubuntu 10.04 postconf can't handle non-standard variables at all
changelog_append(" | %s" % res.rstrip())
abort("Your version of postconf ignores unknown settings; you'll have to edit /etc/postfix/main.cf and reload postfix manually.")
if reload_postfix:
run_and_changelog("postfix reload")
return True
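# Illustrative usage (editor's sketch): add_postfix_setting() only touches
# main.cf (and reloads postfix) when the entry is not already present in the
# comma-separated value.
#   add_postfix_setting('virtual_alias_maps', 'hash:/etc/postfix/virtual')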
def make_postfix_public():
"""Make sure postfix accepts connections from outside.
Takes care to
- restart postfix if necessary
- document all the changes in the changelog
"""
with quiet():
current_setting = run("postconf -h inet_interfaces")
if current_setting == 'loopback-only':
run_and_changelog("postconf inet_interfaces=all")
run_and_changelog("service postfix restart")
#
# pov-admin-tools
#
def has_new_changelog_message():
"""Check if new-changelog-entry is installed.
(You can get it by installing pov-admin-tools.)
"""
return (exists('/usr/sbin/new-changelog-entry') or
exists('/usr/local/sbin/new-changelog-entry'))
def changelog(message, context=None, append=False, optional=True):
"""Append a message to /root/Changelog, with a timestamped header.
Depends on pov-admin-tools. If it's not installed, skips the
message (unless you say optional=False, in which case it aborts
with an error).
By default the message gets a timestamped header. Use append=True
to append to an existing message instead of starting a new one.
If context is given, message will be formatted using given context
(``message = message.format(**context)``).
"""
# NB: no assert_shell_safe(): quote() ought to take care of everything.
if not optional or has_new_changelog_message():
cmd = 'new-changelog-entry'
if append:
cmd += ' -a'
if context is not None:
message = message.format(**context)
cmd += ' ' + quote(message)
run_as_root(cmd)
def changelog_append(message, context=None, optional=True):
"""Append a message to /root/Changelog.
Shortcut for changelog(message, append=True).
"""
changelog(message, context, append=True, optional=optional)
def changelog_banner(message, context=None, optional=True):
"""Append a banner message to /root/Changelog."""
changelog("#\n # %s\n #" % message, context, optional=optional)
def run_and_changelog(command, append=True):
"""Run a command and also append it to /root/Changelog"""
changelog(command, append=append)
return run_as_root(command)
def run_as_root(command):
"""Run a command as root; use sudo only if necessary."""
current_user = env.host_string.rpartition('@')[0] or env.user
if current_user != 'root':
return sudo(command, user='root')
else:
return run(command)
#
# Instance management
#
class Instance(dict):
"""Service instance configuration.
Subclass to add more parameters, e.g. ::
from pov_fabric import Instance as BaseInstance
class Instance(BaseInstance):
def __init__(self, name, host, home='/opt/project'):
                super(Instance, self).__init__(name, host)
self.home = home
Or use the ``with_params()`` classmethod.
"""
def __init__(self, name, host, **kwargs):
# This trick lets us access dict keys as if they were object attributes
# and vice versa.
self.__dict__ = self
self.name = name
self.host = host
self.__dict__.update(kwargs)
def _asdict(self):
"""(DEPRECATED) Return the instance parameters as a dict.
Useful for string formatting, e.g. ::
print('{name} is on {host}'.format(**instance._asdict()))
but since now you can do ::
print('{name} is on {host}'.format(**instance))
this method is pointless and is retained for backwards compatibility
only.
Mimics the API of ``collections.namedtuple``.
"""
return self
REQUIRED = object()
@classmethod
def with_params(cls, **params):
"""Define an instance subclass
Usage example::
from pov_fabric import Instance as BaseInstance
Instance = BaseInstance.with_params(
required_arg1=BaseInstance.REQUIRED,
optional_arg1='default value',
optional_arg2=None)
"""
def __init__(self, name, host, **kw):
super(new_cls, self).__init__(name, host)
for k, v in params.items():
if v is cls.REQUIRED and k not in kw:
raise TypeError(
"__init__() requires a keyword argument '{}'"
.format(k))
setattr(self, k, v)
for k, v in kw.items():
if k not in params:
raise TypeError(
"__init__() got an unexpected keyword argument '{}'"
.format(k))
setattr(self, k, v)
new_cls = type('Instance', (cls, ), dict(__init__=__init__))
return new_cls
@classmethod
def define(cls, *args, **kwargs):
"""Define an instance.
Creates a new Instance object with the given constructor arguments,
registers it in env.instances and defines an instance selector task.
"""
instance = cls(*args, **kwargs)
_define_instance(instance)
_define_instance_task(instance.name, stacklevel=2)
@classmethod
def define_alias(cls, alias, name):
"""Define an alias for an instance.
Defines an instance selector task named ``alias`` that selects an
instance named ``name``.
Usage example::
Instance.define_alias('prod', 'srv1.example.com')
"""
_define_instance_task(alias, name, stacklevel=2)
def _define_instance(instance):
"""Define an instance.
Instances are stored in the ``env.instances`` dictionary, which is created
on demand.
"""
if not _valid_task_name(instance.name):
abort("'{name}' is not a valid instance name.".format(name=instance.name))
if not hasattr(env, 'instances'):
env.instances = {}
if instance.name in env.instances:
abort("Instance {name} is already defined.".format(name=instance.name))
env.instances[instance.name] = instance
def _define_instance_task(name, instance_name=None, stacklevel=1):
"""Define an instance task
This task will set env.instance to the name of the task.
"""
if not _valid_task_name(name):
abort("'{name}' is not a valid task name.".format(name=name))
if instance_name is None:
instance_name = name
def fn():
env.instance = instance_name
fn.__doc__ = """Select instance '%s' for subsequent tasks.""" % instance_name
instance_task = task(name=name)(fn)
fn_name = _pythonify_name(name)
module_globals = sys._getframe(stacklevel).f_globals
while fn_name in module_globals:
fn_name += '_'
module_globals[fn_name] = instance_task
def _valid_task_name(name):
"""Check if ``name`` is a valid Fabric task name"""
if not name:
return False
if name.startswith('-'):
return False
if ' ' in name:
return False
if ':' in name:
return False
if '.' in name:
return False
return True
def _pythonify_name(name):
"""Coerce the name to a valid Python identifier"""
name = ''.join(c if c.isalnum() else '_' for c in name)
if name[:1].isdigit():
name = '_' + name
return name
def get_instance(instance_name=None):
"""Select the instance to operate on.
Defaults to env.instance if instance_name is not specified.
Aborts with a help message if the instance is not defined.
"""
instances = sorted(getattr(env, 'instances', {}))
if not instances:
abort("There are no instances defined in env.instances.")
if not instance_name:
instance_name = getattr(env, 'instance', None)
try:
return env.instances[instance_name]
except KeyError:
abort("Please specify an instance ({known_instances}), e.g.\n\n"
" fab {instance} {command}".format(
known_instances=", ".join(instances),
instance=instances[0],
command=env.command))
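# A minimal fabfile.py sketch putting the instance helpers together (editor's
# illustration; the instance names, hosts, repository URL and paths below are
# placeholders, not part of pov_fabric):
#
#   from fabric.api import task, settings
#   from pov_fabric import Instance, get_instance, git_clone
#
#   Instance.define('staging', host='[email protected]')
#   Instance.define('production', host='[email protected]')
#
#   @task
#   def deploy():
#       instance = get_instance()
#       with settings(host_string=instance.host):
#           git_clone('[email protected]:example/project.git', '/opt/project',
#                     changelog=True)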
|
PypiClean
|
/pulumi_google_native-0.31.2a1689827148.tar.gz/pulumi_google_native-0.31.2a1689827148/pulumi_google_native/compute/beta/get_interconnect_attachment.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetInterconnectAttachmentResult',
'AwaitableGetInterconnectAttachmentResult',
'get_interconnect_attachment',
'get_interconnect_attachment_output',
]
@pulumi.output_type
class GetInterconnectAttachmentResult:
def __init__(__self__, admin_enabled=None, bandwidth=None, candidate_ipv6_subnets=None, candidate_subnets=None, cloud_router_ip_address=None, cloud_router_ipv6_address=None, cloud_router_ipv6_interface_id=None, configuration_constraints=None, creation_timestamp=None, customer_router_ip_address=None, customer_router_ipv6_address=None, customer_router_ipv6_interface_id=None, dataplane_version=None, description=None, edge_availability_domain=None, encryption=None, google_reference_id=None, interconnect=None, ipsec_internal_addresses=None, kind=None, label_fingerprint=None, labels=None, mtu=None, name=None, operational_status=None, pairing_key=None, partner_asn=None, partner_metadata=None, private_interconnect_info=None, region=None, remote_service=None, router=None, satisfies_pzs=None, self_link=None, stack_type=None, state=None, subnet_length=None, type=None, vlan_tag8021q=None):
if admin_enabled and not isinstance(admin_enabled, bool):
raise TypeError("Expected argument 'admin_enabled' to be a bool")
pulumi.set(__self__, "admin_enabled", admin_enabled)
if bandwidth and not isinstance(bandwidth, str):
raise TypeError("Expected argument 'bandwidth' to be a str")
pulumi.set(__self__, "bandwidth", bandwidth)
if candidate_ipv6_subnets and not isinstance(candidate_ipv6_subnets, list):
raise TypeError("Expected argument 'candidate_ipv6_subnets' to be a list")
pulumi.set(__self__, "candidate_ipv6_subnets", candidate_ipv6_subnets)
if candidate_subnets and not isinstance(candidate_subnets, list):
raise TypeError("Expected argument 'candidate_subnets' to be a list")
pulumi.set(__self__, "candidate_subnets", candidate_subnets)
if cloud_router_ip_address and not isinstance(cloud_router_ip_address, str):
raise TypeError("Expected argument 'cloud_router_ip_address' to be a str")
pulumi.set(__self__, "cloud_router_ip_address", cloud_router_ip_address)
if cloud_router_ipv6_address and not isinstance(cloud_router_ipv6_address, str):
raise TypeError("Expected argument 'cloud_router_ipv6_address' to be a str")
pulumi.set(__self__, "cloud_router_ipv6_address", cloud_router_ipv6_address)
if cloud_router_ipv6_interface_id and not isinstance(cloud_router_ipv6_interface_id, str):
raise TypeError("Expected argument 'cloud_router_ipv6_interface_id' to be a str")
pulumi.set(__self__, "cloud_router_ipv6_interface_id", cloud_router_ipv6_interface_id)
if configuration_constraints and not isinstance(configuration_constraints, dict):
raise TypeError("Expected argument 'configuration_constraints' to be a dict")
pulumi.set(__self__, "configuration_constraints", configuration_constraints)
if creation_timestamp and not isinstance(creation_timestamp, str):
raise TypeError("Expected argument 'creation_timestamp' to be a str")
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if customer_router_ip_address and not isinstance(customer_router_ip_address, str):
raise TypeError("Expected argument 'customer_router_ip_address' to be a str")
pulumi.set(__self__, "customer_router_ip_address", customer_router_ip_address)
if customer_router_ipv6_address and not isinstance(customer_router_ipv6_address, str):
raise TypeError("Expected argument 'customer_router_ipv6_address' to be a str")
pulumi.set(__self__, "customer_router_ipv6_address", customer_router_ipv6_address)
if customer_router_ipv6_interface_id and not isinstance(customer_router_ipv6_interface_id, str):
raise TypeError("Expected argument 'customer_router_ipv6_interface_id' to be a str")
pulumi.set(__self__, "customer_router_ipv6_interface_id", customer_router_ipv6_interface_id)
if dataplane_version and not isinstance(dataplane_version, int):
raise TypeError("Expected argument 'dataplane_version' to be a int")
pulumi.set(__self__, "dataplane_version", dataplane_version)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if edge_availability_domain and not isinstance(edge_availability_domain, str):
raise TypeError("Expected argument 'edge_availability_domain' to be a str")
pulumi.set(__self__, "edge_availability_domain", edge_availability_domain)
if encryption and not isinstance(encryption, str):
raise TypeError("Expected argument 'encryption' to be a str")
pulumi.set(__self__, "encryption", encryption)
if google_reference_id and not isinstance(google_reference_id, str):
raise TypeError("Expected argument 'google_reference_id' to be a str")
pulumi.set(__self__, "google_reference_id", google_reference_id)
if interconnect and not isinstance(interconnect, str):
raise TypeError("Expected argument 'interconnect' to be a str")
pulumi.set(__self__, "interconnect", interconnect)
if ipsec_internal_addresses and not isinstance(ipsec_internal_addresses, list):
raise TypeError("Expected argument 'ipsec_internal_addresses' to be a list")
pulumi.set(__self__, "ipsec_internal_addresses", ipsec_internal_addresses)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if label_fingerprint and not isinstance(label_fingerprint, str):
raise TypeError("Expected argument 'label_fingerprint' to be a str")
pulumi.set(__self__, "label_fingerprint", label_fingerprint)
if labels and not isinstance(labels, dict):
raise TypeError("Expected argument 'labels' to be a dict")
pulumi.set(__self__, "labels", labels)
if mtu and not isinstance(mtu, int):
raise TypeError("Expected argument 'mtu' to be a int")
pulumi.set(__self__, "mtu", mtu)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if operational_status and not isinstance(operational_status, str):
raise TypeError("Expected argument 'operational_status' to be a str")
pulumi.set(__self__, "operational_status", operational_status)
if pairing_key and not isinstance(pairing_key, str):
raise TypeError("Expected argument 'pairing_key' to be a str")
pulumi.set(__self__, "pairing_key", pairing_key)
if partner_asn and not isinstance(partner_asn, str):
raise TypeError("Expected argument 'partner_asn' to be a str")
pulumi.set(__self__, "partner_asn", partner_asn)
if partner_metadata and not isinstance(partner_metadata, dict):
raise TypeError("Expected argument 'partner_metadata' to be a dict")
pulumi.set(__self__, "partner_metadata", partner_metadata)
if private_interconnect_info and not isinstance(private_interconnect_info, dict):
raise TypeError("Expected argument 'private_interconnect_info' to be a dict")
pulumi.set(__self__, "private_interconnect_info", private_interconnect_info)
if region and not isinstance(region, str):
raise TypeError("Expected argument 'region' to be a str")
pulumi.set(__self__, "region", region)
if remote_service and not isinstance(remote_service, str):
raise TypeError("Expected argument 'remote_service' to be a str")
pulumi.set(__self__, "remote_service", remote_service)
if router and not isinstance(router, str):
raise TypeError("Expected argument 'router' to be a str")
pulumi.set(__self__, "router", router)
if satisfies_pzs and not isinstance(satisfies_pzs, bool):
raise TypeError("Expected argument 'satisfies_pzs' to be a bool")
pulumi.set(__self__, "satisfies_pzs", satisfies_pzs)
if self_link and not isinstance(self_link, str):
raise TypeError("Expected argument 'self_link' to be a str")
pulumi.set(__self__, "self_link", self_link)
if stack_type and not isinstance(stack_type, str):
raise TypeError("Expected argument 'stack_type' to be a str")
pulumi.set(__self__, "stack_type", stack_type)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if subnet_length and not isinstance(subnet_length, int):
raise TypeError("Expected argument 'subnet_length' to be a int")
pulumi.set(__self__, "subnet_length", subnet_length)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if vlan_tag8021q and not isinstance(vlan_tag8021q, int):
raise TypeError("Expected argument 'vlan_tag8021q' to be a int")
pulumi.set(__self__, "vlan_tag8021q", vlan_tag8021q)
@property
@pulumi.getter(name="adminEnabled")
def admin_enabled(self) -> bool:
"""
Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
"""
return pulumi.get(self, "admin_enabled")
@property
@pulumi.getter
def bandwidth(self) -> str:
"""
Provisioned bandwidth capacity for the interconnect attachment. For attachments of type DEDICATED, the user can set the bandwidth. For attachments of type PARTNER, the Google Partner that is operating the interconnect must set the bandwidth. Output only for PARTNER type, mutable for PARTNER_PROVIDER and DEDICATED, and can take one of the following values: - BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s - BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G: 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s
"""
return pulumi.get(self, "bandwidth")
@property
@pulumi.getter(name="candidateIpv6Subnets")
def candidate_ipv6_subnets(self) -> Sequence[str]:
"""
This field is not available.
"""
return pulumi.get(self, "candidate_ipv6_subnets")
@property
@pulumi.getter(name="candidateSubnets")
def candidate_subnets(self) -> Sequence[str]:
"""
Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress and customerRouterIpAddress for this attachment. All prefixes must be within link-local address space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to select an unused /29 from the supplied candidate prefix(es). The request will fail if all possible /29s are in use on Google's edge. If not supplied, Google will randomly select an unused /29 from all of link-local space.
"""
return pulumi.get(self, "candidate_subnets")
@property
@pulumi.getter(name="cloudRouterIpAddress")
def cloud_router_ip_address(self) -> str:
"""
IPv4 address + prefix length to be configured on Cloud Router Interface for this interconnect attachment.
"""
return pulumi.get(self, "cloud_router_ip_address")
@property
@pulumi.getter(name="cloudRouterIpv6Address")
def cloud_router_ipv6_address(self) -> str:
"""
IPv6 address + prefix length to be configured on Cloud Router Interface for this interconnect attachment.
"""
return pulumi.get(self, "cloud_router_ipv6_address")
@property
@pulumi.getter(name="cloudRouterIpv6InterfaceId")
def cloud_router_ipv6_interface_id(self) -> str:
"""
This field is not available.
"""
return pulumi.get(self, "cloud_router_ipv6_interface_id")
@property
@pulumi.getter(name="configurationConstraints")
def configuration_constraints(self) -> 'outputs.InterconnectAttachmentConfigurationConstraintsResponse':
"""
Constraints for this attachment, if any. The attachment does not work if these constraints are not met.
"""
return pulumi.get(self, "configuration_constraints")
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> str:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter(name="customerRouterIpAddress")
def customer_router_ip_address(self) -> str:
"""
IPv4 address + prefix length to be configured on the customer router subinterface for this interconnect attachment.
"""
return pulumi.get(self, "customer_router_ip_address")
@property
@pulumi.getter(name="customerRouterIpv6Address")
def customer_router_ipv6_address(self) -> str:
"""
IPv6 address + prefix length to be configured on the customer router subinterface for this interconnect attachment.
"""
return pulumi.get(self, "customer_router_ipv6_address")
@property
@pulumi.getter(name="customerRouterIpv6InterfaceId")
def customer_router_ipv6_interface_id(self) -> str:
"""
This field is not available.
"""
return pulumi.get(self, "customer_router_ipv6_interface_id")
@property
@pulumi.getter(name="dataplaneVersion")
def dataplane_version(self) -> int:
"""
Dataplane version for this InterconnectAttachment. This field is only present for Dataplane version 2 and higher. Absence of this field in the API output indicates that the Dataplane is version 1.
"""
return pulumi.get(self, "dataplane_version")
@property
@pulumi.getter
def description(self) -> str:
"""
An optional description of this resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="edgeAvailabilityDomain")
def edge_availability_domain(self) -> str:
"""
Desired availability domain for the attachment. Only available for type PARTNER, at creation time, and can take one of the following values: - AVAILABILITY_DOMAIN_ANY - AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should configure a pair of attachments, one per availability domain. The selected availability domain will be provided to the Partner via the pairing key, so that the provisioned circuit will lie in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
"""
return pulumi.get(self, "edge_availability_domain")
@property
@pulumi.getter
def encryption(self) -> str:
"""
Indicates the user-supplied encryption option of this VLAN attachment (interconnectAttachment). Can only be specified at attachment creation for PARTNER or DEDICATED attachments. Possible values are: - NONE - This is the default value, which means that the VLAN attachment carries unencrypted traffic. VMs are able to send traffic to, or receive traffic from, such a VLAN attachment. - IPSEC - The VLAN attachment carries only encrypted traffic that is encrypted by an IPsec device, such as an HA VPN gateway or third-party IPsec VPN. VMs cannot directly send traffic to, or receive traffic from, such a VLAN attachment. To use *HA VPN over Cloud Interconnect*, the VLAN attachment must be created with this option.
"""
return pulumi.get(self, "encryption")
@property
@pulumi.getter(name="googleReferenceId")
def google_reference_id(self) -> str:
"""
Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. [Deprecated] This field is not used.
"""
warnings.warn("""[Output Only] Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. [Deprecated] This field is not used.""", DeprecationWarning)
pulumi.log.warn("""google_reference_id is deprecated: [Output Only] Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. [Deprecated] This field is not used.""")
return pulumi.get(self, "google_reference_id")
@property
@pulumi.getter
def interconnect(self) -> str:
"""
URL of the underlying Interconnect object that this attachment's traffic will traverse through.
"""
return pulumi.get(self, "interconnect")
@property
@pulumi.getter(name="ipsecInternalAddresses")
def ipsec_internal_addresses(self) -> Sequence[str]:
"""
A list of URLs of addresses that have been reserved for the VLAN attachment. Used only for the VLAN attachment that has the encryption option as IPSEC. The addresses must be regional internal IP address ranges. When creating an HA VPN gateway over the VLAN attachment, if the attachment is configured to use a regional internal IP address, then the VPN gateway's IP address is allocated from the IP address range specified here. For example, if the HA VPN gateway's interface 0 is paired to this VLAN attachment, then a regional internal IP address for the VPN gateway interface 0 will be allocated from the IP address specified for this VLAN attachment. If this field is not specified when creating the VLAN attachment, then later on when creating an HA VPN gateway on this VLAN attachment, the HA VPN gateway's IP address is allocated from the regional external IP address pool.
"""
return pulumi.get(self, "ipsec_internal_addresses")
@property
@pulumi.getter
def kind(self) -> str:
"""
Type of the resource. Always compute#interconnectAttachment for interconnect attachments.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> str:
"""
A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially a hash of the labels set used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels, otherwise the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
"""
return pulumi.get(self, "label_fingerprint")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
Labels for this resource. These can only be added or modified by the setLabels method. Each label key/value pair must comply with RFC1035. Label values may be empty.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def mtu(self) -> int:
"""
Maximum Transmission Unit (MTU), in bytes, of packets passing through this interconnect attachment. Only 1440 and 1500 are allowed. If not specified, the value will default to 1440.
"""
return pulumi.get(self, "mtu")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="operationalStatus")
def operational_status(self) -> str:
"""
The current status of whether or not this interconnect attachment is functional, which can take one of the following values: - OS_ACTIVE: The attachment has been turned up and is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete.
"""
return pulumi.get(self, "operational_status")
@property
@pulumi.getter(name="pairingKey")
def pairing_key(self) -> str:
"""
[Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The opaque identifier of an PARTNER attachment used to initiate provisioning with a selected partner. Of the form "XXXXX/region/domain"
"""
return pulumi.get(self, "pairing_key")
@property
@pulumi.getter(name="partnerAsn")
def partner_asn(self) -> str:
"""
Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available for DEDICATED.
"""
return pulumi.get(self, "partner_asn")
@property
@pulumi.getter(name="partnerMetadata")
def partner_metadata(self) -> 'outputs.InterconnectAttachmentPartnerMetadataResponse':
"""
        Informational metadata about Partner attachments from Partners to display to customers. Output only for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED.
"""
return pulumi.get(self, "partner_metadata")
@property
@pulumi.getter(name="privateInterconnectInfo")
def private_interconnect_info(self) -> 'outputs.InterconnectAttachmentPrivateInfoResponse':
"""
Information specific to an InterconnectAttachment. This property is populated if the interconnect that this is attached to is of type DEDICATED.
"""
return pulumi.get(self, "private_interconnect_info")
@property
@pulumi.getter
def region(self) -> str:
"""
URL of the region where the regional interconnect attachment resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="remoteService")
def remote_service(self) -> str:
"""
If the attachment is on a Cross-Cloud Interconnect connection, this field contains the interconnect's remote location service provider. Example values: "Amazon Web Services" "Microsoft Azure". The field is set only for attachments on Cross-Cloud Interconnect connections. Its value is copied from the InterconnectRemoteLocation remoteService field.
"""
return pulumi.get(self, "remote_service")
@property
@pulumi.getter
def router(self) -> str:
"""
URL of the Cloud Router to be used for dynamic routing. This router must be in the same region as this InterconnectAttachment. The InterconnectAttachment will automatically connect the Interconnect to the network & region within which the Cloud Router is configured.
"""
return pulumi.get(self, "router")
@property
@pulumi.getter(name="satisfiesPzs")
def satisfies_pzs(self) -> bool:
"""
Reserved for future use.
"""
return pulumi.get(self, "satisfies_pzs")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> str:
"""
Server-defined URL for the resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="stackType")
def stack_type(self) -> str:
"""
The stack type for this interconnect attachment to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used. This field can be both set at interconnect attachments creation and update interconnect attachment operations.
"""
return pulumi.get(self, "stack_type")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of this attachment's functionality. Enum values ACTIVE and UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect attachments, while enum values PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER are used for only PARTNER and PARTNER_PROVIDER interconnect attachments. This state can take one of the following values: - ACTIVE: The attachment has been turned up and is ready to use. - UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. - PENDING_PARTNER: A newly-created PARTNER attachment that has not yet been configured on the Partner side. - PARTNER_REQUEST_RECEIVED: A PARTNER attachment is in the process of provisioning after a PARTNER_PROVIDER attachment was created that references it. - PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is waiting for a customer to activate it. - DEFUNCT: The attachment was deleted externally and is no longer functional. This could be because the associated Interconnect was removed, or because the other side of a Partner attachment was deleted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="subnetLength")
def subnet_length(self) -> int:
"""
Length of the IPv4 subnet mask. Allowed values: - 29 (default) - 30 The default value is 29, except for Cross-Cloud Interconnect connections that use an InterconnectRemoteLocation with a constraints.subnetLengthRange.min equal to 30. For example, connections that use an Azure remote location fall into this category. In these cases, the default value is 30, and requesting 29 returns an error. Where both 29 and 30 are allowed, 29 is preferred, because it gives Google Cloud Support more debugging visibility.
"""
return pulumi.get(self, "subnet_length")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of interconnect attachment this is, which can take one of the following values: - DEDICATED: an attachment to a Dedicated Interconnect. - PARTNER: an attachment to a Partner Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner Interconnect, created by the partner.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="vlanTag8021q")
def vlan_tag8021q(self) -> int:
"""
The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4093. Only specified at creation time.
"""
return pulumi.get(self, "vlan_tag8021q")
class AwaitableGetInterconnectAttachmentResult(GetInterconnectAttachmentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetInterconnectAttachmentResult(
admin_enabled=self.admin_enabled,
bandwidth=self.bandwidth,
candidate_ipv6_subnets=self.candidate_ipv6_subnets,
candidate_subnets=self.candidate_subnets,
cloud_router_ip_address=self.cloud_router_ip_address,
cloud_router_ipv6_address=self.cloud_router_ipv6_address,
cloud_router_ipv6_interface_id=self.cloud_router_ipv6_interface_id,
configuration_constraints=self.configuration_constraints,
creation_timestamp=self.creation_timestamp,
customer_router_ip_address=self.customer_router_ip_address,
customer_router_ipv6_address=self.customer_router_ipv6_address,
customer_router_ipv6_interface_id=self.customer_router_ipv6_interface_id,
dataplane_version=self.dataplane_version,
description=self.description,
edge_availability_domain=self.edge_availability_domain,
encryption=self.encryption,
google_reference_id=self.google_reference_id,
interconnect=self.interconnect,
ipsec_internal_addresses=self.ipsec_internal_addresses,
kind=self.kind,
label_fingerprint=self.label_fingerprint,
labels=self.labels,
mtu=self.mtu,
name=self.name,
operational_status=self.operational_status,
pairing_key=self.pairing_key,
partner_asn=self.partner_asn,
partner_metadata=self.partner_metadata,
private_interconnect_info=self.private_interconnect_info,
region=self.region,
remote_service=self.remote_service,
router=self.router,
satisfies_pzs=self.satisfies_pzs,
self_link=self.self_link,
stack_type=self.stack_type,
state=self.state,
subnet_length=self.subnet_length,
type=self.type,
vlan_tag8021q=self.vlan_tag8021q)
def get_interconnect_attachment(interconnect_attachment: Optional[str] = None,
project: Optional[str] = None,
region: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInterconnectAttachmentResult:
"""
Returns the specified interconnect attachment.
"""
__args__ = dict()
__args__['interconnectAttachment'] = interconnect_attachment
__args__['project'] = project
__args__['region'] = region
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('google-native:compute/beta:getInterconnectAttachment', __args__, opts=opts, typ=GetInterconnectAttachmentResult).value
return AwaitableGetInterconnectAttachmentResult(
admin_enabled=pulumi.get(__ret__, 'admin_enabled'),
bandwidth=pulumi.get(__ret__, 'bandwidth'),
candidate_ipv6_subnets=pulumi.get(__ret__, 'candidate_ipv6_subnets'),
candidate_subnets=pulumi.get(__ret__, 'candidate_subnets'),
cloud_router_ip_address=pulumi.get(__ret__, 'cloud_router_ip_address'),
cloud_router_ipv6_address=pulumi.get(__ret__, 'cloud_router_ipv6_address'),
cloud_router_ipv6_interface_id=pulumi.get(__ret__, 'cloud_router_ipv6_interface_id'),
configuration_constraints=pulumi.get(__ret__, 'configuration_constraints'),
creation_timestamp=pulumi.get(__ret__, 'creation_timestamp'),
customer_router_ip_address=pulumi.get(__ret__, 'customer_router_ip_address'),
customer_router_ipv6_address=pulumi.get(__ret__, 'customer_router_ipv6_address'),
customer_router_ipv6_interface_id=pulumi.get(__ret__, 'customer_router_ipv6_interface_id'),
dataplane_version=pulumi.get(__ret__, 'dataplane_version'),
description=pulumi.get(__ret__, 'description'),
edge_availability_domain=pulumi.get(__ret__, 'edge_availability_domain'),
encryption=pulumi.get(__ret__, 'encryption'),
google_reference_id=pulumi.get(__ret__, 'google_reference_id'),
interconnect=pulumi.get(__ret__, 'interconnect'),
ipsec_internal_addresses=pulumi.get(__ret__, 'ipsec_internal_addresses'),
kind=pulumi.get(__ret__, 'kind'),
label_fingerprint=pulumi.get(__ret__, 'label_fingerprint'),
labels=pulumi.get(__ret__, 'labels'),
mtu=pulumi.get(__ret__, 'mtu'),
name=pulumi.get(__ret__, 'name'),
operational_status=pulumi.get(__ret__, 'operational_status'),
pairing_key=pulumi.get(__ret__, 'pairing_key'),
partner_asn=pulumi.get(__ret__, 'partner_asn'),
partner_metadata=pulumi.get(__ret__, 'partner_metadata'),
private_interconnect_info=pulumi.get(__ret__, 'private_interconnect_info'),
region=pulumi.get(__ret__, 'region'),
remote_service=pulumi.get(__ret__, 'remote_service'),
router=pulumi.get(__ret__, 'router'),
satisfies_pzs=pulumi.get(__ret__, 'satisfies_pzs'),
self_link=pulumi.get(__ret__, 'self_link'),
stack_type=pulumi.get(__ret__, 'stack_type'),
state=pulumi.get(__ret__, 'state'),
subnet_length=pulumi.get(__ret__, 'subnet_length'),
type=pulumi.get(__ret__, 'type'),
vlan_tag8021q=pulumi.get(__ret__, 'vlan_tag8021q'))
@_utilities.lift_output_func(get_interconnect_attachment)
def get_interconnect_attachment_output(interconnect_attachment: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
region: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetInterconnectAttachmentResult]:
"""
Returns the specified interconnect attachment.
"""
...
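# A minimal usage sketch (not part of the generated module; the attachment,
# project and region names are hypothetical placeholders):
#
#     import pulumi
#     import pulumi_google_native as google_native
#
#     attachment = google_native.compute.beta.get_interconnect_attachment(
#         interconnect_attachment="my-attachment",
#         project="my-project",
#         region="us-central1")
#     pulumi.export("cloudRouterIp", attachment.cloud_router_ip_address)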
|
PypiClean
|
/test-tracaccountmanager.tar.gz/acct_mgr-0.4/dist/TracAccountManager-0.5dev-r12398/acct_mgr/admin.py
|
import inspect
import re
from genshi.builder import tag
from genshi.core import Markup
from trac.core import Component, TracError, implements
from trac.config import Option
from trac.perm import PermissionSystem
from trac.util.datefmt import format_datetime, to_datetime
from trac.util.presentation import Paginator
from trac.web.chrome import Chrome, add_link, add_notice, add_stylesheet, \
add_warning
from trac.admin import IAdminPanelProvider
from acct_mgr.api import AccountManager, CommonTemplateProvider, \
_, dgettext, gettext, ngettext, tag_
from acct_mgr.guard import AccountGuard
from acct_mgr.model import del_user_attribute, email_verified, \
get_user_attribute, last_seen, \
set_user_attribute
from acct_mgr.register import EmailVerificationModule, RegistrationError
from acct_mgr.web_ui import AccountModule
from acct_mgr.util import is_enabled, get_pretty_dateinfo
try:
from trac.util import as_int
except ImportError:
def as_int(s, default, min=None, max=None):
"""Convert s to an int and limit it to the given range, or
return default if unsuccessful (copied verbatim from Trac 0.12dev).
"""
try:
value = int(s)
except (TypeError, ValueError):
return default
if min is not None and value < min:
value = min
if max is not None and value > max:
value = max
return value
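# For example: as_int("7", 5, min=1, max=10) returns 7, while as_int("abc", 5)
# falls back to the default and returns 5.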
def fetch_user_data(env, req):
acctmgr = AccountManager(env)
guard = AccountGuard(env)
accounts = {}
for username in acctmgr.get_users():
if req.perm.has_permission('ACCTMGR_USER_ADMIN'):
url = req.href.admin('accounts', 'users', user=username)
else:
url = None
accounts[username] = {'username': username, 'review_url': url}
if guard.user_locked(username):
accounts[username]['locked'] = True
t_lock = guard.lock_time(username)
if t_lock > 0:
t_release = guard.pretty_release_time(req, username)
accounts[username]['release_hint'] = _(
"Locked until %(t_release)s",
t_release=t_release)
for acct, status in get_user_attribute(env, username=None,
authenticated=None).iteritems():
account = accounts.get(acct)
if account is not None and 1 in status:
# Only use attributes related to authenticated
# accounts.
account['name'] = status[1].get('name')
account['email'] = status[1].get('email')
if account['email']:
account['email'] = Chrome(env).format_author(req,
account['email'])
ts_seen = last_seen(env)
if ts_seen is not None:
for username, last_visit in ts_seen:
account = accounts.get(username)
if account and last_visit:
account['last_visit'] = to_datetime(last_visit)
return sorted(accounts.itervalues(), key=lambda acct: acct['username'])
def _getoptions(cls):
opt_cls = isinstance(cls, Component) and cls.__class__ or cls
options = [(name, value) for name, value in inspect.getmembers(opt_cls)
if isinstance(value, Option)]
index = 0
for option in options:
index += 1
try:
opt_val = option[1].__get__(cls, cls)
except AttributeError:
# Error will be raised again when parsing options list,
# so don't care here.
continue
# Check, if option is a valid component (possibly with own options).
opt_cls = isinstance(opt_val, Component) and opt_val.__class__ or None
extents = _getoptions(opt_cls)
for extent in extents:
options.insert(index, extent)
index += 1
return options
def _setorder(req, stores):
"""Pull the password store ordering out of the req object"""
for store in stores.get_all_stores():
stores[store] = int(req.args.get(store.__class__.__name__, 0))
continue
class StoreOrder(dict):
"""Keeps the order of the Password Stores"""
instance = 0
def __init__(self, d={}, stores=[], list=[]):
self.instance += 1
self.d = {}
self.sxref = {}
for store in stores:
self.d[store] = 0
self[0] = store
self.sxref[store.__class__.__name__] = store
continue
for i, s in enumerate(list):
self.d[s] = i + 1
self[i + 1] = s
def __getitem__(self, key):
"""Lookup a store in the list"""
return self.d[key]
def __setitem__(self, key, value):
if isinstance(key, Component):
order = self.d[key]
self.d[key] = value
self.d[order].remove(key)
self[value] = key
elif isinstance(key, basestring):
self.d[self.sxref[key]] = value
elif isinstance(key, int):
self.d.setdefault(key, [])
self.d[key].append(value)
else:
raise KeyError(_("Invalid key type (%s) for StoreOrder")
% str(type(key)))
pass
def get_enabled_stores(self):
"""Return an ordered list of password stores
All stores that are order 0 are dropped from the list.
"""
keys = [k for k in self.d.keys() if isinstance(k, int)]
keys.sort()
storelist = []
for k in keys[1:]:
storelist.extend(self.d[k])
continue
return storelist
def get_enabled_store_names(self):
"""Returns the class names of the enabled password stores"""
stores = self.get_enabled_stores()
return [s.__class__.__name__ for s in stores]
def get_all_stores(self):
return [k for k in self.d.keys() if isinstance(k, Component)]
def numstores(self):
return len(self.get_all_stores())
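# For example, with two hypothetical store instances, StoreOrder(stores=[session,
# htpasswd], list=[htpasswd]) leaves ``session`` at order 0 (disabled) and moves
# ``htpasswd`` to order 1, so get_enabled_store_names() returns ['HtPasswdStore'].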
class AccountManagerAdminPanel(CommonTemplateProvider):
implements(IAdminPanelProvider)
ACCTS_PER_PAGE = 5
def __init__(self):
self.acctmgr = AccountManager(self.env)
self.guard = AccountGuard(self.env)
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if req.perm.has_permission('ACCTMGR_CONFIG_ADMIN'):
yield ('accounts', _("Accounts"), 'config', _("Configuration"))
if req.perm.has_permission('ACCTMGR_USER_ADMIN'):
yield ('accounts', _("Accounts"), 'users', _("Users"))
def render_admin_panel(self, req, cat, page, path_info):
if page == 'config':
return self._do_config(req)
elif page == 'users':
return self._do_users(req)
def _do_config(self, req):
stores = StoreOrder(stores=self.acctmgr.stores,
list=self.acctmgr.password_store)
if req.method == 'POST':
if req.args.get('restart'):
del_user_attribute(self.env, attribute='password_refreshed')
req.redirect(req.href.admin('accounts', 'config',
done='restart'))
_setorder(req, stores)
self.config.set('account-manager', 'password_store',
','.join(stores.get_enabled_store_names()))
for store in stores.get_all_stores():
for attr, option in _getoptions(store):
cls_name = store.__class__.__name__
newvalue = req.args.get('%s.%s' % (cls_name, attr))
self.log.debug("%s.%s: %s" % (cls_name, attr, newvalue))
if newvalue is not None:
self.config.set(option.section, option.name, newvalue)
self.config.save()
self.config.set('account-manager', 'force_passwd_change',
req.args.get('force_passwd_change', False))
self.config.set('account-manager', 'persistent_sessions',
req.args.get('persistent_sessions', False))
self.config.set('account-manager', 'verify_email',
req.args.get('verify_email', False))
self.config.set('account-manager', 'refresh_passwd',
req.args.get('refresh_passwd', False))
self.config.save()
sections = []
for store in self.acctmgr.stores:
if store.__class__.__name__ == "ResetPwStore":
# Exclude special store, that is used strictly internally and
# inherits configuration from SessionStore anyway.
continue
options = []
for attr, option in _getoptions(store):
error = None
opt_val = None
value = None
try:
opt_val = option.__get__(store, store)
except AttributeError, e:
self.env.log.error(e)
error = _("""Error while reading configuration -
Hint: Enable/install the required component.""")
pass
if opt_val:
value = isinstance(opt_val, Component) and \
opt_val.__class__.__name__ or opt_val
opt_sel = None
try:
interface = option.xtnpt.interface
opt_sel = {'options': [], 'selected': None}
except AttributeError:
# No ExtensionOption / Interface undefined
pass
if opt_sel:
for impl in option.xtnpt.extensions(self.env):
extension = impl.__class__.__name__
opt_sel['options'].append(extension)
if opt_val and extension == value:
opt_sel['selected'] = extension
if len(opt_sel['options']) == 0 and error:
opt_sel['error'] = error
value = opt_sel
options.append(
{'label': attr,
'name': '%s.%s' % (store.__class__.__name__, attr),
'value': value,
'doc': gettext(option.__doc__)
})
continue
sections.append(
{'name': store.__class__.__name__,
'classname': store.__class__.__name__,
'order': stores[store],
'options' : options,
})
continue
sections = sorted(sections, key=lambda i: i['name'])
numstores = range(0, stores.numstores() + 1)
data = {
'_dgettext': dgettext,
'sections': sections,
'numstores': numstores,
'force_passwd_change': self.acctmgr.force_passwd_change,
'persistent_sessions': self.acctmgr.persistent_sessions,
'verify_email': self.acctmgr.verify_email,
'refresh_passwd': self.acctmgr.refresh_passwd,
}
result = req.args.get('done')
if result == 'restart':
data['result'] = _("Password hash refresh procedure restarted.")
return 'admin_accountsconfig.html', data
def _do_users(self, req):
env = self.env
perm = PermissionSystem(env)
acctmgr = self.acctmgr
acctmod = AccountModule(env)
guard = self.guard
listing_enabled = acctmgr.supports('get_users')
create_enabled = acctmgr.supports('set_password')
password_change_enabled = acctmgr.supports('set_password')
password_reset_enabled = acctmod.reset_password_enabled
delete_enabled = acctmgr.supports('delete_user')
verify_enabled = acctmgr.verify_email and \
EmailVerificationModule(env).email_enabled
account = dict(email=req.args.get('email', '').strip(),
name=req.args.get('name', '').strip(),
username=acctmgr.handle_username_casing(
req.args.get('username', '').strip()))
data = {
'_dgettext': dgettext,
'acctmgr': account,
'email_approved': True,
'listing_enabled': listing_enabled,
'create_enabled': create_enabled,
'delete_enabled': delete_enabled,
'verify_enabled': verify_enabled,
'ignore_auth_case': self.config.getbool('trac',
'ignore_auth_case'),
'password_change_enabled': password_change_enabled,
'password_reset_enabled': password_reset_enabled
}
if req.method == 'GET':
if 'user' in req.args.iterkeys():
return self._do_acct_details(req)
elif req.args.get('max_per_page'):
return self._do_db_cleanup(req)
if req.method == 'POST':
email_approved = req.args.get('email_approved')
# Preserve selection during a series of requests.
data['email_approved'] = email_approved
if req.args.get('add'):
# Add new user account.
if create_enabled:
# Check request and prime account on success.
try:
acctmgr.validate_registration(req)
# Account email approval for authoritative action.
if verify_enabled and email_approved and \
account['email']:
set_user_attribute(env, account['username'],
'email_verification_sent_to', account['email'])
# User editor form clean-up.
data['acctmgr'] = {}
except RegistrationError, e:
# Attempt deferred translation.
message = gettext(e.message)
# Check for (matching number of) message arguments
# before attempting string substitution.
if e.msg_args and \
len(e.msg_args) == len(re.findall('%s',
message)):
message = message % e.msg_args
data['editor_error'] = Markup(message)
else:
data['editor_error'] = _(
"The password store does not support creating users.")
elif req.args.get('reset') and req.args.get('sel'):
# Password reset for one or more accounts.
if password_reset_enabled:
sel = req.args.get('sel')
sel = isinstance(sel, list) and sel or [sel]
for username, name, email in env.get_known_users():
if username in sel:
acctmod._reset_password(username, email)
else:
data['deletion_error'] = _(
"The password reset procedure is not enabled.")
elif req.args.get('remove') and req.args.get('sel'):
# Delete one or more accounts.
if delete_enabled:
sel = req.args.get('sel')
sel = isinstance(sel, list) and sel or [sel]
for account in sel:
acctmgr.delete_user(account)
else:
data['deletion_error'] = _(
"The password store does not support deleting users.")
elif req.args.get('change'):
# Change attributes and or password of existing user account.
attributes = {
'email': _("Email Address"),
'name': _("Pre-/Surname (Nickname)"),
'password': _("Password")
}
data['success'] = []
error = TracError('')
username = acctmgr.handle_username_casing(
req.args.get('username').strip())
try:
if not username:
error.account = {'username' : username}
error.message = _("Username cannot be empty.")
raise error
if not acctmgr.has_user(username):
error.account = {'username' : username}
error.message = _("Unknown user %(user)s.",
user=username)
raise error
password = req.args.get('password')
if password and (password.strip() != ''):
if password_change_enabled:
if password != req.args.get('password_confirm'):
error.message = _("The passwords must match.")
raise error
acctmgr.set_password(username, password)
data['success'].append(attributes.get('password'))
else:
data['editor_error'] = _(
"""The password store does not support
changing passwords.
""")
for attribute in ('name', 'email'):
value = req.args.get(attribute, '').strip()
if value:
set_user_attribute(env, username,
attribute, value)
data['success'].append(attributes.get(attribute))
# Account email approval for authoritative action.
if attribute == 'email' and verify_enabled and \
email_approved:
set_user_attribute(env, username,
'email_verification_sent_to', value)
# User editor form clean-up on success.
data['acctmgr'] = {}
except TracError, e:
data['editor_error'] = e.message
data['acctmgr'] = getattr(e, 'account', '')
elif len([action for action in req.args.iterkeys() \
if action in ('cleanup', 'purge', 'unselect')]) > 0:
return self._do_db_cleanup(req)
# (Re-)Build current user list.
if listing_enabled:
data['accounts'] = fetch_user_data(env, req)
data['cls'] = 'listing'
data['cols'] = ['email', 'name']
# Prevent IRequestFilter in trac.timeline.web_ui.TimelineModule
# of Trac 0.13 and later from adding a link to timeline by
# adding the function with a different key name here.
data['pretty_date'] = get_pretty_dateinfo(env, req)
add_stylesheet(req, 'acct_mgr/acct_mgr.css')
return 'admin_users.html', data
def _do_acct_details(self, req):
username = req.args.get('user')
if not username:
# Accessing user account details without username is not useful,
# so we redirect such requests immediately.
add_warning(req, Markup(tag.span(tag_(
"Please choose account by username from list to proceed."
))))
req.redirect(req.href.admin('accounts', 'users'))
acctmgr = self.acctmgr
guard = self.guard
if req.args.get('update'):
req.redirect(req.href.admin('accounts', 'users',
user=username))
elif req.args.get('delete') or req.args.get('release'):
# delete failed login attempts, evaluating attempts count
if guard.failed_count(username, reset=True) > 0:
add_notice(req, Markup(tag.span(Markup(_(
"Failed login attempts for user %(user)s deleted",
user=tag.b(username)
)))))
elif req.args.get('list'):
req.redirect(req.href.admin('accounts', 'users'))
data = {'_dgettext': dgettext,
'user': username,
}
stores = StoreOrder(stores=acctmgr.stores,
list=acctmgr.password_store)
user_store = acctmgr.find_user_store(username)
if not user_store is None:
data['user_store'] = user_store.__class__.__name__
data['store_order_num'] = stores[user_store]
data['ignore_auth_case'] = \
self.config.getbool('trac', 'ignore_auth_case')
for username_, name, email in self.env.get_known_users():
if username_ == username:
data['name'] = name
if email:
data['email'] = email
break
ts_seen = last_seen(self.env, username)
if ts_seen is not None:
data['last_visit'] = format_datetime(ts_seen[0][1], tzinfo=req.tz)
attempts = []
attempts_count = guard.failed_count(username, reset = None)
if attempts_count > 0:
for attempt in guard.get_failed_log(username):
t = format_datetime(to_datetime(
attempt['time']), tzinfo=req.tz)
attempts.append({'ipnr': attempt['ipnr'], 'time': t})
data['attempts'] = attempts
data['attempts_count'] = attempts_count
data['pretty_lock_time'] = guard.pretty_lock_time(username, next=True)
data['lock_count'] = guard.lock_count(username)
if guard.user_locked(username) is True:
data['user_locked'] = True
data['release_time'] = guard.pretty_release_time(req, username)
if is_enabled(self.env, EmailVerificationModule) and \
acctmgr.verify_email is True:
data['verification'] = 'enabled'
data['email_verified'] = email_verified(self.env, username, email)
self.log.debug('AcctMgr:admin:_do_acct_details for user \"' + \
username + '\", email \"' + str(email) + '\": ' + \
str(data['email_verified']))
add_stylesheet(req, 'acct_mgr/acct_mgr.css')
data['url'] = req.href.admin('accounts', 'users', user=username)
return 'account_details.html', data
def _do_db_cleanup(self, req):
if req.perm.has_permission('ACCTMGR_ADMIN'):
env = self.env
changed = False
# Get all data from 'session_attributes' db table.
attr = get_user_attribute(self.env, username=None,
authenticated=None)
attrs = {}
sel = req.args.get('sel')
if req.args.get('purge') and sel is not None:
sel = isinstance(sel, list) and sel or [sel]
sel_len = len(sel)
matched = []
for acct, states in attr.iteritems():
for state in states['id'].keys():
for elem, id in states[state]['id'].iteritems():
if id in sel:
if acct in attrs.keys():
if state in attrs[acct].keys():
attrs[acct][state] \
.append(elem)
else:
attrs[acct][state] = [elem]
else:
attrs[acct] = {state: [elem]}
matched.append(id)
if len(matched) == sel_len:
break
if len(matched) == sel_len:
break
if len(matched) == sel_len:
break
for id in (frozenset(sel) - frozenset(matched)):
for acct, states in attr.iteritems():
for state, id_ in states['id'].iteritems():
if id == id_:
# Full account is marked, forget attributes.
if acct in attrs.keys():
attrs[acct].update({state: []})
else:
attrs[acct] = {state: []}
matched.append(id)
if len(matched) == sel_len:
break
if len(matched) == sel_len:
break
# DEVEL: for Python>2.4 better use defaultdict for counters
del_count = {'acct': 0, 'attr': 0}
for account, states in attrs.iteritems():
for state, elem in states.iteritems():
if len(elem) == 0:
del_user_attribute(env, account, state)
del_count['acct'] += 1
else:
for attribute in elem:
del_user_attribute(env, account, state,
attribute)
del_count['attr'] += 1
changed = True
elif req.args.get('list'):
req.redirect(req.href.admin('accounts', 'users'))
if changed == True:
# Update the dict after changes.
attr = get_user_attribute(env, username=None,
authenticated=None)
data = {'_dgettext': dgettext}
data.update(self._prepare_attrs(req, attr))
if req.args.get('purge') and sel is not None:
accounts = attributes = ''
n_plural=del_count['acct']
if n_plural > 0:
accounts = tag.li(tag.span(tag(ngettext(
"%(count)s account",
"%(count)s accounts",
n_plural, count=n_plural
))))
n_plural=del_count['attr']
if n_plural > 0:
attributes = tag.li(tag.span(tag(ngettext(
"%(count)s account attribute",
"%(count)s account attributes",
n_plural, count=n_plural
))))
data['result'] = tag(_("Successfully deleted:"),
tag.ul(accounts, attributes))
add_stylesheet(req, 'acct_mgr/acct_mgr.css')
return 'db_cleanup.html', data
def _prepare_attrs(self, req, attr):
page = int(req.args.get('page', '1'))
# Paginator can't deal with dict, so convert to list.
attr_lst = [(k,v) for k,v in attr.iteritems()]
max_per_page = as_int(req.args.get('max_per_page'), None)
if max_per_page is None:
max_per_page = self.ACCTS_PER_PAGE
attr = Paginator(attr_lst, page - 1, max_per_page)
pagedata = []
shown_pages = attr.get_shown_pages(21)
for shown_page in shown_pages:
page_href = req.href.admin('accounts', 'users', page=shown_page,
max_per_page=max_per_page)
pagedata.append([page_href, None, str(shown_page),
_("page %(num)s", num=str(shown_page))])
fields = ['href', 'class', 'string', 'title']
attr.shown_pages = [dict(zip(fields, p)) for p in pagedata]
attr.current_page = {'href': None, 'class': 'current',
'string': str(attr.page + 1), 'title':None}
if attr.has_next_page:
next_href = req.href.admin('accounts', 'users', page=page + 1,
max_per_page=max_per_page)
add_link(req, 'next', next_href, _('Next Page'))
if attr.has_previous_page:
prev_href = req.href.admin('accounts', 'users', page=page - 1,
max_per_page=max_per_page)
add_link(req, 'prev', prev_href, _('Previous Page'))
page_href = req.href.admin('accounts', 'cleanup')
return {'attr': attr, 'page_href': page_href}
|
PypiClean
|
/alauda-celery-3.1.25rc1.tar.gz/alauda-celery-3.1.25rc1/docs/getting-started/brokers/sqlalchemy.rst
|
.. _broker-sqlalchemy:
==================
Using SQLAlchemy
==================
.. admonition:: Experimental Status
The SQLAlchemy transport is unstable in many areas and there are
several issues open. Unfortunately we don't have the resources or funds
required to improve the situation, so we're looking for contributors
and partners willing to help.
.. _broker-sqlalchemy-installation:
Installation
============
.. _broker-sqlalchemy-configuration:
Configuration
=============
Celery needs to know the location of your database, which should be the usual
SQLAlchemy connection string, but with 'sqla+' prepended to it::
BROKER_URL = 'sqla+sqlite:///celerydb.sqlite'
This transport uses only the :setting:`BROKER_URL` setting, which has to be
an SQLAlchemy database URI.
Please see `SQLAlchemy: Supported Databases`_ for a table of supported databases.
Here's a list of examples using a selection of other `SQLAlchemy Connection Strings`_:
.. code-block:: python
# sqlite (filename)
BROKER_URL = 'sqla+sqlite:///celerydb.sqlite'
# mysql
BROKER_URL = 'sqla+mysql://scott:tiger@localhost/foo'
# postgresql
BROKER_URL = 'sqla+postgresql://scott:tiger@localhost/mydatabase'
# oracle
BROKER_URL = 'sqla+oracle://scott:[email protected]:1521/sidname'
.. _`SQLAlchemy: Supported Databases`:
http://www.sqlalchemy.org/docs/core/engines.html#supported-databases
.. _`SQLAlchemy Connection Strings`:
http://www.sqlalchemy.org/docs/core/engines.html#database-urls
.. _sqlalchemy-results-configuration:
Results
-------
To store results in the database as well, you should configure the result
backend. See :ref:`conf-database-result-backend`.
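For example, to keep task results in a local SQLite database as well (a sketch;
adjust the URI for your own database)::
CELERY_RESULT_BACKEND = 'db+sqlite:///results.sqlite'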
.. _broker-sqlalchemy-limitations:
Limitations
===========
The SQLAlchemy database transport does not currently support:
* Remote control commands (:program:`celery events` command, broadcast)
* Events, including the Django Admin monitor.
* Using more than a few workers (can lead to messages being executed
multiple times).
|
PypiClean
|
/sense2-0.3.7-py3-none-any.whl/conans/model/workspace.py
|
import os
from collections import OrderedDict
import yaml
from conans.client.graph.graph import RECIPE_EDITABLE
from conans.errors import ConanException
from conans.model.editable_layout import get_editable_abs_path, EditableLayout
from conans.model.ref import ConanFileReference
from conans.paths import CONANFILE
from conans.util.files import load, save
class LocalPackage(object):
def __init__(self, base_folder, data, cache, ws_layout, ws_generators, ref):
if not data or not data.get("path"):
raise ConanException("Workspace editable %s does not define path" % str(ref))
self._base_folder = base_folder
self._conanfile_folder = data.pop("path", None) # The folder with the conanfile
layout = data.pop("layout", None)
if layout:
self.layout = get_editable_abs_path(layout, self._base_folder, cache.cache_folder)
else:
self.layout = ws_layout
generators = data.pop("generators", None)
if isinstance(generators, str):
generators = [generators]
if generators is None:
generators = ws_generators
self.generators = generators
if data:
raise ConanException("Workspace unrecognized fields: %s" % data)
@property
def root_folder(self):
return os.path.abspath(os.path.join(self._base_folder, self._conanfile_folder))
class Workspace(object):
default_filename = "conanws.yml"
def __init__(self, path, cache):
self._cache = cache
self._ws_generator = None
self._workspace_packages = OrderedDict() # {reference: LocalPackage}
if not os.path.isfile(path):
path = os.path.join(path, self.default_filename)
self._base_folder = os.path.dirname(path)
try:
content = load(path)
except IOError:
raise ConanException("Couldn't load workspace file in %s" % path)
try:
self._loads(content)
except Exception as e:
raise ConanException("There was an error parsing %s: %s" % (path, str(e)))
def generate(self, install_folder, graph, output):
if self._ws_generator == "cmake":
cmake = ""
add_subdirs = ""
# To avoid multiple additions (can happen for build_requires repeated nodes)
unique_refs = OrderedDict()
for node in graph.ordered_iterate():
if node.recipe != RECIPE_EDITABLE:
continue
unique_refs[node.ref] = node
for ref, node in unique_refs.items():
ws_pkg = self._workspace_packages[ref]
layout = self._cache.package_layout(ref)
editable = layout.editable_cpp_info()
conanfile = node.conanfile
src = build = None
if editable:
build = editable.folder(ref, EditableLayout.BUILD_FOLDER, conanfile.settings,
conanfile.options)
src = editable.folder(ref, EditableLayout.SOURCE_FOLDER, conanfile.settings,
conanfile.options)
if src is not None:
src = os.path.join(ws_pkg.root_folder, src).replace("\\", "/")
cmake += 'set(PACKAGE_%s_SRC "%s")\n' % (ref.name, src)
else:
output.warn("CMake workspace: source_folder is not defined for %s" % str(ref))
if build is not None:
build = os.path.join(ws_pkg.root_folder, build).replace("\\", "/")
cmake += 'set(PACKAGE_%s_BUILD "%s")\n' % (ref.name, build)
else:
output.warn("CMake workspace: build_folder is not defined for %s" % str(ref))
if src and build:
add_subdirs += (' add_subdirectory(${PACKAGE_%s_SRC} ${PACKAGE_%s_BUILD})\n'
% (ref.name, ref.name))
else:
output.warn("CMake workspace: cannot 'add_subdirectory()'")
if add_subdirs:
cmake += "macro(conan_workspace_subdirectories)\n"
cmake += add_subdirs
cmake += "endmacro()"
cmake_path = os.path.join(install_folder, "conanworkspace.cmake")
save(cmake_path, cmake)
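# The generated conanworkspace.cmake then looks roughly like this (the package
# name "hello" is a placeholder):
#     set(PACKAGE_hello_SRC "<source folder>")
#     set(PACKAGE_hello_BUILD "<build folder>")
#     macro(conan_workspace_subdirectories)
#         add_subdirectory(${PACKAGE_hello_SRC} ${PACKAGE_hello_BUILD})
#     endmacro()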
def get_editable_dict(self):
ret = {}
for ref, ws_package in self._workspace_packages.items():
path = ws_package.root_folder
if os.path.isdir(path):
path = os.path.join(path, CONANFILE)
ret[ref] = {"path": path, "layout": ws_package.layout}
return ret
def __getitem__(self, ref):
return self._workspace_packages.get(ref)
@property
def root(self):
return self._root
def _loads(self, text):
yml = yaml.safe_load(text)
self._ws_generator = yml.pop("workspace_generator", None)
yml.pop("name", None)
ws_layout = yml.pop("layout", None)
if ws_layout:
ws_layout = get_editable_abs_path(ws_layout, self._base_folder,
self._cache.cache_folder)
generators = yml.pop("generators", None)
if isinstance(generators, str):
generators = [generators]
root_list = yml.pop("root", [])
if isinstance(root_list, str):
root_list = root_list.split(",")
self._root = [ConanFileReference.loads(s.strip())
for s in root_list if s.strip()]
if not self._root:
raise ConanException("Conan workspace needs at least 1 root conanfile")
editables = yml.pop("editables", {})
for ref, data in editables.items():
workspace_package = LocalPackage(self._base_folder, data,
self._cache, ws_layout, generators, ref)
package_name = ConanFileReference.loads(ref)
self._workspace_packages[package_name] = workspace_package
for package_name in self._root:
if package_name not in self._workspace_packages:
raise ConanException("Root %s is not defined as editable" % str(package_name))
if yml:
raise ConanException("Workspace unrecognized fields: %s" % yml)
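# A minimal conanws.yml sketch of what _loads() accepts (reference names and
# relative paths below are hypothetical placeholders):
#
#     editables:
#         say/0.1@user/testing:
#             path: say
#         hello/0.1@user/testing:
#             path: hello
#     layout: layout_gcc
#     workspace_generator: cmake
#     root: hello/0.1@user/testing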
|
PypiClean
|
/textgrid_tools-0.0.8-py3-none-any.whl/textgrid_tools/grid/audio_synchronization.py
|
from logging import Logger, getLogger
from typing import Optional
import numpy as np
from textgrid import IntervalTier, TextGrid
from textgrid_tools.globals import ExecutionResult
from textgrid_tools.helper import check_is_valid_grid, samples_to_s
from textgrid_tools.validation import InvalidGridError, ValidationError
class LastIntervalToShortError(ValidationError):
def __init__(self, grid: TextGrid, audio: np.ndarray, sample_rate: int) -> None:
super().__init__()
self.grid = grid
self.audio = audio
self.sample_rate = sample_rate
@classmethod
def validate(cls, grid: TextGrid, audio: np.ndarray, sample_rate: int):
if not can_set_end_to_audio_len(grid, audio, sample_rate):
return cls(grid, audio, sample_rate)
return None
@property
def default_message(self) -> str:
return "Couldn't change maxTime because it would not be greater than the minTime of the last interval!"
def sync_grid_to_audio(grid: TextGrid, audio: np.ndarray, sample_rate: int, logger: Optional[Logger]) -> ExecutionResult:
if logger is None:
logger = getLogger(__name__)
if error := InvalidGridError.validate(grid):
return error, False
if error := LastIntervalToShortError.validate(grid, audio, sample_rate):
return error, False
changed_something = False
old_min_time = grid.minTime
set_minTime(grid, 0)
if old_min_time != grid.minTime:
changed_something = True
logger.info(f"Adjusted start from {old_min_time} to 0.")
old_max_time = grid.maxTime
set_end_to_audio_len(grid, audio, sample_rate)
if old_max_time != grid.maxTime:
changed_something = True
logger.info(f"Adjusted end from {old_max_time} to {grid.maxTime}.")
return None, changed_something
def can_set_end_to_audio_len(grid: TextGrid, audio: np.ndarray, sample_rate: int) -> bool:
audio_duration_s = samples_to_s(audio.shape[0], sample_rate)
#audio_duration_s = round(audio_duration_s, n_digits)
return can_set_maxTime(grid, audio_duration_s)
def set_end_to_audio_len(grid: TextGrid, audio: np.ndarray, sample_rate: int) -> None:
assert can_set_end_to_audio_len(grid, audio, sample_rate)
audio_duration_s = samples_to_s(audio.shape[0], sample_rate)
#audio_duration_s = round(audio_duration_s, n_digits)
set_maxTime(grid, audio_duration_s)
def can_set_maxTime(grid: TextGrid, max_time: float) -> bool:
for tier in grid.tiers:
if len(tier.intervals) > 0:
if max_time <= tier.intervals[-1].minTime:
return False
return True
def set_maxTime(grid: TextGrid, max_time: float) -> None:
assert check_is_valid_grid(grid)
assert can_set_maxTime(grid, max_time)
assert max_time > grid.minTime
assert max_time > 0
if grid.maxTime == max_time:
return
for tier in grid.tiers:
set_maxTime_tier(tier, max_time)
grid.maxTime = max_time
assert check_is_valid_grid(grid)
def set_maxTime_tier(tier: IntervalTier, max_time: float) -> bool:
assert tier.minTime < max_time
changed_anything = False
if len(tier.intervals) > 0:
last_interval = tier.intervals[-1]
assert last_interval.minTime < max_time
if last_interval.maxTime != max_time:
last_interval.maxTime = max_time
changed_anything = True
if tier.maxTime != max_time:
tier.maxTime = max_time
changed_anything = True
return changed_anything
def set_minTime(grid: TextGrid, min_time: float) -> None:
assert check_is_valid_grid(grid)
assert min_time >= 0
assert min_time < grid.maxTime
if grid.minTime == min_time:
return
for tier in grid.tiers:
if len(tier.intervals) > 0:
assert tier.intervals[0].maxTime > min_time  # the first interval must still end after the new start time
tier.intervals[0].minTime = min_time
tier.minTime = min_time
grid.minTime = min_time
assert check_is_valid_grid(grid)
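# A minimal usage sketch (assumes the grid was read with the textgrid package and
# the audio is a numpy array with a known sample rate; file names are placeholders):
#
#     from textgrid import TextGrid
#     grid = TextGrid()
#     grid.read("recording.TextGrid")
#     error, changed = sync_grid_to_audio(grid, audio, sample_rate, logger=None)
#     if error is None and changed:
#         grid.write("recording.TextGrid")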
|
PypiClean
|
/ydk-models-cisco-ios-xr-6.6.3.tar.gz/ydk-models-cisco-ios-xr-6.6.3/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_watchd_cfg.py
|
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_BITS, REFERENCE_UNION
from ydk._core._dm_meta_info import REFERENCE_CLASS, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, ANYXML_CLASS
from ydk._core._importer import _yang_ns
_meta_table = {
'Watchdog.ThresholdMemory' : {
'meta_info' : _MetaInfoClass('Watchdog.ThresholdMemory', REFERENCE_CLASS,
'''Memory thresholds''',
False,
[
_MetaInfoClassMember('minor', ATTRIBUTE, 'int', 'uint32',
None, None,
[('5', '40')], [],
''' Threshold, Range (5, 40)
''',
'minor',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('severe', ATTRIBUTE, 'int', 'uint32',
None, None,
[('4', '40')], [],
''' Threshold, Range (4, minor)
''',
'severe',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('critical', ATTRIBUTE, 'int', 'uint32',
None, None,
[('3', '40')], [],
''' Threshold, Range (3, severe)
''',
'critical',
'Cisco-IOS-XR-watchd-cfg', False),
],
'Cisco-IOS-XR-watchd-cfg',
'threshold-memory',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-watchd-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg',
),
},
'Watchdog.DiskLimit' : {
'meta_info' : _MetaInfoClass('Watchdog.DiskLimit', REFERENCE_CLASS,
'''Disk thresholds''',
False,
[
_MetaInfoClassMember('minor', ATTRIBUTE, 'int', 'uint32',
None, None,
[('5', '40')], [],
''' Threshold, Range (5, 40)
''',
'minor',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('severe', ATTRIBUTE, 'int', 'uint32',
None, None,
[('4', '40')], [],
''' Threshold, Range (4, minor)
''',
'severe',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('critical', ATTRIBUTE, 'int', 'uint32',
None, None,
[('3', '40')], [],
''' Threshold, Range (3, severe)
''',
'critical',
'Cisco-IOS-XR-watchd-cfg', False),
],
'Cisco-IOS-XR-watchd-cfg',
'disk-limit',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-watchd-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg',
),
},
'Watchdog' : {
'meta_info' : _MetaInfoClass('Watchdog', REFERENCE_CLASS,
'''Watchdog configuration commands''',
False,
[
_MetaInfoClassMember('threshold-memory', REFERENCE_CLASS, 'ThresholdMemory', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg', 'Watchdog.ThresholdMemory',
[], [],
''' Memory thresholds
''',
'threshold_memory',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('disk-limit', REFERENCE_CLASS, 'DiskLimit', '',
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg', 'Watchdog.DiskLimit',
[], [],
''' Disk thresholds
''',
'disk_limit',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('overload-notification', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' Disable critical event notification
''',
'overload_notification',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('restart-deadlock-disable', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' Disable watchdog restart deadlock
''',
'restart_deadlock_disable',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('restart-memoryhog-disable', ATTRIBUTE, 'Empty', 'empty',
None, None,
[], [],
''' Disable watchdog restart memory-hog
''',
'restart_memoryhog_disable',
'Cisco-IOS-XR-watchd-cfg', False),
_MetaInfoClassMember('overload-throttle-timeout', ATTRIBUTE, 'int', 'uint32',
None, None,
[('5', '120')], [],
''' Watchdog overload throttle timeout configuration
''',
'overload_throttle_timeout',
'Cisco-IOS-XR-watchd-cfg', False),
],
'Cisco-IOS-XR-watchd-cfg',
'watchdog',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-watchd-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg',
),
},
'Watchd' : {
'meta_info' : _MetaInfoClass('Watchd', REFERENCE_CLASS,
'''watchd''',
False,
[
_MetaInfoClassMember('timeout', ATTRIBUTE, 'int', 'uint32',
None, None,
[('1', '10')], [],
''' Length of timeout in seconds
''',
'timeout',
'Cisco-IOS-XR-watchd-cfg', False),
],
'Cisco-IOS-XR-watchd-cfg',
'watchd',
_yang_ns.NAMESPACE_LOOKUP['Cisco-IOS-XR-watchd-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg',
),
},
}
_meta_table['Watchdog.ThresholdMemory']['meta_info'].parent =_meta_table['Watchdog']['meta_info']
_meta_table['Watchdog.DiskLimit']['meta_info'].parent =_meta_table['Watchdog']['meta_info']
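# A minimal usage sketch of the model this metadata describes (assumes the
# generated ydk bindings are installed; the threshold value is a placeholder):
#
#     from ydk.models.cisco_ios_xr.Cisco_IOS_XR_watchd_cfg import Watchdog
#     wd = Watchdog()
#     wd.threshold_memory.minor = 10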
|
PypiClean
|
/dnb_arelle-0.1.0-py3-none-any.whl/arelle/ValidateXbrl.py
|
try:
import regex as re
except ImportError:
import re
from arelle import (ModelDocument, XmlUtil, XbrlUtil, XbrlConst,
ValidateXbrlCalcs, ValidateXbrlDimensions, ValidateXbrlDTS, ValidateFormula, ValidateUtr)
from arelle import FunctionIxt
from arelle.ModelObject import ModelObject
from arelle.ModelDtsObject import ModelConcept
from arelle.ModelInstanceObject import ModelInlineFact
from arelle.ModelValue import qname
from arelle.PluginManager import pluginClassMethods
from arelle.ValidateXbrlCalcs import inferredDecimals
from arelle.XbrlConst import (ixbrlAll, dtrNoDecimalsItemTypes, dtrPrefixedContentItemTypes, dtrPrefixedContentTypes,
dtrSQNameItemTypes, dtrSQNameTypes, dtrSQNamesItemTypes, dtrSQNamesTypes)
from arelle.XhtmlValidate import ixMsgCode
from arelle.XmlValidate import VALID
from collections import defaultdict
validateUniqueParticleAttribution = None # dynamic import
arcNamesTo21Resource = {"labelArc","referenceArc"}
xlinkTypeValues = {None, "simple", "extended", "locator", "arc", "resource", "title", "none"}
xlinkActuateValues = {None, "onLoad", "onRequest", "other", "none"}
xlinkShowValues = {None, "new", "replace", "embed", "other", "none"}
xlinkLabelAttributes = {"{http://www.w3.org/1999/xlink}label", "{http://www.w3.org/1999/xlink}from", "{http://www.w3.org/1999/xlink}to"}
periodTypeValues = {"instant","duration"}
balanceValues = {None, "credit","debit"}
baseXbrliTypes = {
"decimalItemType", "floatItemType", "doubleItemType", "integerItemType",
"nonPositiveIntegerItemType", "negativeIntegerItemType", "longItemType", "intItemType",
"shortItemType", "byteItemType", "nonNegativeIntegerItemType", "unsignedLongItemType",
"unsignedIntItemType", "unsignedShortItemType", "unsignedByteItemType",
"positiveIntegerItemType", "monetaryItemType", "sharesItemType", "pureItemType",
"fractionItemType", "stringItemType", "booleanItemType", "hexBinaryItemType",
"base64BinaryItemType", "anyURIItemType", "QNameItemType", "durationItemType",
"dateTimeItemType", "timeItemType", "dateItemType", "gYearMonthItemType",
"gYearItemType", "gMonthDayItemType", "gDayItemType", "gMonthItemType",
"normalizedStringItemType", "tokenItemType", "languageItemType", "NameItemType", "NCNameItemType"
}
class ValidateXbrl:
def __init__(self, testModelXbrl):
self.testModelXbrl = testModelXbrl
def close(self, reusable=True):
if reusable:
testModelXbrl = self.testModelXbrl
self.__dict__.clear() # dereference everything
if reusable:
self.testModelXbrl = testModelXbrl
def validate(self, modelXbrl, parameters=None):
self.parameters = parameters
self.precisionPattern = re.compile("^([0-9]+|INF)$")
self.decimalsPattern = re.compile("^(-?[0-9]+|INF)$")
self.isoCurrencyPattern = re.compile(r"^[A-Z]{3}$")
self.modelXbrl = modelXbrl
self.validateDisclosureSystem = modelXbrl.modelManager.validateDisclosureSystem
self.disclosureSystem = modelXbrl.modelManager.disclosureSystem
self.validateEFM = self.validateDisclosureSystem and self.disclosureSystem.EFM # deprecated non-plugin validators
self.validateGFM = self.validateDisclosureSystem and self.disclosureSystem.GFM
self.validateEFMorGFM = self.validateDisclosureSystem and self.disclosureSystem.EFMorGFM
self.validateHMRC = self.validateDisclosureSystem and self.disclosureSystem.HMRC
self.validateSBRNL = self.validateDisclosureSystem and self.disclosureSystem.SBRNL
self.validateEFMorGFMorSBRNL = self.validateEFMorGFM or self.validateSBRNL
self.validateXmlLang = self.validateDisclosureSystem and self.disclosureSystem.xmlLangPattern
self.validateCalcLB = modelXbrl.modelManager.validateCalcLB
self.validateInferDecimals = modelXbrl.modelManager.validateInferDecimals
self.validateDedupCalcs = modelXbrl.modelManager.validateDedupCalcs
self.validateUTR = (modelXbrl.modelManager.validateUtr or
(self.parameters and self.parameters.get(qname("forceUtrValidation",noPrefixIsNoNamespace=True),(None,"false"))[1] == "true") or
(self.validateEFM and
any((concept.qname.namespaceURI in self.disclosureSystem.standardTaxonomiesDict and concept.modelDocument.inDTS)
for concept in self.modelXbrl.nameConcepts.get("UTR",()))))
self.validateIXDS = False # set when any inline document found
self.validateEnum = bool(XbrlConst.enums & _DICT_SET(modelXbrl.namespaceDocs.keys()))
for pluginXbrlMethod in pluginClassMethods("Validate.XBRL.Start"):
pluginXbrlMethod(self, parameters)
# xlink validation
modelXbrl.profileStat(None)
modelXbrl.modelManager.showStatus(_("validating links"))
modelLinks = set()
self.remoteResourceLocElements = set()
self.genericArcArcroles = set()
for baseSetExtLinks in modelXbrl.baseSets.values():
for baseSetExtLink in baseSetExtLinks:
modelLinks.add(baseSetExtLink) # ext links are unique (no dups)
self.checkLinks(modelLinks)
modelXbrl.profileStat(_("validateLinks"))
modelXbrl.dimensionDefaultConcepts = {}
modelXbrl.qnameDimensionDefaults = {}
modelXbrl.qnameDimensionContextElement = {}
# check base set cycles, dimensions
modelXbrl.modelManager.showStatus(_("validating relationship sets"))
for baseSetKey in modelXbrl.baseSets.keys():
arcrole, ELR, linkqname, arcqname = baseSetKey
if arcrole.startswith("XBRL-") or ELR is None or \
linkqname is None or arcqname is None:
continue
elif arcrole in XbrlConst.standardArcroleCyclesAllowed:
# TODO: table should be in this module, where it is used
cyclesAllowed, specSect = XbrlConst.standardArcroleCyclesAllowed[arcrole]
elif arcrole in self.modelXbrl.arcroleTypes and len(self.modelXbrl.arcroleTypes[arcrole]) > 0:
cyclesAllowed = self.modelXbrl.arcroleTypes[arcrole][0].cyclesAllowed
if arcrole in self.genericArcArcroles:
specSect = "xbrlgene:violatedCyclesConstraint"
else:
specSect = "xbrl.5.1.4.3:cycles"
else:
cyclesAllowed = "any"
specSect = None
if cyclesAllowed != "any" or arcrole in (XbrlConst.summationItem,) \
or arcrole in self.genericArcArcroles \
or arcrole.startswith(XbrlConst.formulaStartsWith) \
or (modelXbrl.hasXDT and arcrole.startswith(XbrlConst.dimStartsWith)):
relsSet = modelXbrl.relationshipSet(arcrole,ELR,linkqname,arcqname)
if cyclesAllowed != "any" and \
((XbrlConst.isStandardExtLinkQname(linkqname) and XbrlConst.isStandardArcQname(arcqname)) \
or arcrole in self.genericArcArcroles):
noUndirected = cyclesAllowed == "none"
fromRelationships = relsSet.fromModelObjects()
for relFrom, rels in fromRelationships.items():
cycleFound = self.fwdCycle(relsSet, rels, noUndirected, {relFrom})
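# When a cycle is detected, fwdCycle returns a list whose first element names the
# cycle kind (reported as %(cycle)s below) and whose remaining elements are the
# relationships along the cycle; otherwise it returns None.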
if cycleFound is not None:
pathEndsAt = len(cycleFound) # consistently find start of path
loopedModelObject = cycleFound[1].toModelObject
for i, rel in enumerate(cycleFound[2:]):
if rel.fromModelObject == loopedModelObject:
pathEndsAt = 3 + i # don't report extra path elements before loop
break
path = str(loopedModelObject.qname) + " " + " - ".join(
"{0}:{1} {2}".format(rel.modelDocument.basename, rel.sourceline, rel.toModelObject.qname)
for rel in reversed(cycleFound[1:pathEndsAt]))
modelXbrl.error(specSect,
_("Relationships have a %(cycle)s cycle in arcrole %(arcrole)s \nlink role %(linkrole)s \nlink %(linkname)s, \narc %(arcname)s, \npath %(path)s"),
modelObject=cycleFound[1:pathEndsAt], cycle=cycleFound[0], path=path,
arcrole=arcrole, linkrole=ELR, linkname=linkqname, arcname=arcqname,
messageCodes=("xbrlgene:violatedCyclesConstraint", "xbrl.5.1.4.3:cycles",
# from XbrlConst.standardArcroleCyclesAllowed
"xbrl.5.2.4.2", "xbrl.5.2.5.2", "xbrl.5.2.6.2.1", "xbrl.5.2.6.2.1", "xbrl.5.2.6.2.3", "xbrl.5.2.6.2.4"))
break
# check calculation arcs for weight issues (note calc arc is an "any" cycles)
if arcrole == XbrlConst.summationItem:
for modelRel in relsSet.modelRelationships:
weight = modelRel.weight
fromConcept = modelRel.fromModelObject
toConcept = modelRel.toModelObject
if fromConcept is not None and toConcept is not None:
if weight == 0:
modelXbrl.error("xbrl.5.2.5.2.1:zeroWeight",
_("Calculation relationship has zero weight from %(source)s to %(target)s in link role %(linkrole)s"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR)
fromBalance = fromConcept.balance
toBalance = toConcept.balance
if fromBalance and toBalance:
if (fromBalance == toBalance and weight < 0) or \
(fromBalance != toBalance and weight > 0):
modelXbrl.error("xbrl.5.1.1.2:balanceCalcWeightIllegal" +
("Negative" if weight < 0 else "Positive"),
_("Calculation relationship has illegal weight %(weight)s from %(source)s, %(sourceBalance)s, to %(target)s, %(targetBalance)s, in link role %(linkrole)s (per 5.1.1.2 Table 6)"),
modelObject=modelRel, weight=weight,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR,
sourceBalance=fromBalance, targetBalance=toBalance,
messageCodes=("xbrl.5.1.1.2:balanceCalcWeightIllegalNegative", "xbrl.5.1.1.2:balanceCalcWeightIllegalPositive"))
if not fromConcept.isNumeric or not toConcept.isNumeric:
modelXbrl.error("xbrl.5.2.5.2:nonNumericCalc",
_("Calculation relationship has illegal concept from %(source)s%(sourceNumericDecorator)s to %(target)s%(targetNumericDecorator)s in link role %(linkrole)s"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR,
sourceNumericDecorator="" if fromConcept.isNumeric else _(" (non-numeric)"),
targetNumericDecorator="" if toConcept.isNumeric else _(" (non-numeric)"))
# check presentation relationships for preferredLabel issues
elif arcrole == XbrlConst.parentChild:
for modelRel in relsSet.modelRelationships:
preferredLabel = modelRel.preferredLabel
fromConcept = modelRel.fromModelObject
toConcept = modelRel.toModelObject
if preferredLabel is not None and isinstance(fromConcept, ModelConcept) and isinstance(toConcept, ModelConcept):
label = toConcept.label(preferredLabel=preferredLabel,fallbackToQname=False,strip=True)
if label is None:
modelXbrl.error("xbrl.5.2.4.2.1:preferredLabelMissing",
_("Presentation relationship from %(source)s to %(target)s in link role %(linkrole)s missing preferredLabel %(preferredLabel)s"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR,
preferredLabel=preferredLabel)
elif not label: # empty string
modelXbrl.info("arelle:info.preferredLabelEmpty",
_("(Info xbrl.5.2.4.2.1) Presentation relationship from %(source)s to %(target)s in link role %(linkrole)s has empty preferredLabel %(preferredLabel)s"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR,
preferredLabel=preferredLabel)
# check essence-alias relationships
elif arcrole == XbrlConst.essenceAlias:
for modelRel in relsSet.modelRelationships:
fromConcept = modelRel.fromModelObject
toConcept = modelRel.toModelObject
if fromConcept is not None and toConcept is not None:
if fromConcept.type != toConcept.type or fromConcept.periodType != toConcept.periodType:
modelXbrl.error("xbrl.5.2.6.2.2:essenceAliasTypes",
_("Essence-alias relationship from %(source)s to %(target)s in link role %(linkrole)s has different types or periodTypes"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR)
fromBalance = fromConcept.balance
toBalance = toConcept.balance
if fromBalance and toBalance:
if fromBalance and toBalance and fromBalance != toBalance:
modelXbrl.error("xbrl.5.2.6.2.2:essenceAliasBalance",
_("Essence-alias relationship from %(source)s to %(target)s in link role %(linkrole)s has different balances"),
modelObject=modelRel,
source=fromConcept.qname, target=toConcept.qname, linkrole=ELR)
elif modelXbrl.hasXDT and arcrole.startswith(XbrlConst.dimStartsWith):
ValidateXbrlDimensions.checkBaseSet(self, arcrole, ELR, relsSet)
elif arcrole in ValidateFormula.arcroleChecks:
ValidateFormula.checkBaseSet(self, arcrole, ELR, relsSet)
modelXbrl.isDimensionsValidated = True
modelXbrl.profileStat(_("validateRelationships"))
# instance checks
modelXbrl.modelManager.showStatus(_("validating instance"))
if modelXbrl.modelDocument.type in (ModelDocument.Type.INSTANCE, ModelDocument.Type.INLINEXBRL, ModelDocument.Type.INLINEXBRLDOCUMENTSET):
self.checkFacts(modelXbrl.facts)
self.checkContexts(self.modelXbrl.contexts.values())
self.checkUnits(self.modelXbrl.units.values())
modelXbrl.profileStat(_("validateInstance"))
if modelXbrl.hasXDT:
modelXbrl.modelManager.showStatus(_("validating dimensions"))
''' uncomment if using otherFacts in checkFact
dimCheckableFacts = set(f
for f in modelXbrl.factsInInstance
if f.concept.isItem and f.context is not None)
while (dimCheckableFacts): # check one and all of its compatible family members
f = dimCheckableFacts.pop()
ValidateXbrlDimensions.checkFact(self, f, dimCheckableFacts)
del dimCheckableFacts
'''
self.checkFactsDimensions(modelXbrl.facts) # check fact dimensions in document order
self.checkContextsDimensions(modelXbrl.contexts.values())
modelXbrl.profileStat(_("validateDimensions"))
# dimensional validity
#concepts checks
modelXbrl.modelManager.showStatus(_("validating concepts"))
for concept in modelXbrl.qnameConcepts.values():
conceptType = concept.type
if (concept.qname is None or
XbrlConst.isStandardNamespace(concept.qname.namespaceURI) or
not concept.modelDocument.inDTS):
continue
if concept.isTuple:
# must be global
if not concept.getparent().localName == "schema":
self.modelXbrl.error("xbrl.4.9:tupleGloballyDeclared",
_("Tuple %(concept)s must be declared globally"),
modelObject=concept, concept=concept.qname)
if concept.periodType:
self.modelXbrl.error("xbrl.4.9:tuplePeriodType",
_("Tuple %(concept)s must not have periodType"),
modelObject=concept, concept=concept.qname)
if concept.balance:
self.modelXbrl.error("xbrl.4.9:tupleBalance",
_("Tuple %(concept)s must not have balance"),
modelObject=concept, concept=concept.qname)
if conceptType is not None:
# check attribute declarations
for attribute in conceptType.attributes.values():
if attribute.qname is not None and attribute.qname.namespaceURI in (XbrlConst.xbrli, XbrlConst.link, XbrlConst.xlink, XbrlConst.xl):
self.modelXbrl.error("xbrl.4.9:tupleAttribute",
_("Tuple %(concept)s must not have attribute in this namespace %(attribute)s"),
modelObject=concept, concept=concept.qname, attribute=attribute.qname)
# check for mixed="true" or simple content
if XmlUtil.descendantAttr(conceptType, XbrlConst.xsd, ("complexType", "complexContent"), "mixed") == "true":
self.modelXbrl.error("xbrl.4.9:tupleMixedContent",
_("Tuple %(concept)s must not have mixed content"),
modelObject=concept, concept=concept.qname)
if XmlUtil.descendant(conceptType, XbrlConst.xsd, "simpleContent"):
self.modelXbrl.error("xbrl.4.9:tupleSimpleContent",
_("Tuple %(concept)s must not have simple content"),
modelObject=concept, concept=concept.qname)
# child elements must be item or tuple
for elementQname in conceptType.elements:
childConcept = self.modelXbrl.qnameConcepts.get(elementQname)
if childConcept is None:
self.modelXbrl.error("xbrl.4.9:tupleElementUndefined",
_("Tuple %(concept)s element %(tupleElement)s not defined"),
modelObject=concept, concept=str(concept.qname), tupleElement=elementQname)
elif not (childConcept.isItem or childConcept.isTuple or # isItem/isTuple do not include item or tuple itself
childConcept.qname == XbrlConst.qnXbrliItem or # subs group includes item as member
childConcept.qname == XbrlConst.qnXbrliTuple):
self.modelXbrl.error("xbrl.4.9:tupleElementItemOrTuple",
_("Tuple %(concept)s must not have element %(tupleElement)s not an item or tuple"),
modelObject=concept, concept=concept.qname, tupleElement=elementQname)
elif concept.isItem:
if concept.periodType not in periodTypeValues: #("instant","duration"):
self.modelXbrl.error("xbrl.5.1.1.1:itemPeriodType",
_("Item %(concept)s must have a valid periodType"),
modelObject=concept, concept=concept.qname)
if concept.isMonetary:
if concept.balance not in balanceValues: #(None, "credit","debit"):
self.modelXbrl.error("xbrl.5.1.1.2:itemBalance",
_("Item %(concept)s must have a valid balance %(balance)s"),
modelObject=concept, concept=concept.qname, balance=concept.balance)
else:
if concept.balance:
self.modelXbrl.error("xbrl.5.1.1.2:itemBalance",
_("Item %(concept)s may not have a balance"),
modelObject=concept, concept=concept.qname)
if concept.baseXbrliType not in baseXbrliTypes:
self.modelXbrl.error("xbrl.5.1.1.3:itemType",
_("Item %(concept)s type %(itemType)s invalid"),
modelObject=concept, concept=concept.qname, itemType=concept.baseXbrliType)
if modelXbrl.hasXDT:
if concept.isHypercubeItem and not concept.abstract == "true":
self.modelXbrl.error("xbrldte:HypercubeElementIsNotAbstractError",
_("Hypercube item %(concept)s must be abstract"),
modelObject=concept, concept=concept.qname)
elif concept.isDimensionItem and not concept.abstract == "true":
self.modelXbrl.error("xbrldte:DimensionElementIsNotAbstractError",
_("Dimension item %(concept)s must be abstract"),
modelObject=concept, concept=concept.qname)
if self.validateEnum and concept.isEnumeration: # either a enum item type or enum set dimension type
if not concept.enumDomainQname:
self.modelXbrl.error(("enum2te:" if concept.instanceOfType(XbrlConst.qnEnumeration2ItemTypes) else "enumte:") +
"MissingDomainError",
_("Item %(concept)s enumeration type must specify a domain."),
modelObject=concept, concept=concept.qname,
messageCodes=("enumte:MissingDomainError", "enum2te:MissingDomainError"))
elif concept.enumDomain is None or (not concept.enumDomain.isItem) or concept.enumDomain.isHypercubeItem or concept.enumDomain.isDimensionItem:
self.modelXbrl.error(("enum2te:" if concept.instanceOfType(XbrlConst.qnEnumeration2ItemTypes) else "enumte:") +
"InvalidDomainError",
_("Item %(concept)s enumeration type must be a xbrli:item that is neither a hypercube nor dimension."),
modelObject=concept, concept=concept.qname,
messageCodes=("enumte:InvalidDomainError", "enum2te:InvalidDomainError"))
if not concept.enumLinkrole:
self.modelXbrl.error(("enum2te:" if concept.instanceOfType(XbrlConst.qnEnumeration2ItemTypes) else "enumte:") +
"MissingLinkRoleError",
_("Item %(concept)s enumeration type must specify a linkrole."),
modelObject=concept, concept=concept.qname,
messageCodes=("enumte:MissingLinkRoleError", "enum2te:MissingLinkRoleError"))
if modelXbrl.hasXDT:
ValidateXbrlDimensions.checkConcept(self, concept)
modelXbrl.profileStat(_("validateConcepts"))
for pluginXbrlMethod in pluginClassMethods("Validate.XBRL.Finally"):
pluginXbrlMethod(self)
modelXbrl.profileStat() # reset after plugins
modelXbrl.modelManager.showStatus(_("validating DTS"))
self.DTSreferenceResourceIDs = {}
checkedModelDocuments = set()
ValidateXbrlDTS.checkDTS(self, modelXbrl.modelDocument, checkedModelDocuments)
# ARELLE-220: check imported documents that aren't DTS discovered
for importedModelDocument in (set(modelXbrl.urlDocs.values()) - checkedModelDocuments):
ValidateXbrlDTS.checkDTS(self, importedModelDocument, checkedModelDocuments)
del checkedModelDocuments, self.DTSreferenceResourceIDs
global validateUniqueParticleAttribution
if validateUniqueParticleAttribution is None:
from arelle.XmlValidateParticles import validateUniqueParticleAttribution
for modelType in modelXbrl.qnameTypes.values():
validateUniqueParticleAttribution(modelXbrl, modelType.particlesList, modelType)
modelXbrl.profileStat(_("validateDTS"))
if self.validateCalcLB:
modelXbrl.modelManager.showStatus(_("Validating instance calculations"))
ValidateXbrlCalcs.validate(modelXbrl,
inferDecimals=self.validateInferDecimals,
deDuplicate=self.validateDedupCalcs)
modelXbrl.profileStat(_("validateCalculations"))
if self.validateUTR:
ValidateUtr.validateFacts(modelXbrl)
modelXbrl.profileStat(_("validateUTR"))
if self.validateIXDS:
modelXbrl.modelManager.showStatus(_("Validating inline document set"))
_ixNS = modelXbrl.modelDocument.ixNS
ixdsIdObjects = defaultdict(list)
for ixdsDoc in self.ixdsDocs:
for idObject in ixdsDoc.idObjects.values():
if idObject.namespaceURI in ixbrlAll or idObject.elementQname in (XbrlConst.qnXbrliContext, XbrlConst.qnXbrliUnit):
ixdsIdObjects[idObject.id].append(idObject)
for _id, objs in ixdsIdObjects.items():
if len(objs) > 1:
idObject = objs[0]
modelXbrl.error(ixMsgCode("uniqueIxId", idObject, sect="validation"),
_("Inline XBRL id is not unique in the IXDS: %(id)s, for element(s) %(elements)s"),
modelObject=objs, id=_id, elements=",".join(sorted(set(str(obj.elementQname) for obj in objs))))
self.factsWithDeprecatedIxNamespace = []
factFootnoteRefs = set()
undefinedFacts = []
for f in modelXbrl.factsInInstance:
for footnoteID in f.footnoteRefs:
if footnoteID not in self.ixdsFootnotes:
modelXbrl.error(ixMsgCode("footnoteRef", f, name="footnote", sect="validation"),
_("Inline XBRL fact's footnoteRef not found: %(id)s"),
modelObject=f, id=footnoteID)
factFootnoteRefs.add(footnoteID)
if f.concept is None:
undefinedFacts.append(f)
if f.localName in {"fraction", "nonFraction", "nonNumeric"}:
if f.context is None:
self.modelXbrl.error(ixMsgCode("contextReference", f, sect="validation"),
_("Fact %(fact)s is missing a context for contextRef %(context)s"),
modelObject=f, fact=f.qname, context=f.contextID)
if f.localName in {"fraction", "nonFraction"}:
if f.unit is None:
self.modelXbrl.error(ixMsgCode("unitReference", f, sect="validation"),
_("Fact %(fact)s is missing a unit for unitRef %(unit)s"),
modelObject=f, fact=f.qname, unit=f.unitID)
fmt = f.format
if fmt:
if fmt.namespaceURI == FunctionIxt.deprecatedNamespaceURI:
self.factsWithDeprecatedIxNamespace.append(f)
if undefinedFacts:
self.modelXbrl.error("xbrl:schemaImportMissing",
_("Instance facts missing schema concept definition: %(elements)s"),
modelObject=undefinedFacts, elements=", ".join(sorted(set(str(f.qname) for f in undefinedFacts))))
del undefinedFacts # dereference facts
for _id, objs in self.ixdsFootnotes.items():
if len(objs) > 1:
modelXbrl.error(ixMsgCode("uniqueFootnoteId", ns=_ixNS, name="footnote", sect="validation"),
_("Inline XBRL footnote id is not unique in the IXDS: %(id)s"),
modelObject=objs, id=_id)
else:
if self.validateGFM:
elt = objs[0]
id = elt.footnoteID
if id and id not in factFootnoteRefs and elt.textValue:
self.modelXbrl.error(("EFM.N/A", "GFM:1.10.15"),
_("Inline XBRL non-empty footnote %(footnoteID)s is not referenced by any fact"),
modelObject=elt, footnoteID=id)
if not self.ixdsHeaderCount:
modelXbrl.error(ixMsgCode("headerMissing", ns=_ixNS, name="header", sect="validation"),
_("Inline XBRL document set must have at least one ix:header element"),
modelObject=modelXbrl)
if self.factsWithDeprecatedIxNamespace:
self.modelXbrl.info("arelle:info",
_("%(count)s facts have deprecated transformation namespace %(namespace)s"),
modelObject=self.factsWithDeprecatedIxNamespace,
count=len(self.factsWithDeprecatedIxNamespace),
namespace=FunctionIxt.deprecatedNamespaceURI)
del self.factsWithDeprecatedIxNamespace
for target, ixReferences in self.ixdsReferences.items():
targetDefaultNamespace = None
schemaRefUris = {}
for i, ixReference in enumerate(ixReferences):
defaultNamepace = XmlUtil.xmlns(ixReference, None)
if i == 0:
targetDefaultNamespace = defaultNamepace
elif targetDefaultNamespace != defaultNamepace:
modelXbrl.error(ixMsgCode("referenceInconsistentDefaultNamespaces", ns=_ixNS, sect="validation"),
_("Inline XBRL document set must have consistent default namespaces for target %(target)s"),
modelObject=ixReferences, target=target)
for schemaRef in XmlUtil.children(ixReference, XbrlConst.link, "schemaRef"):
href = schemaRef.get("{http://www.w3.org/1999/xlink}href")
prefix = XmlUtil.xmlnsprefix(schemaRef, href)
if href not in schemaRefUris:
schemaRefUris[href] = prefix
elif schemaRefUris[href] != prefix:
modelXbrl.error(ixMsgCode("referenceNamespacePrefixInconsistency", ns=_ixNS, sect="validation"),
_("Inline XBRL document set must have consistent prefixes for target %(target)s: %(prefix1)s, %(prefix2)s"),
modelObject=ixReferences, target=target, prefix1=schemaRefUris[href], prefix2=prefix)
for ixRel in self.ixdsRelationships:
for fromRef in ixRel.get("fromRefs","").split():
refs = ixdsIdObjects.get(fromRef)
if refs is None or refs[0].namespaceURI not in ixbrlAll or refs[0].localName not in ("fraction", "nonFraction", "nonNumeric", "tuple"):
modelXbrl.error(ixMsgCode("relationshipFromRef", ns=_ixNS, name="relationship", sect="validation"),
_("Inline XBRL fromRef %(ref)s is not a fraction, ix:nonFraction, ix:nonNumeric or ix:tuple."),
modelObject=ixRel, ref=fromRef)
hasFootnoteToRef = None
hasToRefMixture = False
for toRef in ixRel.get("toRefs","").split():
refs = ixdsIdObjects.get(toRef)
if refs is None or refs[0].namespaceURI not in ixbrlAll or refs[0].localName not in ("footnote", "fraction", "nonFraction", "nonNumeric", "tuple"):
modelXbrl.error(ixMsgCode("relationshipToRef", ns=_ixNS, name="relationship", sect="validation"),
_("Inline XBRL toRef %(ref)s is not a footnote, fraction, ix:nonFraction, ix:nonNumeric or ix:tuple."),
modelObject=ixRel, ref=toRef)
elif hasFootnoteToRef is None:
hasFootnoteToRef = refs[0].localName == "footnote"
elif hasFootnoteToRef != (refs[0].localName == "footnote"):
hasToRefMixture = True
if hasToRefMixture:
modelXbrl.error(ixMsgCode("relationshipToRefMix", ns=_ixNS, name="relationship", sect="validation"),
_("Inline XBRL fromRef is not only either footnotes, or ix:fraction, ix:nonFraction, ix:nonNumeric or ix:tuple."),
modelObject=ixRel)
if ixRel.get("linkRole") is not None: # XBRL 2.1 check of ixRelationships
ValidateXbrlDTS.checkLinkRole(self, ixRel, XbrlConst.qnLinkFootnoteLink, ixRel.get("linkRole"), "extended", self.ixdsRoleRefURIs)
if ixRel.get("arcrole") is not None:
ValidateXbrlDTS.checkArcrole(self, ixRel, XbrlConst.qnLinkFootnoteArc, ixRel.get("arcrole"), self.ixdsArcroleRefURIs)
del ixdsIdObjects
# tupleRefs already checked during loading
modelXbrl.profileStat(_("validateInline"))
if modelXbrl.hasFormulae or modelXbrl.modelRenderingTables:
ValidateFormula.validate(self,
statusMsg=_("compiling formulae and rendering tables") if (modelXbrl.hasFormulae and modelXbrl.modelRenderingTables)
else (_("compiling formulae") if modelXbrl.hasFormulae
else _("compiling rendering tables")),
# block executing formulas when validating if hasFormula is False (e.g., --formula=none)
compileOnly=modelXbrl.modelRenderingTables and not modelXbrl.hasFormulae)
for pluginXbrlMethod in pluginClassMethods("Validate.Finally"):
pluginXbrlMethod(self)
modelXbrl.modelManager.showStatus(_("ready"), 2000)
def checkLinks(self, modelLinks):
for modelLink in modelLinks:
fromToArcs = {}
locLabels = {}
resourceLabels = {}
resourceArcTos = []
for arcElt in modelLink.iterchildren():
if isinstance(arcElt,ModelObject):
xlinkType = arcElt.get("{http://www.w3.org/1999/xlink}type")
# locator must have an href
if xlinkType == "locator":
if arcElt.get("{http://www.w3.org/1999/xlink}href") is None:
self.modelXbrl.error("xlink:locatorHref",
_("Xlink locator %(xlinkLabel)s missing href in extended link %(linkrole)s"),
modelObject=arcElt,
linkrole=modelLink.role,
xlinkLabel=arcElt.get("{http://www.w3.org/1999/xlink}label"))
locLabels[arcElt.get("{http://www.w3.org/1999/xlink}label")] = arcElt
elif xlinkType == "resource":
resourceLabels[arcElt.get("{http://www.w3.org/1999/xlink}label")] = arcElt
# can be no duplicated arcs between same from and to
elif xlinkType == "arc":
fromLabel = arcElt.get("{http://www.w3.org/1999/xlink}from")
toLabel = arcElt.get("{http://www.w3.org/1999/xlink}to")
fromTo = (fromLabel,toLabel)
if fromTo in fromToArcs:
self.modelXbrl.error("xlink:dupArcs",
_("Duplicate xlink arcs in extended link %(linkrole)s from %(xlinkLabelFrom)s to %(xlinkLabelTo)s"),
modelObject=arcElt,
linkrole=modelLink.role,
xlinkLabelFrom=fromLabel, xlinkLabelTo=toLabel)
else:
fromToArcs[fromTo] = arcElt
if arcElt.namespaceURI == XbrlConst.link:
if arcElt.localName in arcNamesTo21Resource: #("labelArc","referenceArc"):
resourceArcTos.append((toLabel, arcElt.get("use"), arcElt))
elif self.isGenericArc(arcElt):
arcrole = arcElt.get("{http://www.w3.org/1999/xlink}arcrole")
self.genericArcArcroles.add(arcrole)
if arcrole in (XbrlConst.elementLabel, XbrlConst.elementReference):
resourceArcTos.append((toLabel, arcrole, arcElt))
# values of type (not needed for validating parsers)
if xlinkType not in xlinkTypeValues: # ("", "simple", "extended", "locator", "arc", "resource", "title", "none"):
self.modelXbrl.error("xlink:type",
_("Xlink type %(xlinkType)s invalid in extended link %(linkrole)s"),
modelObject=arcElt, linkrole=modelLink.role, xlinkType=xlinkType)
# values of actuate (not needed for validating parsers)
xlinkActuate = arcElt.get("{http://www.w3.org/1999/xlink}actuate")
if xlinkActuate not in xlinkActuateValues: # ("", "onLoad", "onRequest", "other", "none"):
self.modelXbrl.error("xlink:actuate",
_("Actuate %(xlinkActuate)s invalid in extended link %(linkrole)s"),
modelObject=arcElt, linkrole=modelLink.role, xlinkActuate=xlinkActuate)
# values of show (not needed for validating parsers)
xlinkShow = arcElt.get("{http://www.w3.org/1999/xlink}show")
if xlinkShow not in xlinkShowValues: # ("", "new", "replace", "embed", "other", "none"):
self.modelXbrl.error("xlink:show",
_("Show %(xlinkShow)s invalid in extended link %(linkrole)s"),
modelObject=arcElt, linkrole=modelLink.role, xlinkShow=xlinkShow)
# check from, to of arcs have a resource or loc
for fromTo, arcElt in fromToArcs.items():
fromLabel, toLabel = fromTo
for name, value, sect in (("from", fromLabel, "3.5.3.9.2"),("to",toLabel, "3.5.3.9.3")):
if value not in locLabels and value not in resourceLabels:
self.modelXbrl.error("xbrl.{0}:arcResource".format(sect),
_("Arc in extended link %(linkrole)s from %(xlinkLabelFrom)s to %(xlinkLabelTo)s attribute '%(attribute)s' has no matching loc or resource label"),
modelObject=arcElt,
linkrole=modelLink.role, xlinkLabelFrom=fromLabel, xlinkLabelTo=toLabel,
attribute=name,
messageCodes=("xbrl.3.5.3.9.2:arcResource", "xbrl.3.5.3.9.3:arcResource"))
if arcElt.localName == "footnoteArc" and arcElt.namespaceURI == XbrlConst.link and \
arcElt.get("{http://www.w3.org/1999/xlink}arcrole") == XbrlConst.factFootnote:
if fromLabel not in locLabels:
self.modelXbrl.error("xbrl.4.11.1.3.1:factFootnoteArcFrom",
_("Footnote arc in extended link %(linkrole)s from %(xlinkLabelFrom)s to %(xlinkLabelTo)s \"from\" is not a loc"),
modelObject=arcElt,
linkrole=modelLink.role, xlinkLabelFrom=fromLabel, xlinkLabelTo=toLabel)
if not((toLabel in resourceLabels and resourceLabels[toLabel] is not None
and resourceLabels[toLabel].qname == XbrlConst.qnLinkFootnote) or
(toLabel in locLabels and locLabels[toLabel].dereference() is not None
and locLabels[toLabel].dereference().qname == XbrlConst.qnLinkFootnote)):
self.modelXbrl.error("xbrl.4.11.1.3.1:factFootnoteArcTo",
_("Footnote arc in extended link %(linkrole)s from %(xlinkLabelFrom)s to %(xlinkLabelTo)s \"to\" is not a footnote resource"),
modelObject=arcElt,
linkrole=modelLink.role, xlinkLabelFrom=fromLabel, xlinkLabelTo=toLabel)
# check unprohibited label arcs to remote locs
for resourceArcTo in resourceArcTos:
resourceArcToLabel, resourceArcUse, arcElt = resourceArcTo
if resourceArcToLabel in locLabels:
toLabel = locLabels[resourceArcToLabel]
if resourceArcUse == "prohibited":
self.remoteResourceLocElements.add(toLabel)
else:
self.modelXbrl.error("xbrl.5.2.2.3:labelArcRemoteResource",
_("Unprohibited labelArc in extended link %(linkrole)s has illegal remote resource loc labeled %(xlinkLabel)s href %(xlinkHref)s"),
modelObject=arcElt,
linkrole=modelLink.role,
xlinkLabel=resourceArcToLabel,
xlinkHref=toLabel.get("{http://www.w3.org/1999/xlink}href"))
elif resourceArcToLabel in resourceLabels:
toResource = resourceLabels[resourceArcToLabel]
if resourceArcUse == XbrlConst.elementLabel:
if not self.isGenericLabel(toResource):
self.modelXbrl.error("xbrlle.2.1.1:genericLabelTarget",
_("Generic label arc in extended link %(linkrole)s to %(xlinkLabel)s must target a generic label"),
modelObject=arcElt,
linkrole=modelLink.role,
xlinkLabel=resourceArcToLabel)
elif resourceArcUse == XbrlConst.elementReference:
if not self.isGenericReference(toResource):
self.modelXbrl.error("xbrlre.2.1.1:genericReferenceTarget",
_("Generic reference arc in extended link %(linkrole)s to %(xlinkLabel)s must target a generic reference"),
modelObject=arcElt,
linkrole=modelLink.role,
xlinkLabel=resourceArcToLabel)
resourceArcTos = None # dereference arcs
def checkFacts(self, facts, inTuple=None): # do in document order
for f in facts:
concept = f.concept
if concept is not None:
if concept.isNumeric:
unit = f.unit
if f.unitID is None or unit is None:
self.modelXbrl.error("xbrl.4.6.2:numericUnit",
_("Fact %(fact)s context %(contextID)s is numeric and must have a unit"),
modelObject=f, fact=f.qname, contextID=f.contextID)
else:
if concept.isMonetary:
measures = unit.measures
if not measures or len(measures[0]) != 1 or len(measures[1]) != 0:
self.modelXbrl.error("xbrl.4.8.2:monetaryFactUnit-notSingleMeasure",
_("Fact %(fact)s context %(contextID)s must have a single unit measure which is monetary %(unitID)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, unitID=f.unitID)
elif (measures[0][0].namespaceURI != XbrlConst.iso4217 or
not self.isoCurrencyPattern.match(measures[0][0].localName)):
self.modelXbrl.error("xbrl.4.8.2:monetaryFactUnit-notMonetaryMeasure",
_("Fact %(fact)s context %(contextID)s must have a monetary unit measure %(unitID)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, unitID=f.unitID)
elif concept.isShares:
measures = unit.measures
if not measures or len(measures[0]) != 1 or len(measures[1]) != 0:
self.modelXbrl.error("xbrl.4.8.2:sharesFactUnit-notSingleMeasure",
_("Fact %(fact)s context %(contextID)s must have a single xbrli:shares unit %(unitID)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, unitID=f.unitID)
elif measures[0][0] != XbrlConst.qnXbrliShares:
self.modelXbrl.error("xbrl.4.8.2:sharesFactUnit-notSharesMeasure",
_("Fact %(fact)s context %(contextID)s must have a xbrli:shares unit %(unitID)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, unitID=f.unitID)
precision = f.precision
hasPrecision = precision is not None
if hasPrecision and precision != "INF" and not precision.isdigit():
self.modelXbrl.error("xbrl.4.6.4:precision",
_("Fact %(fact)s context %(contextID)s precision %(precision)s is invalid"),
modelObject=f, fact=f.qname, contextID=f.contextID, precision=precision)
decimals = f.decimals
hasDecimals = decimals is not None
if hasPrecision and not self.precisionPattern.match(precision):
self.modelXbrl.error("xbrl.4.6.4:precision",
_("Fact %(fact)s context %(contextID)s precision %(precision)s is invalid"),
modelObject=f, fact=f.qname, contextID=f.contextID, precision=precision)
if hasPrecision and hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:bothPrecisionAndDecimals",
_("Fact %(fact)s context %(contextID)s can not have both precision and decimals"),
modelObject=f, fact=f.qname, contextID=f.contextID)
if hasDecimals and not self.decimalsPattern.match(decimals):
self.modelXbrl.error("xbrl.4.6.5:decimals",
_("Fact %(fact)s context %(contextID)s decimals %(decimals)s is invalid"),
modelObject=f, fact=f.qname, contextID=f.contextID, decimals=decimals)
if concept.isItem:
context = f.context
if context is None:
self.modelXbrl.error("xbrl.4.6.1:itemContextRef",
_("Item %(fact)s must have a context"),
modelObject=f, fact=f.qname)
else:
periodType = concept.periodType
if (periodType == "instant" and not context.isInstantPeriod) or \
(periodType == "duration" and not (context.isStartEndPeriod or context.isForeverPeriod)):
self.modelXbrl.error("xbrl.4.7.2:contextPeriodType",
_("Fact %(fact)s context %(contextID)s has period type %(periodType)s conflict with context"),
modelObject=f, fact=f.qname, contextID=f.contextID, periodType=periodType)
# check precision and decimals
if f.isNil:
if hasPrecision or hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:nilPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s can not be nil and have either precision or decimals"),
modelObject=f, fact=f.qname, contextID=f.contextID)
elif concept.isFraction:
if hasPrecision or hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:fractionPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s is a fraction concept and cannot have either precision or decimals"),
modelObject=f, fact=f.qname, contextID=f.contextID)
numerator, denominator = f.fractionValue
if not (numerator == "INF" or numerator.isnumeric()):
self.modelXbrl.error("xbrl.5.1.1:fractionPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s is a fraction with invalid numerator %(numerator)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, numerator=numerator)
if not denominator.isnumeric() or _INT(denominator) == 0:
self.modelXbrl.error("xbrl.5.1.1:fractionPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s is a fraction with invalid denominator %(denominator)")).format(
modelObject=f, fact=f.qname, contextID=f.contextID, denominator=denominator)
else:
if self.modelXbrl.modelDocument.type not in (ModelDocument.Type.INLINEXBRL, ModelDocument.Type.INLINEXBRLDOCUMENTSET):
for child in f.iterchildren():
if isinstance(child,ModelObject):
self.modelXbrl.error("xbrl.5.1.1:itemMixedContent",
_("Fact %(fact)s context %(contextID)s may not have child elements %(childElementName)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, childElementName=child.prefixedName)
break
if concept.isNumeric:
if not hasPrecision and not hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:missingPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s is a numeric concept and must have either precision or decimals"),
modelObject=f, fact=f.qname, contextID=f.contextID)
elif f.concept.instanceOfType(dtrNoDecimalsItemTypes) and inferredDecimals(f) > 0:
self.modelXbrl.error("dtre:noDecimalsItemType",
_("Fact %(fact)s context %(contextID)s is a may not have inferred decimals value > 0: %(inferredDecimals)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, inferredDecimals=inferredDecimals(f))
else:
if hasPrecision or hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:extraneousPrecisionDecimals",
_("Fact %(fact)s context %(contextID)s is a non-numeric concept and must not have precision or decimals"),
modelObject=f, fact=f.qname, contextID=f.contextID)
if getattr(f,"xValid", 0) == 4:
if f.concept.instanceOfType(dtrSQNameItemTypes):
if not f.nsmap.get(f.xValue.rpartition(":")[0]):
self.modelXbrl.error("dtre:SQNameItemType",
_("Fact %(fact)s context %(contextID)s must have an in-scope prefix: %(value)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue[:200])
elif f.concept.instanceOfType(dtrSQNamesItemTypes):
if not all(f.nsmap.get(n.rpartition(":")[0]) for n in f.xValue.split()):
self.modelXbrl.error("dtre:SQNamesItemType",
_("Fact %(fact)s context %(contextID)s must have an in-scope prefix: %(value)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue[:200])
elif f.concept.instanceOfType(dtrPrefixedContentItemTypes):
self.modelXbrl.error("dtre:prefixedContentItemType",
_("Fact %(fact)s context %(contextID)s must not have an unrecognized subtype of dtr:prefixedContentItemType"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue[:200])
# not a real check
#if f.isNumeric and not f.isNil and f.precision :
# try:
# ValidateXbrlCalcs.roundValue(f.value, f.precision, f.decimals)
# except Exception as err:
# self.modelXbrl.error("arelle:info",
# _("Fact %(fact)s value %(value)s context %(contextID)s rounding exception %(error)s"),
# modelObject=f, fact=f.qname, value=f.value, contextID=f.contextID, error = err)
if self.validateEnum and concept.isEnumeration and getattr(f,"xValid", 0) == 4 and not f.isNil:
qnEnums = f.xValue
if not isinstance(qnEnums, list): qnEnums = (qnEnums,)
if not all(ValidateXbrlDimensions.enumerationMemberUsable(self, concept, self.modelXbrl.qnameConcepts.get(qnEnum))
for qnEnum in qnEnums):
self.modelXbrl.error(
("enum2ie:InvalidEnumerationSetValue" if concept.instanceOfType(XbrlConst.qnEnumerationSetItemTypes)
else "enum2ie:InvalidEnumerationValue") if concept.instanceOfType(XbrlConst.qnEnumeration2ItemTypes)
else ("InvalidListFactValue" if concept.instanceOfType(XbrlConst.qnEnumerationListItemTypes)
else "InvalidFactValue"),
_("Fact %(fact)s context %(contextID)s enumeration %(value)s is not in the domain of %(concept)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue, concept=f.qname,
messageCodes=("enumie:InvalidFactValue", "enumie:InvalidListFactValue",
"enum2ie:InvalidEnumerationValue", "enum2ie:InvalidEnumerationSetValue"))
if concept.instanceOfType(XbrlConst.qnEnumerationSetItemTypes) and len(qnEnums) > len(set(qnEnums)):
self.modelXbrl.error(("enum2ie:" if concept.instanceOfType(XbrlConst.qnEnumeration2ItemTypes)
else "enumie:") +
"RepeatedEnumerationSetValue",
_("Fact %(fact)s context %(contextID)s enumeration has non-unique values %(value)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue, concept=f.qname,
messageCodes=("enumie:RepeatedEnumerationSetValue", "enum2ie:RepeatedEnumerationSetValue"))
if concept.instanceOfType(XbrlConst.qnEnumerationSetItemTypes) and any(
qnEnum < qnEnums[i] for i, qnEnum in enumerate(qnEnums[1:])):
self.modelXbrl.error("enum2ie:InvalidEnumerationSetOrder",
_("Fact %(fact)s context %(contextID)s enumeration is not in lexicographical order %(value)s"),
modelObject=f, fact=f.qname, contextID=f.contextID, value=f.xValue, concept=f.qname)
elif concept.isTuple:
if f.contextID:
self.modelXbrl.error("xbrl.4.6.1:tupleContextRef",
_("Tuple %(fact)s must not have a context"),
modelObject=f, fact=f.qname)
if hasPrecision or hasDecimals:
self.modelXbrl.error("xbrl.4.6.3:tuplePrecisionDecimals",
_("Fact %(fact)s is a tuple and cannot have either precision or decimals"),
modelObject=f, fact=f.qname)
# custom attributes may be allowed by anyAttribute but not by 2.1
for attrQname, attrValue in XbrlUtil.attributes(self.modelXbrl, f):
if attrQname.namespaceURI in (XbrlConst.xbrli, XbrlConst.link, XbrlConst.xlink, XbrlConst.xl):
self.modelXbrl.error("xbrl.4.9:tupleAttribute",
_("Fact %(fact)s is a tuple and must not have attribute in this namespace %(attribute)s"),
                            modelObject=f, fact=f.qname, attribute=attrQname)
else:
self.modelXbrl.error("xbrl.4.6:notItemOrTuple",
_("Fact %(fact)s must be an item or tuple"),
modelObject=f, fact=f.qname)
if isinstance(f, ModelInlineFact):
if not inTuple and f.order is not None:
self.modelXbrl.error(ixMsgCode("tupleOrder", f, sect="validation"),
_("Fact %(fact)s must not have an order (%(order)s) unless in a tuple"),
modelObject=f, fact=f.qname, order=f.order)
if f.isTuple or f.tupleID:
if inTuple is None:
inTuple = dict()
inTuple[f.qname] = f
self.checkIxTupleContent(f, inTuple)
if f.modelTupleFacts:
self.checkFacts(f.modelTupleFacts, inTuple=inTuple)
if isinstance(f, ModelInlineFact) and (f.isTuple or f.tupleID):
del inTuple[f.qname]
# uncomment if anybody uses this
#for pluginXbrlMethod in pluginClassMethods("Validate.XBRL.Fact"):
# pluginXbrlMethod(self, f)
def checkFactsDimensions(self, facts): # check fact dimensions in document order
for f in facts:
if f.concept is not None and (f.concept.isItem and f.context is not None):
ValidateXbrlDimensions.checkFact(self, f)
elif f.modelTupleFacts:
self.checkFactsDimensions(f.modelTupleFacts)
def checkIxTupleContent(self, tf, parentTuples):
if tf.isNil:
if tf.modelTupleFacts:
self.modelXbrl.error("ix:tupleNilContent",
_("Inline XBRL nil tuple has content"),
modelObject=[tf] + tf.modelTupleFacts)
else:
if not tf.modelTupleFacts:
self.modelXbrl.error("ix:tupleContent",
_("Inline XBRL non-nil tuple requires content: ix:fraction, ix:nonFraction, ix:nonNumeric or ix:tuple"),
modelObject=tf)
tfTarget = tf.get("target")
prevTupleFact = None
for f in tf.modelTupleFacts:
if f.qname in parentTuples:
self.modelXbrl.error("ix:tupleRecursion",
_("Fact %(fact)s is recursively nested in tuple %(tuple)s"),
modelObject=(f, parentTuples[f.qname]), fact=f.qname, tuple=tf.qname)
if f.order is None:
self.modelXbrl.error("ix:tupleOrder",
_("Fact %(fact)s missing an order in tuple %(tuple)s"),
modelObject=f, fact=f.qname, tuple=tf.qname)
if f.get("target") != tfTarget:
self.modelXbrl.error("ix:tupleItemTarget",
_("Fact %(fact)s has different target, %(factTarget)s, than tuple %(tuple)s, %(tupleTarget)s"),
modelObject=(tf, f), fact=f.qname, tuple=tf.qname, factTarget=f.get("target"), tupleTarget=tfTarget)
if prevTupleFact is None:
prevTupleFact = f
elif (prevTupleFact.order == f.order and
XmlUtil.collapseWhitespace(prevTupleFact.textValue) == XmlUtil.collapseWhitespace(f.textValue)):
self.modelXbrl.error("ix:tupleContentDuplicate",
_("Inline XBRL at order %(order)s has non-matching content %(value)s"),
modelObject=(prevTupleFact, f), order=f.order, value=prevTupleFact.textValue.strip())
def checkContexts(self, contexts):
for cntx in contexts:
if cntx.isStartEndPeriod:
try: # if no datetime value would have been a schema error at loading time
if (cntx.endDatetime is not None and cntx.startDatetime is not None and
cntx.endDatetime <= cntx.startDatetime):
self.modelXbrl.error("xbrl.4.7.2:periodStartBeforeEnd",
_("Context %(contextID)s must have startDate less than endDate"),
modelObject=cntx, contextID=cntx.id)
except (TypeError, ValueError) as err:
self.modelXbrl.error("xbrl.4.7.2:contextDateError",
_("Context %(contextID) startDate or endDate: %(error)s"),
modelObject=cntx, contextID=cntx.id, error=err)
elif cntx.isInstantPeriod:
try:
cntx.instantDatetime #parse field
except ValueError as err:
self.modelXbrl.error("xbrl.4.7.2:contextDateError",
_("Context %(contextID)s instant date: %(error)s"),
modelObject=cntx, contextID=cntx.id, error=err)
self.segmentScenario(cntx.segment, cntx.id, "segment", "4.7.3.2")
self.segmentScenario(cntx.scenario, cntx.id, "scenario", "4.7.4")
for dim in cntx.qnameDims.values():
if dim.isTyped:
typedMember = dim.typedMember
if typedMember is not None and typedMember.xValid >= VALID: # typed dimension may be nil or empty
modelConcept = self.modelXbrl.qnameConcepts.get(typedMember.qname)
if modelConcept is not None:
if modelConcept.instanceOfType(dtrSQNameTypes):
if not typedMember.nsmap.get(typedMember.xValue.rpartition(":")[0]):
self.modelXbrl.error("dtre:SQNameType",
_("Context %(contextID)s dimension %(dim)s must have an in-scope prefix: %(value)s"),
modelObject=typedMember, dim=typedMember.qname, contextID=cntx.id, value=typedMember.xValue[:200])
elif modelConcept.instanceOfType(dtrSQNamesTypes):
if not all(typedMember.nsmap.get(n.rpartition(":")[0]) for n in typedMember.xValue.split()):
self.modelXbrl.error("dtre:SQNamesType",
_("Context %(contextID)s dimension %(dim)s must have an in-scope prefix: %(value)s"),
modelObject=typedMember, dim=typedMember.qname, contextID=cntx.id, value=typedMember.xValue[:200])
elif modelConcept.instanceOfType(dtrPrefixedContentTypes):
self.modelXbrl.error("dtre:prefixedContentType",
_("Context %(contextID)s dimension %(dim)s must not have an unrecognized subtype of dtr:prefixedContentType."),
modelObject=typedMember, dim=typedMember.qname, contextID=cntx.id, value=typedMember.xValue[:200])
def checkContextsDimensions(self, contexts):
for cntx in contexts:
ValidateXbrlDimensions.checkContext(self,cntx)
def checkUnits(self, units):
for unit in units:
mulDivMeasures = unit.measures
if mulDivMeasures:
for measures in mulDivMeasures:
for measure in measures:
if measure.namespaceURI == XbrlConst.xbrli and not \
measure in (XbrlConst.qnXbrliPure, XbrlConst.qnXbrliShares):
self.modelXbrl.error("xbrl.4.8.2:measureElement",
_("Unit %(unitID)s illegal measure: %(measure)s"),
modelObject=unit, unitID=unit.id, measure=measure)
for numeratorMeasure in mulDivMeasures[0]:
if numeratorMeasure in mulDivMeasures[1]:
self.modelXbrl.error("xbrl.4.8.4:measureBothNumDenom",
_("Unit %(unitID)s numerator measure: %(measure)s also appears as denominator measure"),
modelObject=unit, unitID=unit.id, measure=numeratorMeasure)
def fwdCycle(self, relsSet, rels, noUndirected, fromConcepts, cycleType="directed", revCycleRel=None):
for rel in rels:
if revCycleRel is not None and rel.isIdenticalTo(revCycleRel):
continue # don't double back on self in undirected testing
relTo = rel.toModelObject
if relTo in fromConcepts: #forms a directed cycle
return [cycleType,rel]
fromConcepts.add(relTo)
nextRels = relsSet.fromModelObject(relTo)
foundCycle = self.fwdCycle(relsSet, nextRels, noUndirected, fromConcepts)
if foundCycle is not None:
foundCycle.append(rel)
return foundCycle
fromConcepts.discard(relTo)
# look for back path in any of the ELRs visited (pass None as ELR)
if noUndirected:
foundCycle = self.revCycle(relsSet, relTo, rel, fromConcepts)
if foundCycle is not None:
foundCycle.append(rel)
return foundCycle
return None
def revCycle(self, relsSet, toConcept, turnbackRel, fromConcepts):
for rel in relsSet.toModelObject(toConcept):
if not rel.isIdenticalTo(turnbackRel):
relFrom = rel.fromModelObject
if relFrom in fromConcepts:
return ["undirected",rel]
fromConcepts.add(relFrom)
foundCycle = self.revCycle(relsSet, relFrom, turnbackRel, fromConcepts)
if foundCycle is not None:
foundCycle.append(rel)
return foundCycle
fwdRels = relsSet.fromModelObject(relFrom)
foundCycle = self.fwdCycle(relsSet, fwdRels, True, fromConcepts, cycleType="undirected", revCycleRel=rel)
if foundCycle is not None:
foundCycle.append(rel)
return foundCycle
fromConcepts.discard(relFrom)
return None
def segmentScenario(self, element, contextId, name, sect, topLevel=True):
if topLevel:
if element is None:
return # nothing to check
else:
if element.namespaceURI == XbrlConst.xbrli:
self.modelXbrl.error("xbrl.{0}:{1}XbrliElement".format(sect,name),
_("Context %(contextID)s %(contextElement)s cannot have xbrli element %(elementName)s"),
modelObject=element, contextID=contextId, contextElement=name, elementName=element.prefixedName,
messageCodes=("xbrl.4.7.3.2:segmentXbrliElement", "xbrl.4.7.4:scenarioXbrliElement"))
else:
concept = self.modelXbrl.qnameConcepts.get(element.qname)
if concept is not None and (concept.isItem or concept.isTuple):
self.modelXbrl.error("xbrl.{0}:{1}ItemOrTuple".format(sect,name),
_("Context %(contextID)s %(contextElement)s cannot have item or tuple element %(elementName)s"),
modelObject=element, contextID=contextId, contextElement=name, elementName=element.prefixedName,
messageCodes=("xbrl.4.7.3.2:segmentItemOrTuple", "xbrl.4.7.4:scenarioItemOrTuple"))
hasChild = False
for child in element.iterchildren():
if isinstance(child,ModelObject):
self.segmentScenario(child, contextId, name, sect, topLevel=False)
hasChild = True
if topLevel and not hasChild:
self.modelXbrl.error("xbrl.{0}:{1}Empty".format(sect,name),
_("Context %(contextID)s %(contextElement)s cannot be empty"),
modelObject=element, contextID=contextId, contextElement=name,
messageCodes=("xbrl.4.7.3.2:segmentEmpty", "xbrl.4.7.4:scenarioEmpty"))
def isGenericObject(self, elt, genQname):
return self.modelXbrl.isInSubstitutionGroup(elt.qname,genQname)
def isGenericLink(self, elt):
return self.isGenericObject(elt, XbrlConst.qnGenLink)
def isGenericArc(self, elt):
return self.isGenericObject(elt, XbrlConst.qnGenArc)
def isGenericResource(self, elt):
return self.isGenericObject(elt.getparent(), XbrlConst.qnGenLink)
def isGenericLabel(self, elt):
return self.isGenericObject(elt, XbrlConst.qnGenLabel)
def isGenericReference(self, elt):
return self.isGenericObject(elt, XbrlConst.qnGenReference)
def executeCallTest(self, modelXbrl, name, callTuple, testTuple):
self.modelXbrl = modelXbrl
ValidateFormula.executeCallTest(self, name, callTuple, testTuple)
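# --- Illustrative sketch (not part of Arelle) ----------------------------------
# fwdCycle/revCycle above walk a relationship set depth-first while carrying the
# set of concepts already on the current path; reaching a concept that is already
# in that set means a cycle. The standalone function below shows the same idea on
# a plain adjacency dict; the graph shape and return format are hypothetical
# placeholders for illustration, not Arelle's API.
def _find_directed_cycle(graph, start, _on_path=None):
    """Return the edges of a directed cycle reachable from start, or None."""
    if _on_path is None:
        _on_path = {start}
    for target in graph.get(start, ()):
        if target in _on_path:  # closed the current path: directed cycle found
            return [(start, target)]
        _on_path.add(target)
        deeper = _find_directed_cycle(graph, target, _on_path)
        if deeper is not None:
            return [(start, target)] + deeper
        _on_path.discard(target)
    return None
# example: {"a": ["b"], "b": ["c"], "c": ["a"]} contains the cycle a -> b -> c -> a,
# so _find_directed_cycle(graph, "a") returns [("a", "b"), ("b", "c"), ("c", "a")];
# without the "c" -> "a" edge it returns None.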
|
PypiClean
|
/nose-alert-0.9.1.tar.gz/nose-alert-0.9.1/nosealert/notifications.py
|
import warnings
from .platform import PLATFORM, OSX, LINUX
from os import path
IMAGES_DIR = path.join(path.dirname(__file__), 'images')
def get_icon_name(success):
name = 'ok' if success else 'error'
return 'dialog-%s' % name
def get_icon(success):
"""
Returns icon as binary data depending on given ``success`` boolean.
"""
icon_name = get_icon_name(success)
    with open(path.join(IMAGES_DIR, '%s.png' % icon_name), 'rb') as icon_file:
        return icon_file.read()
def notify(sender, title, message, success):
if PLATFORM == OSX:
try:
from gntp.notifier import mini
mini(message, sender, title=title, notificationIcon=get_icon(success))
except ImportError:
warnings.warn('Could not import gntp. Please install it to see alerts')
elif PLATFORM == LINUX:
try:
import pynotify
pynotify.init('nosetests')
icon_name = get_icon_name(success)
notification = pynotify.Notification(title, message, icon_name)
notification.show()
except ImportError:
warnings.warn('Could not import pynotify. '
'Please install libnotify to see alerts')
class Notification(object):
def __init__(self, sender='Tests', total=0, fails=0, errors=0):
self.sender = sender
self.total = total
self.fails = fails
self.errors = errors
self.notify = notify
def __repr__(self):
return '<Notification: %d/%d >' % (self.problems, self.total)
def __eq__(self, other):
return all([
self.sender == other.sender,
self.fails == other.fails,
self.errors == other.errors,
self.total == other.total,
])
@property
def problems(self):
return self.fails + self.errors
def get_message(self):
if self.problems:
return '%d out of %d tests failed!' % (self.problems, self.total)
else:
return 'All %d tests passed.' % self.total
def get_title(self):
if self.problems:
return 'Failed'
else:
return 'Success'
def send(self):
"""
Sends this notification to underlying notification center.
"""
self.notify(
sender=self.sender,
title=self.get_title(),
message=self.get_message(),
success=self.problems == 0,
)
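# --- Usage sketch ---------------------------------------------------------------
# A minimal example of driving the Notification class defined above; the counts
# are made up for illustration. On OSX the alert goes through gntp, on Linux
# through pynotify, and on other platforms notify() silently does nothing.
if __name__ == "__main__":
    summary = Notification(sender="Tests", total=42, fails=1, errors=0)
    print(summary.get_title(), "-", summary.get_message())  # Failed - 1 out of 42 tests failed!
    summary.send()  # pops a desktop alert if a supported notifier is installed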
|
PypiClean
|
/streamrandom-0.0.1.tar.gz/streamrandom-0.0.1/streamrandom.py
|
from __future__ import unicode_literals
from random import Random, BPF, RECIP_BPF
from uuid import UUID
from unicodedata import normalize
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import CTR
from cryptography.hazmat.primitives.hashes import Hash, SHA256
from cryptography.hazmat.backends import default_backend
from cryptography.utils import int_from_bytes, int_to_bytes
from publication import publish
__all__ = ["StreamRandom", "CipherStream", "stream_from_seed"]
__version__ = "0.0.1"
def _bytes_for_bits(bits):
"""
How many bytes do I need to read to get the given number of bits of
entropy?
"""
bits_per_byte = 8
return (bits + (bits_per_byte - 1)) // bits_per_byte
_uint128max = (1 << 128) - 1
def _bits(*ns):
r = 0
for n in ns:
r |= 1 << (128 - (n + 1))
return r
_offBits = _uint128max ^ _bits(48, 50, 51, 65)
_onBits = _bits(49, 64)
class StreamRandom(Random, object):
"""
A L{StreamRandom} converts a stream of bytes into an object that has the
same useful methods as a standard library L{random.Random}, plus its own
C{uuid4} method.
"""
def __init__(self, stream):
"""
Create a L{StreamRandom}.
@param stream: A file-like object.
"""
# No super(); skip over the call to .seed() in Random.__init__.
self._stream = stream
def getrandbits(self, k):
"""
Get some random bits. This is the primitive upon which all
higher-level functions are built.
@return: an integer containing C{k} random bits
@rtype: L{int} or L{long}
"""
if k != int(k):
raise TypeError("k must be an integer")
if not k > 0:
raise ValueError("k must be positive")
octet_count = _bytes_for_bits(k)
octets = self._stream.read(octet_count)
if len(octets) != octet_count:
raise RuntimeError("out of entropy")
x = int_from_bytes(octets, byteorder="big")
return x >> (octet_count * 8 - k)
def seed(self, a=None):
"""
Create a new stream from the given seed.
"""
raise NotImplementedError(
"To re-seed, create a new StreamRandom with a new stream."
)
def random(self):
"""
Get the next random number in the range [0.0, 1.0).
"""
return self.getrandbits(BPF) * RECIP_BPF
def jumpahead(self, n):
"""
Jump ahead in the stream as if C{random} had been called C{n} times.
"""
        self._stream.seek(n * 7, 1)  # each random() call consumes BPF (53) bits, i.e. 7 bytes
def getstate(self):
"""
Get the internal state necessary to serialize this object.
"""
return self._stream
def setstate(self, state):
"""
Unserialize this object from the given state, previously serialized by
C{getstate}.
"""
self._stream = state
def uuid4(self):
"""
Bonus method! Generate UUID4s from a deterministic source of
randomness.
"""
integer = self.randint(0, _uint128max)
return UUID(int=((integer & _offBits) | _onBits))
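# --- Usage sketch (comment only) ------------------------------------------------
# StreamRandom only needs a seekable file-like source of bytes, so any such
# stream works; the in-memory buffer below is one illustrative choice, not the
# only option:
#
#     >>> import io, os
#     >>> rng = StreamRandom(io.BytesIO(os.urandom(1024)))
#     >>> 0.0 <= rng.random() < 1.0
#     True
#     >>> rng.getrandbits(16) < 2 ** 16
#     True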
class CipherStream(object):
"""
A seekable stream of pseudo-random data based on a block cipher in CTR mode
"""
_remaining = b""
def __init__(self, algorithm):
"""
        Create a keystream from a block cipher algorithm; the CTR counter block
        is derived internally from the current stream position.
@param algorithm: a pyca/cryptography block cipher. block_size minimum
of 128 recommended, due to the internal usage of CTR.
"""
self._algorithm = algorithm
self._octets_per_block = self._algorithm.block_size // 8
self._null_block = int_to_bytes(0, self._octets_per_block)
self.seek(0)
def seek(self, n, whence=0):
if whence == 0:
goal = n
elif whence == 1:
goal = self._pos + n
else:
raise ValueError("SEEK_END not supported; keystreams are infinite.")
closest_block, beyond = divmod(goal, self._octets_per_block)
self._remaining = b""
self._pos = closest_block * self._octets_per_block
self._encryptor = Cipher(
self._algorithm,
CTR(int_to_bytes(closest_block, self._octets_per_block)),
backend=default_backend(),
).encryptor()
self.read(beyond)
def tell(self):
return self._pos
def read(self, n):
self._pos += n
result = b""
remaining = self._remaining
while n:
if not remaining:
blocks, remainder = divmod(n, self._octets_per_block)
remaining += self._encryptor.update(
self._null_block * (blocks + int(bool(remainder)))
)
more, remaining = remaining[:n], remaining[n:]
result += more
n -= len(more)
self._remaining = remaining
return result
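# --- Determinism sketch (comment only) -------------------------------------------
# Because CTR mode can generate the keystream for any block index, seek() simply
# recreates the encryptor at the nearest block boundary and discards the partial
# block; re-reading the same offsets therefore yields identical bytes. A small
# illustration with an arbitrary throwaway all-zero AES key:
#
#     >>> ks = CipherStream(AES(b"\x00" * 16))
#     >>> first = ks.read(20)
#     >>> ks.seek(0)
#     >>> ks.read(20) == first
#     True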
def stream_from_seed(seed, version=1):
"""
Create a L{CipherStream}
@param seed: An arbitrary string.
@type seed: unicode text
"""
if version != 1:
raise NotImplementedError("only one version exists")
seed = normalize("NFKD", seed)
seed = seed.encode("utf-8")
hasher = Hash(SHA256(), backend=default_backend())
hasher.update(seed)
return CipherStream(AES(hasher.finalize()[: AES.block_size // 8]))
publish()
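# --- Usage sketch ---------------------------------------------------------------
# stream_from_seed() turns any unicode seed into a CipherStream, which in turn
# feeds StreamRandom; the same seed always reproduces the same sequence. The
# seed string below is arbitrary.
if __name__ == "__main__":
    rng_a = StreamRandom(stream_from_seed("example seed"))
    rng_b = StreamRandom(stream_from_seed("example seed"))
    assert rng_a.random() == rng_b.random()  # deterministic across instances
    assert rng_a.uuid4() == rng_b.uuid4()    # reproducible UUID4s as well
    print(rng_a.getstate().tell(), "bytes consumed so far")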
|
PypiClean
|
/rupersonaagent-0.1.0-py3-none-any.whl/internet_memory_model/search/base.py
|
import asyncio
from abc import ABCMeta, abstractmethod
from contextlib import suppress
from enum import Enum, unique
from urllib.parse import urlencode, urlparse
from bs4 import BeautifulSoup
from search.exceptions import NoResultsOrTrafficError
from .utils import CacheHandler, get_rand_user_agent
@unique
class ReturnType(Enum):
FULL = "full"
TITLE = "titles"
DESCRIPTION = "descriptions"
LINK = "links"
# All results returned are individual items of a search
class SearchItem(dict):
"""
SearchItem is a dict of results containing keys (titles, descriptions, links and other
    additional keys depending on the engine)
>>> result
<search_engine_parser.core.base.SearchItem object at 0x7f907426a280>
>>> result["description"]
Some description
>>> result["descriptions"]
Same description
"""
def __getitem__(self, value):
""" Allow getting by index and by type ('descriptions', 'links'...)"""
try:
return super().__getitem__(value)
except KeyError:
pass
if not value.endswith('s'):
value += 's'
return super().__getitem__(value)
class SearchResult():
"""
The SearchResults after the searching
>>> results = gsearch.search("preaching the choir", 1)
>>> results
<search_engine_parser.core.base.SearchResult object at 0x7f907426a280>
    The object supports retrieving individual results by iteration or just by type
>>> results[0] # Returns the first result <SearchItem>
>>> results["descriptions"] # Returns a list of all descriptions from all results
It can be iterated like a normal list to return individual SearchItem
"""
def __init__(self):
self.results = []
def append(self, value):
self.results.append(value)
def __getitem__(self, value):
""" Allow getting by index and by type ('descriptions', 'links'...)"""
if isinstance(value, int):
return self.results[value]
links = []
for x in self.results:
with suppress(KeyError):
links.append(x[value])
return links
def keys(self):
keys = {}
with suppress(IndexError):
x = self.results[0]
keys = x.keys()
return keys
def __len__(self):
return len(self.results)
    def __repr__(self):
return "<SearchResult: {} results>".format(len(self.results))
class BaseSearch(metaclass=ABCMeta):
    """
    Search base to be extended by search parsers.
    Every subclass must define the methods `parse_soup` and `parse_single_result`.
    """
# Summary of engine
summary = None
# Search Engine Name
name = None
# Search Engine unformatted URL
search_url = None
# The url after all query params have been set
_parsed_url = None
# boolean that indicates cache hit or miss
_cache_hit = False
@abstractmethod
def parse_soup(self, soup):
"""
Defines the results contained in a soup
"""
raise NotImplementedError("subclasses must define method <parse_soup>")
@abstractmethod
def parse_single_result(self, single_result, return_type=ReturnType.FULL, **kwargs):
"""
Every div/span containing a result is passed here to retrieve
`title`, `link` and `descr`
"""
raise NotImplementedError(
"subclasses must define method <parse_results>")
def get_cache_handler(self):
""" Return Cache Handler to use"""
return CacheHandler()
@property
def cache_handler(self):
return self.get_cache_handler()
def parse_result(self, results, **kwargs):
"""
Runs every entry on the page through parse_single_result
:param results: Result of main search to extract individual results
:type results: list[`bs4.element.ResultSet`]
:returns: dictionary. Containing lists of titles, links, descriptions and other possible\
returns.
:rtype: dict
"""
search_results = SearchResult()
for each in results:
rdict = self.parse_single_result(each, **kwargs)
if rdict is not None:
search_results.append(rdict)
return search_results
def get_params(self, query=None, page=None, offset=None, **kwargs):
""" This function should be overwritten to return a dictionary of query params"""
return {'q': query, 'page': page}
def headers(self):
headers = {
"Cache-Control": 'no-cache',
"Connection": "keep-alive",
"User-Agent": get_rand_user_agent()
}
return headers
def clear_cache(self, all_cache=False):
"""
Triggers the clear cache function for a particular engine
:param all_cache: if True, deletes for all engines
"""
if all_cache:
return self.cache_handler.clear()
return self.cache_handler.clear(self.name)
async def get_source(self, url, cache=True, proxy=None, proxy_auth=None):
"""
Returns the source code of a webpage.
Also sets the _cache_hit if cache was used
:rtype: string
:param url: URL to pull it's source code
        :param proxy: proxy address to make use of
:type proxy: str
:param proxy_auth: (user, password) tuple to authenticate proxy
:type proxy_auth: (str, str)
:return: html source code of a given URL.
"""
try:
html, cache_hit = await self.cache_handler.get_source(self.name, url, self.headers(), cache, proxy, proxy_auth)
except Exception as exc:
raise Exception('ERROR: {}\n'.format(exc))
self._cache_hit = cache_hit
return html
async def get_soup(self, url, cache, proxy, proxy_auth):
"""
Get the html soup of a query
        :param url: url to obtain soup from
:type url: str
:param cache: cache request or not
:type cache: bool
        :param proxy: proxy address to make use of
:type proxy: str
:param proxy_auth: (user, password) tuple to authenticate proxy
:type proxy_auth: (str, str)
:rtype: `bs4.element.ResultSet`
"""
html = await self.get_source(url, cache, proxy, proxy_auth)
return BeautifulSoup(html, 'lxml')
def get_search_url(self, query=None, page=None, **kwargs):
"""
Return a formatted search url
"""
# Some URLs use offsets
offset = (page * 10) - 9
params = self.get_params(
query=query, page=page, offset=offset, **kwargs)
url = urlparse(self.search_url)
# For localization purposes, custom urls can be parsed for the same engine
# such as google.de and google.com
if kwargs.get("url"):
new_url = urlparse(kwargs.pop("url"))
# When passing url without scheme e.g google.de, url is parsed as path
if not new_url.netloc:
url = url._replace(netloc=new_url.path)
else:
url = url._replace(netloc=new_url.netloc)
self.base_url = url.geturl()
self._parsed_url = url._replace(query=urlencode(params))
return self._parsed_url.geturl()
def get_results(self, soup, **kwargs):
""" Get results from soup"""
search_results = None
results = self.parse_soup(soup)
        # TODO Check whether empty results are caused by traffic flagging or because
        # no answers to the query were found
if not results:
print("ENGINE FAILURE: {}\n".format(self.name))
raise NoResultsOrTrafficError(
"The result parsing was unsuccessful. It is either your query could not be found"
" or it was flagged as unusual traffic")
try:
search_results = self.parse_result(results, **kwargs)
# AttributeError occurs as it cannot pass the returned soup
except AttributeError:
raise NoResultsOrTrafficError(
"The returned results could not be parsed. This might be due to site updates or "
"server errors. Drop an issue at https://github.com/bisoncorps/search-engine-parser"
" if this persists"
)
return search_results
def search(self, query=None, page=1, cache=True, proxy=None, proxy_auth=None, **kwargs):
"""
Query the search engine
:param query: the query to search for
:type query: str
:param page: Page to be displayed, defaults to 1
:type page: int
        :param proxy: proxy address to make use of
:type proxy: str
:param proxy_auth: (user, password) tuple to authenticate proxy
:type proxy_auth: (str, str)
:return: dictionary. Containing titles, links, netlocs and descriptions.
"""
# Pages can only be from 1-N
if page <= 0:
page = 1
# Get search Page Results
loop = asyncio.get_event_loop()
url = self.get_search_url(query, page, **kwargs)
soup = loop.run_until_complete(
self.get_soup(url,
cache=cache,
proxy=proxy,
proxy_auth=proxy_auth))
return self.get_results(soup, **kwargs)
async def async_search(self, query=None, page=1, cache=True, proxy=None, proxy_auth=None, **kwargs):
"""
Query the search engine but in async mode
:param query: the query to search for
:type query: str
:param page: Page to be displayed, defaults to 1
:type page: int
:param proxy: proxy address to make use off
:type proxy: str
:param proxy_auth: (user, password) tuple to authenticate proxy
:type proxy_auth: (str, str)
:return: dictionary. Containing titles, links, netlocs and descriptions.
"""
# Pages can only be from 1-N
if page <= 0:
page = 1
soup = await self.get_soup(self.get_search_url(query, page, **kwargs), cache=cache, proxy=proxy, proxy_auth=proxy_auth)
return self.get_results(soup, **kwargs)
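# --- Subclassing sketch -----------------------------------------------------------
# BaseSearch leaves parse_soup and parse_single_result abstract; a concrete engine
# fills them in with the markup selectors of its results page. The engine name,
# URL, query parameters and CSS classes below are hypothetical placeholders for
# illustration, not a real engine definition.
class ExampleSearch(BaseSearch):
    name = "Example"
    summary = "Hypothetical engine used only to illustrate the BaseSearch interface."
    search_url = "https://search.example.com/search"

    def get_params(self, query=None, page=None, offset=None, **kwargs):
        # map the generic arguments onto this (made-up) engine's query parameters
        return {"q": query, "start": offset}

    def parse_soup(self, soup):
        # every result on the hypothetical page sits in <div class="result">
        return soup.find_all("div", class_="result")

    def parse_single_result(self, single_result, return_type=ReturnType.FULL, **kwargs):
        link = single_result.find("a")
        if link is None:
            return None  # parse_result() skips entries that return None
        rdict = SearchItem()
        rdict["titles"] = link.get_text(strip=True)
        rdict["links"] = link.get("href")
        rdict["descriptions"] = single_result.get_text(" ", strip=True)
        return rdict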
|
PypiClean
|
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/localization/fa/fa.js
|
MathJax.Localization.addTranslation("fa",null,{menuTitle:"\u0641\u0627\u0631\u0633\u06CC",fontDirection:"rtl",version:"2.7.9",isLoaded:true,domains:{_:{version:"2.7.9",isLoaded:true,strings:{CookieConfig:"MathJax \u06CC\u06A9 \u06A9\u0644\u0648\u0686\u06A9 \u062A\u0646\u0638\u06CC\u0645\u0627\u062A \u06A9\u0627\u0631\u0628\u0631\u06CC \u06CC\u0627\u0641\u062A \u06A9\u0647 \u0634\u0627\u0645\u0644 \u06A9\u062F \u0627\u062C\u0631\u0627\u06CC\u06CC \u0627\u0633\u062A. \u0622\u06CC\u0627 \u0645\u06CC\u200C\u062E\u0648\u0627\u0647\u06CC\u062F \u0622\u0646 \u0631\u0627 \u0627\u062C\u0631\u0627 \u06A9\u0646\u06CC\u062F\u061F\n\n(\u0628\u0647\u062A\u0631 \u0627\u0633\u062A \u0628\u0631 \u0644\u063A\u0648 \u0628\u0641\u0634\u0627\u0631\u06CC\u062F \u0645\u06AF\u0631 \u0627\u06CC\u0646\u06A9\u0647 \u062E\u0648\u062F\u062A\u0627\u0646 \u06A9\u0644\u0648\u0686\u06A9 \u0631\u0627 \u062A\u0646\u0638\u06CC\u0645\u200C\u06A9\u0631\u062F\u0647\u200C\u0627\u06CC\u062F.)",MathProcessingError:"\u062E\u0637\u0627\u06CC \u067E\u0631\u062F\u0627\u0632\u0634 \u0631\u06CC\u0627\u0636\u06CC",MathError:"\u062D\u0637\u0627\u06CC \u0631\u06CC\u0627\u0636\u06CC",LoadFile:"\u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC %1",Loading:"\u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC",LoadFailed:"\u062E\u0637\u0627 \u062F\u0631 \u0628\u0627\u0631\u06AF\u06CC\u0631\u06CC \u067E\u0631\u0648\u0646\u062F\u0647: %1",ProcessMath:"\u067E\u0631\u062F\u0627\u0632\u0634 \u0631\u06CC\u0627\u0636\u06CC: %1\u066A",Processing:"\u067E\u0631\u062F\u0627\u0632\u0634",TypesetMath:"\u062D\u0631\u0648\u0641\u200C\u0686\u06CC\u0646\u06CC \u0631\u06CC\u0627\u0636\u06CC: %1\u066A",Typesetting:"\u062D\u0631\u0648\u0641\u200C\u0686\u06CC\u0646\u06CC",MathJaxNotSupported:"\u0645\u0631\u0648\u0631\u06AF\u0631 \u0634\u0645\u0627 \u0627\u0632 MathJax \u067E\u0634\u062A\u06CC\u0628\u0627\u0646\u06CC \u0646\u0645\u06CC\u200C\u06A9\u0646\u062F"}},FontWarnings:{},"HTML-CSS":{},HelpDialog:{},MathML:{},MathMenu:{},TeX:{}},plural:function(a){return 1},number:function(a){return a}});MathJax.Ajax.loadComplete("[MathJax]/localization/fa/fa.js");
|
PypiClean
|
/sadie_antibody-1.0.2.tar.gz/sadie_antibody-1.0.2/src/sadie/reference/reference.py
|
from __future__ import annotations
import logging
from pathlib import Path
from time import sleep
from typing import Any, Dict, List, Optional, Tuple, Union
from urllib.parse import quote as url_quote
import pandas as pd
import requests
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from sadie.reference.models import GeneEntries, GeneEntry
from sadie.reference.util import (
make_blast_db_for_internal,
write_blast_db,
write_out_fasta,
)
from sadie.reference.yaml import YamlRef
# reference logger
logger = logging.getLogger("Reference")
# column typing from pandas stubs
Column = Union[Union[int, str], str]
class G3Error(Exception):
"""Exception for G3 - helps with being specific"""
def __init__(self, message: str) -> None:
self.message = message
super().__init__(self.message)
class Reference:
"""Reference class to handle reference databases for sadie.airr and sadie.numbering"""
# G3 API Endpoint
_endpoint = "https://g3.jordanrwillis.com/api/v1/genes"
def __init__(self, endpoint: str = _endpoint):
"""Initialize the reference object
Parameters
----------
endpoint : str, optional
The endpoint API address to get the data. Defaults to the G3 API.
"""
self.data: List[Dict[Column, str] | Dict[str, str]] = []
self.endpoint = endpoint
@property
def endpoint(self) -> str:
return self._endpoint
@endpoint.setter
def endpoint(self, endpoint: str) -> None:
_counter = 0
while True:
_get = requests.get(endpoint)
if _get.status_code == 503:
_counter += 1
sleep(5)
logger.info(f"Waiting for G3 API {endpoint} to be available --try: {_counter}")
elif _get.status_code == 200:
break
else:
raise G3Error(f"Error loading G3 API {endpoint}")
if _counter > 5:
raise G3Error(f"{endpoint} is not a valid G3 API endpoint or is down")
logger.info(f"G3 API {endpoint} is available")
self._endpoint = endpoint
def add_gene(self, gene: Dict[str, str]) -> None:
"""Add a single gene to the reference data
Parameters
----------
gene : dict
ex. `gene` should contain the following keys: {'species', 'gene', 'database'}
Examples
--------
        reference_object = Reference()
        reference_object.add_gene({"species": "human", "gene": "IGHV1-2*01", "database": "imgt"})
"""
# make gene model
gene_valid = GeneEntry(**gene)
# add dictionaries to list from G3
self.data.append(self._get_gene(gene_valid))
def add_genes(self, species: str, source: str, genes: List[str]) -> None:
"""Add a List of genes to the reference data object from a single species and database
Parameters
----------
species: str
genes : List[str]
database: str
Examples
--------
ref_class = Reference()
genes = []
genes.append("IGHV1-69*01")
genes.append("IGHD3-3*01"
genes.append("IGHJ6*01")
ref_class.add_genes('human','imgt',genes)
"""
genes_valid = GeneEntries(species=species, source=source, genes=genes)
self.data += self._get_genes(genes_valid)
def _g3_get(self, query: str) -> Tuple[int, List[Dict[str, str]]]:
"""Use the G3 Restful API
Parameters
----------
query : str
            query string, e.g. https://g3.jordanrwillis.com/api/v1/genes?source=imgt&common=human&gene=IGHV1-69%2A01
Returns
-------
        Tuple[int, List[Dict[str, str]]]
status code and response
Raises
------
G3Error
if the response is 404 and we can't find the gene
G3Error
Any other response code that is not 200
"""
response = requests.get(query)
if response.status_code != 200:
raise G3Error(f"{response.url} error G3 database response: {response.status_code}\n{response.text}")
return response.status_code, response.json()
def _get_gene(self, gene: GeneEntry) -> Dict[str, str]:
"""Get a single gene from the G3 Restful API using a GeneEntry Model
Parameters
----------
gene : GeneEntry
The GeneEntry model
Returns
-------
Single Json -> Dict response
Raises
------
ValueError
If gene is not a GeneEntry model
G3Error
            If more than one gene is found (i.e. the returned list is longer than 1); use _get_genes for multiple genes.
"""
if not isinstance(gene, GeneEntry):
raise ValueError(f"{gene} is not GeneEntry")
# change weird characters to url characters
gene_url = url_quote(gene.gene)
# we should never have more than one match thanks to the index
query = f"{self.endpoint}?source={gene.source}&common={gene.species}&gene={gene_url}"
# use G3 get to return response and json
status_code, response_json = self._g3_get(query)
logger.debug(f"{gene.source}:{gene.species}:{gene.gene} database response: {status_code}")
        # add the species (and sub species) back in because they are not part of the G3 response
logger.debug(f"have {len(response_json)} genes")
response_data: Dict[str, str] = response_json[0]
response_data["species"] = gene.species
return response_data
def _get_genes(self, genes: GeneEntries) -> List[Dict[str, str]]:
"""Get a list of genes from entries model. Similar to _get_gene but for multiple genes
Parameters
----------
genes : GeneEntries
The GeneEntries model object.
Returns
-------
List[dict]
A list of Json-> Dict responses from G3
Raises
------
ValueError
If genes is not GeneEntries model
"""
if not isinstance(genes, GeneEntries):
raise ValueError(f"{genes} is not GeneEntries")
# url query
query = f"{self.endpoint}?source={genes.source}&common={genes.species}&limit=-1"
# get request as method for future async
status_code, response_json = self._g3_get(query)
logger.debug(f"{genes.source}:{genes.species} database response: {status_code}")
# this is faster than getting individual genes from the g3 api
# @Todo, add a find_genes method to G3 rather than pulling all the data and filtering...
filtered_json = list(filter(lambda x: x["gene"] in genes.genes, response_json))
return filtered_json
def get_dataframe(self) -> pd.DataFrame:
"""Return a pandas dataframe of the references data"""
return pd.json_normalize(self.data)
@staticmethod
def from_dataframe(input_df: pd.DataFrame) -> "Reference":
matched_indexes = pd.Index(
[
"_id",
"source",
"common",
"gene",
"label",
"gene_segment",
"receptor",
"sequence",
"latin",
"imgt.sequence_gapped",
"imgt.sequence_gapped_aa",
"imgt.cdr3",
"imgt.cdr3_aa",
"imgt.fwr4",
"imgt.fwr4_aa",
"imgt.cdr3_start",
"imgt.cdr3_end",
"imgt.fwr4_start",
"imgt.fwr4_end",
"imgt.reading_frame",
"imgt.ignored",
"imgt.not_implemented",
"imgt.expression",
"imgt.expression_match",
"imgt.remainder",
"imgt.imgt_numbering",
"imgt.sequence",
"imgt.fwr1",
"imgt.fwr1_aa",
"imgt.fwr1_start",
"imgt.fwr1_end",
"imgt.cdr1",
"imgt.cdr1_aa",
"imgt.cdr1_start",
"imgt.cdr1_end",
"imgt.fwr2",
"imgt.fwr2_aa",
"imgt.fwr2_start",
"imgt.fwr2_end",
"imgt.cdr2",
"imgt.cdr2_aa",
"imgt.cdr2_start",
"imgt.cdr2_end",
"imgt.fwr3",
"imgt.fwr3_aa",
"imgt.fwr3_start",
"imgt.fwr3_end",
"imgt.imgt_functional",
"imgt.contrived_functional",
"chimera",
]
)
_diffs = matched_indexes.symmetric_difference(input_df.columns)
if not _diffs.empty:
raise ValueError(f"{_diffs} not in the dataframe")
# fresh instance
reference = Reference()
# get dict as list of records
input_list: List[Dict[Column, Any]] = input_df.to_dict(orient="records") # type: ignore
# can't assign directly so have to append to beat mypy
for key in input_list:
reference.data.append(key)
return reference
class References:
def __init__(self, default_output_path: Path | str | None = None) -> None:
self.references: Dict[str, Reference] = {}
if not default_output_path:
self.default_output_path = Path(__file__).parent / "../airr/data/germlines"
else:
self.default_output_path = Path(default_output_path)
logger.info(f"Default output path is {self.default_output_path}")
self.reference_dataframe_path = self.default_output_path / ".references_dataframe.csv.gz"
logger.info(f"Default dataframe is {self.default_output_path}")
@property
def reference_dataframe_path(self) -> Path:
return self._reference_dataframe_path
@reference_dataframe_path.setter
def reference_dataframe_path(self, value: Path) -> None:
self._reference_dataframe_path = value
if self._reference_dataframe_path.exists():
self.reference_dataframe = pd.read_csv(self._reference_dataframe_path, index_col=0)
else:
raise FileNotFoundError(
f"Reference dataframe does not exist in default path {self._reference_dataframe_path}"
)
def add_reference(self, name: str, reference: Reference, overwrite: bool = False) -> None:
if name in self.references.keys():
if not overwrite:
raise NameError(f"{name} exists in References. Use overwrite if this is your intention")
else:
logger.warning(f"overwriting {name}")
logger.info(f"Adding {name} to references")
self.references[name] = reference
def get_dataframe(self) -> pd.DataFrame:
"""Return a pandas dataframe of the references data"""
names_dataframe: List[pd.DataFrame] = []
if not self.references:
return self.reference_dataframe
for name in self.references:
# create a single reference object
_ref: Reference = self.references[name]
_df: pd.DataFrame = _ref.get_dataframe()
# because a user could add genes multiple times, let's drop by unique id
logger.info(f"dropping duplicates: {_df['_id'].duplicated().sum()}")
_df = _df.drop_duplicates("_id")
# insert name at beginning
_df.insert(0, "name", name)
names_dataframe.append(_df)
# concat all the dataframes
concat_df = pd.concat(names_dataframe).reset_index(drop=True)
# groupby names
concat_df_groupby_name = concat_df.groupby("name")
# within names, check how many species are there
chimeric_gb = concat_df_groupby_name.apply(lambda x: len(x["common"].unique()) > 1)
list_of_chimera: List[str] = chimeric_gb[chimeric_gb].index.to_list()
logger.info(f"{list_of_chimera} are chimeric")
# get the indexes which contain names that are to be chimerized
indexes_to_chimera = concat_df[concat_df["name"].isin(list_of_chimera)].index
# set all chimera flags to False
concat_df["chimera"] = False
concat_df.loc[indexes_to_chimera, "chimera"] = True
# change the gene to common|gene
concat_df.loc[indexes_to_chimera, "gene"] = concat_df.loc[indexes_to_chimera, ["common", "gene"]].apply(
lambda x: "|".join(x), axis=1
)
self.reference_dataframe = concat_df
return concat_df
@staticmethod
def from_yaml(yaml_path: Optional[Path] = None) -> "References":
"""Parse a yaml file into a references file object
Parameters
----------
yaml_path : Path to yaml file
Returns
-------
Reference - Reference Object
"""
yaml_ref_object = YamlRef(yaml_path)
# the yaml object
yaml_ref = yaml_ref_object.yaml
# make empty references object
references_object = References()
# iterate through names
for name in yaml_ref:
reference_object = Reference()
# iterate where they came from
for source in yaml_ref.get(name):
# iterate through species within source
for species in yaml_ref.get(name).get(source):
logger.info(f"Adding {species} from {source} to {name}")
list_of_genes: List[str] = yaml_ref[name][source][species]
# add by list of genes per species given source
reference_object.add_genes(species, source, list_of_genes)
references_object.add_reference(name, reference_object)
return references_object
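# Editor's note (illustrative, not in the original source): from_yaml expects a
# nested mapping of name -> source -> species -> [genes]; a minimal file that
# the loop above would accept might look like
#
#   my_reference:
#     imgt:
#       human:
#         - IGHV1-69*01
#         - IGHD3-3*01
#         - IGHJ6*01
#
# The concrete keys depend on the YamlRef file shipped with the package.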
def _make_igblast_ref_database(self, outpath: Union[Path, str]) -> None:
"""Generate the IgBlast reference database from the reference object
Parameters
----------
outpath : Union[Path, str]
The output path, e.g. path/to/output.
Then the database will dump to path/to/output/{Ig,TCR}/blastdb/{name}
"""
# The blast DB groups by V,D and J
logger.debug("Generating from IMGT Internal Database File")
# get the database as a dataframe
database = self.get_dataframe()
if database[database.label == "D-REGION"].empty:
raise ValueError("No D-REGION found in reference object...make sure to add D gene")
# first name, i.e. "human" or "se09"
groupby_dataframe = database.groupby("name")
for name, group_df in groupby_dataframe:
receptor_blast_dir = Path(outpath) / Path(f"Ig/blastdb/{name}/")
if not receptor_blast_dir.exists():
receptor_blast_dir.mkdir(parents=True)
for segment, segment_df in group_df.groupby("gene_segment"):
out_segment = receptor_blast_dir.joinpath(f"{name}_{segment}")
seqs: List[SeqRecord] = segment_df.apply(
lambda x: SeqRecord(Seq(str(x["sequence"])), name=x["gene"]), axis=1
).to_list()
# write this to a fasta file
fasta_file = write_out_fasta(seqs, out_segment)
# Convert fasta file to blast db
write_blast_db(fasta_file, Path(str(fasta_file).split(".fasta")[0]))
logger.info(f"Wrote blast for {fasta_file}")
def _make_auxillary_file(self, outpath: Path) -> None:
"""Generate the auxillary file for the IgBlast reference database
Parameters
----------
outpath : Path
The output path, e.g. path/to/output.
Then the database will dump to path/to/output/aux_db/{scheme}/{name}.aux
Raises
------
ValueError
if the J region hasn't been added to the database, we refuse to make the aux file
"""
# get dataframe
database = self.get_dataframe()
if database[database.label == "J-REGION"].empty:
raise ValueError("No J-REGION found in reference object...make sure to add J def")
# group by source
# for now we only have one scheme
scheme = "imgt"
receptor_aux_dir = Path(outpath).joinpath(f"aux_db/{scheme}")
if not receptor_aux_dir.exists():
logger.info(f"Creating {receptor_aux_dir}")
receptor_aux_dir.mkdir(parents=True)
for group, group_df in database.groupby("name"):
aux_file_name = receptor_aux_dir / f"{group}_gl.aux"
# get a DF with just common species name
common_df = group_df[group_df["gene_segment"] == "J"].copy()
# make sure we don't have any dangling J-REGION
bad_remainders = common_df[(common_df["imgt.remainder"].isna())]
if not bad_remainders.empty:
logger.warning(f"Had to drop {bad_remainders.shape[0]} rows due to bad remainder for {group}")
common_df.drop(bad_remainders.index, inplace=True)
# make columns of an aux database
# common_df = common_df[(common_df["imgt.cdr3_end"] != "")]
common_df.loc[:, "reading_frame"] = common_df["imgt.reading_frame"].astype(int)
common_df.loc[:, "left_over"] = common_df["imgt.remainder"].astype(int)
common_df.loc[:, "end"] = common_df["imgt.cdr3_end"].astype(int) - 1
# JH, JK, JL
common_df["marker"] = (
common_df["gene"].str.split("|").str.get(-1).str.split("-").str.get(0).str[0:4].str[::-1].str[:2]
)
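# Editor's worked example (not in the original source): for "IGHJ6*01" the chain
# gives split("|")[-1] = "IGHJ6*01" -> split("-")[0] = "IGHJ6*01" -> [0:4] = "IGHJ"
# -> reversed = "JHGI" -> [:2] = "JH"; IGKJ*/IGLJ* genes yield "JK"/"JL" the same way.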
# write out the aux file with derived columns
common_df[["gene", "reading_frame", "marker", "end", "left_over"]].to_csv(
aux_file_name, sep="\t", header=None, index=False
)
logger.info(f"Wrote aux to {aux_file_name}")
def _make_internal_annotaion_file(self, outpath: Path) -> None:
"""Generate the internal database file for IgBlast
Parameters
----------
outpath : Path
The output path, e.g. path/to/output.
Then the database will dump to path/to/output/{Ig,TCR}/internal_data/{name}/{name}.ndm.imgt
"""
logger.debug(f"Generating internal annotation file at {outpath}")
# The internal data file structure goes Ig/internal_path/{name}/
database = self.get_dataframe()
for name, group_df in database.groupby("name"):
# get a filtered database for V genes
filtered_data = group_df.loc[group_df["gene_segment"] == "V"].copy()
# the species is the actual entity we are using for the annotation, e.g se09 or human
name_internal_df_path = Path(outpath).joinpath(Path(f"Ig/internal_data/{name}/"))
if not name_internal_df_path.exists():
logger.info(f"Creating {name_internal_df_path}")
name_internal_df_path.mkdir(parents=True)
# subselect and order
index_df = filtered_data[
[
"gene",
"imgt.fwr1_start",
"imgt.fwr1_end",
"imgt.cdr1_start",
"imgt.cdr1_end",
"imgt.fwr2_start",
"imgt.fwr2_end",
"imgt.cdr2_start",
"imgt.cdr2_end",
"imgt.fwr3_start",
"imgt.fwr3_end",
]
].copy()
# makes everything an integer; sets gene to index so it's not affected
# add 1 so we get 1-based indexing
index_df = (index_df.set_index("gene") + 1).astype("Int64").reset_index()
# drop anything where there is an na in the annotation index
index_df = index_df.drop(index_df[index_df.isna().any(axis=1)].index)
scheme = "imgt"
internal_annotations_file_path = name_internal_df_path.joinpath(f"{name}.ndm.{scheme}")
segment = [i.split("|")[-1].split("-")[0][0:4][::-1][:2] for i in index_df["gene"]]
index_df["segment"] = segment
index_df["weird_buffer"] = 0
logger.info(f"Writing to annotation file {internal_annotations_file_path}")
index_df.to_csv(internal_annotations_file_path, sep="\t", header=False, index=False)
# blast reads these suffixes depending on receptor
suffix = "V"
db_outpath = Path(str(name_internal_df_path) + f"/{name}_{suffix}")
# Pass the dataframe and write out the blast database
make_blast_db_for_internal(group_df, db_outpath)
@staticmethod
def from_json(path: Path | str) -> "References":
"""Read file into a reference object
Parameters
----------
path : Union[Path,str]
path to out file
Examples
--------
# read json
references = References.from_json("/path/to/file.json") # can also be file.json.gz
Returns
-------
References - References object
"""
_data = pd.read_json(path, orient="records").astype(
{"imgt.ignored": object, "imgt.not_implemented": object, "imgt.expression_match": object}
)
return References.from_dataframe(_data) # type: ignore
def make_airr_database(self, output_path: Path) -> Path:
"""
Make the igblastn database, internal database and auxiliary database needed by igblast. On success
return a path to the output database.
Parameters
----------
output_path : Path
A path directory to output the database structure
Returns
-------
Path
On success return path of dumped database file.
Examples
--------
reference = Reference()
reference.add_gene({"species": "human", "gene": "IGHV1-69*01", "database": "imgt"})
reference.add_gene({"species": "human", "gene": "IGHD3-3*01", "database": "imgt"})
references = References()
references.add_reference("human", reference)
references.make_airr_database("/path/to/output/")
"""
if not self.references:
# If empty, build the references from the default yaml file and call G3
logger.warning("Reference data is empty - Generating from yaml")
self.references = self.from_yaml().references.copy()
if isinstance(output_path, str):
output_path = Path(output_path)
# dataframe to internal annotation structure
self._make_internal_annotaion_file(output_path)
logger.info(f"Generated Internal Data {output_path}/Ig/internal_data")
# dataframe to igblast annotation structure
self._make_igblast_ref_database(output_path)
logger.info(f"Generated Blast Data {output_path}/Ig/blastdb")
# dataframe to igblast aux structure
self._make_auxillary_file(output_path)
logger.info(f"Generated Aux Data {output_path}/aux_db")
self.default_output_path = Path(output_path)
logger.debug(f"Regenerating frame to {self.reference_dataframe_path}")
self.reference_dataframe = self.get_dataframe()
_out = self.default_output_path / ".references_dataframe.csv.gz"
logger.info(f"Writing out reference dataframe to {self.reference_dataframe_path}")
self.reference_dataframe.to_csv(_out)
self.reference_dataframe_path = _out
return output_path
@staticmethod
def from_dataframe(dataframe: pd.DataFrame) -> "References":
"""Read dataframe into a reference object
Parameters
----------
dataframe : pd.DataFrame
dataframe of the Reference file
Examples
--------
reference_df = pd.read_csv("/path/to/file.csv") # can also be file.csv.gz
references_object = References.from_dataframe(reference_df)
Returns
-------
References - References object
Raises
------
ValueError
if a pd.DataFrame is not supplied
"""
references = References()
for name, name_df in dataframe.groupby("name"):
name_df["gene"] = name_df["gene"].str.split("|").str[-1]
ref = Reference().from_dataframe(
name_df.drop(columns=["name"]).astype(
{"imgt.ignored": object, "imgt.not_implemented": object, "imgt.expression_match": object}
)
)
references.add_reference(name, ref)
references.reference_dataframe = dataframe
return references
def __repr__(self) -> str:
return self.get_dataframe().__repr__()
|
PypiClean
|
/cumm_cu102-0.4.11-cp37-cp37m-win_amd64.whl/cumm/tensorview/tvio.py
|
from typing import Any, Union
from typing import Dict, Hashable
import numpy as np
from cumm import tensorview as tv
import json
from collections import abc
from functools import reduce
JSON_INDEX_KEY = "__cumm_io_json_index"
NPDTYPE_TO_JSONARRAY_MAP = {
np.dtype(np.uint64): tv.uint64,
np.dtype(np.uint32): tv.uint32,
np.dtype(np.uint16): tv.uint16,
np.dtype(np.uint8): tv.uint8,
np.dtype(np.int64): tv.int64,
np.dtype(np.int32): tv.int32,
np.dtype(np.int16): tv.int16,
np.dtype(np.int8): tv.int8,
np.dtype(np.float64): tv.float64,
np.dtype(np.float32): tv.float32,
np.dtype(np.float16): tv.float16,
np.dtype(np.bool_): tv.bool_,
}
def _inv_map(dict_map: Dict[Hashable, Hashable]) -> Dict[Hashable, Hashable]:
return {v: k for k, v in dict_map.items()}
INV_NPDTYPE_TO_JSONARRAY_MAP = _inv_map(NPDTYPE_TO_JSONARRAY_MAP)
class Placeholder(object):
def __init__(self, index: int, nbytes: int):
self.index = index
self.nbytes = nbytes
def __add__(self, other):
assert self.index == other.index
return Placeholder(self.index, self.nbytes + other.nbytes)
def __repr__(self):
return "Placeholder[{},{}]".format(self.index, self.nbytes)
def __eq__(self, other):
return self.index == other.index and self.nbytes == other.nbytes
def is_json_index(data):
return isinstance(data, dict) and JSON_INDEX_KEY in data
def byte_size(obj: Union[np.ndarray, tv.Tensor]) -> int:
if isinstance(obj, np.ndarray):
return obj.nbytes
if isinstance(obj, tv.Tensor):
return obj.size * obj.itemsize
else:
raise NotImplementedError
def _extract_arrays_from_data(arrays,
data,
object_classes=(np.ndarray,),
json_index=False):
# can't use abc.Sequence because string is sequence too.
if isinstance(data, (list, tuple)):
data_skeleton = [None] * len(data)
for i in range(len(data)):
e = data[i]
if isinstance(e, object_classes):
data_skeleton[i] = {JSON_INDEX_KEY: len(arrays)}
arrays.append(e)
else:
data_skeleton[i] = _extract_arrays_from_data(
arrays, e, object_classes, json_index)
if isinstance(data, tuple):
data_skeleton = tuple(data_skeleton)
return data_skeleton
elif isinstance(data, abc.Mapping):
data_skeleton = {}
for k, v in data.items():
if isinstance(v, object_classes):
data_skeleton[k] = {JSON_INDEX_KEY: len(arrays)}
arrays.append(v)
else:
data_skeleton[k] = _extract_arrays_from_data(
arrays, v, object_classes, json_index)
return data_skeleton
else:
data_skeleton = None
if isinstance(data, object_classes):
data_skeleton = {JSON_INDEX_KEY: len(arrays)}
arrays.append(data)
else:
data_skeleton = data
return data_skeleton
def extract_arrays_from_data(data,
object_classes=(np.ndarray,),
json_index=False):
arrays = []
data_skeleton = _extract_arrays_from_data(arrays,
data,
object_classes=object_classes,
json_index=json_index)
return arrays, data_skeleton
def align_offset(offset, n):
"""given a byte offset, align it and return an aligned offset
"""
if n <= 0:
return offset
return n * ((offset + n - 1) // n)
def put_arrays_to_data(arrays, data_skeleton, json_index=False) -> Any:
if not arrays:
return data_skeleton
return _put_arrays_to_data(arrays, data_skeleton, json_index)
def _put_arrays_to_data(arrays, data_skeleton, json_index=False):
if isinstance(data_skeleton, (list, tuple)):
length = len(data_skeleton)
data = [None] * length
for i in range(length):
e = data_skeleton[i]
if is_json_index(e):
data[i] = arrays[e[JSON_INDEX_KEY]]
else:
data[i] = _put_arrays_to_data(arrays, e, json_index)
if isinstance(data_skeleton, tuple):
data = tuple(data)
return data
elif isinstance(data_skeleton, abc.Mapping):
data = {}
for k, v in data_skeleton.items():
if is_json_index(v):
data[k] = arrays[v[JSON_INDEX_KEY]]
else:
data[k] = _put_arrays_to_data(arrays, v, json_index)
return data
else:
if is_json_index(data_skeleton):
data = arrays[data_skeleton[JSON_INDEX_KEY]]
else:
data = data_skeleton
return data
def dumps_jsonarray(obj, multi_thread=False, buffer=None, use_bytearray=False, align_size: int = 32):
"""
layout:
+--------------+------------+---------------------------------+--------------+
|meta_start_pos|meta_end_pos| array/bytes content | meta |
+--------------+------------+---------------------------------+--------------+
data without array/bytes will be saved as bytes in content.
meta format:
{
"array": [
{
"shape": [...]
"dtype": ...
"offset": ...
}
]
"data": skeleton
}
"""
arrays, data_skeleton = extract_arrays_from_data(obj, (np.ndarray, tv.Tensor), True)
array_meta = []
start = 16
for i in range(len(arrays)):
arr = arrays[i]
start_aligned = align_offset(start, align_size)
if isinstance(arr, tv.Tensor):
assert arr.device == -1
arr_np = arr.numpy_view()
else:
arr_np = arr
# ascontiguous will convert scalar to 1-D array. be careful.
arrays[i] = np.ascontiguousarray(arr_np)
array_meta.append({
"shape": arrays[i].shape,
"dtype": NPDTYPE_TO_JSONARRAY_MAP[arrays[i].dtype],
"offset": start_aligned,
"is_np": isinstance(arr, np.ndarray),
})
start = start_aligned + arrays[i].nbytes
meta = {
"array": array_meta,
"data": data_skeleton,
}
meta_json = json.dumps(meta).encode("utf8")
meta_length = len(meta_json)
array_buffers = []
for i in range(len(arrays)):
array_buffers.append((arrays[i].view(np.uint8),
array_meta[i]["offset"], arrays[i].nbytes))
total_length = start + meta_length
if buffer is None:
if not use_bytearray:
buffer = np.empty(total_length, dtype=np.uint8)
else:
buffer = bytearray(total_length)
else:
assert len(buffer) >= total_length
buffer_view = memoryview(buffer)
content_end_offset = start
meta_end_offset = content_end_offset + meta_length
buffer_view[:8] = np.array(content_end_offset, dtype=np.int64).tobytes()
buffer_view[8:16] = np.array(meta_end_offset, dtype=np.int64).tobytes()
buffer_view[16:24] = np.array(align_size, dtype=np.int64).tobytes()
shared_mem = np.frombuffer(buffer_view, dtype=np.uint8)
for a_buf, offset, size in array_buffers:
shared_mem_view = memoryview(shared_mem[offset:offset + size])
if not isinstance(a_buf, bytes):
buf_mem_view = memoryview(a_buf.reshape(-1))
if multi_thread: # slow when multi_thread copy in worker
shared_mem[offset:offset + size] = a_buf.reshape(-1)
else:
shared_mem_view[:] = buf_mem_view
else:
shared_mem_view[:] = a_buf
shared_mem[content_end_offset:content_end_offset +
meta_length] = np.frombuffer(meta_json, dtype=np.uint8)
return buffer
def loads_jsonarray(binary, copy=True):
buffer_view = memoryview(binary)
content_end_offset = np.frombuffer(buffer_view[:8], dtype=np.int64).item()
meta_end_offset = np.frombuffer(buffer_view[8:16], dtype=np.int64).item()
pb_bytes = buffer_view[content_end_offset:meta_end_offset]
meta = json.loads(bytearray(pb_bytes))
array_metas = meta["array"]
data_skeleton = meta["data"]
shared_mem = buffer_view
results_array = []
for array_meta in array_metas:
shape = array_meta["shape"]
dtype = INV_NPDTYPE_TO_JSONARRAY_MAP[array_meta["dtype"]]
offset = array_meta["offset"]
is_np = array_meta["is_np"]
length = reduce(lambda x, y: x * y, shape) * np.dtype(dtype).itemsize
arr = np.frombuffer(memoryview(shared_mem[offset:offset + length]),
dtype=dtype).reshape(shape)
if is_np:
if copy:
arr = arr.copy()
else:
arr = tv.from_numpy(arr)
if copy:
arr = arr.clone()
results_array.append(arr)
results = put_arrays_to_data(results_array, data_skeleton, json_index=True)
return results
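# Editor's sketch (not part of the original module): a minimal round trip through
# dumps_jsonarray / loads_jsonarray using plain numpy payloads.
if __name__ == "__main__":
    _payload = {"a": np.arange(4, dtype=np.float32), "meta": {"k": 1}}
    _buf = dumps_jsonarray(_payload)
    _restored = loads_jsonarray(_buf)
    assert np.allclose(_restored["a"], _payload["a"])
    assert _restored["meta"] == {"k": 1}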
|
PypiClean
|
/raptus.recipe.torii-1.0b1.tar.gz/raptus.recipe.torii-1.0b1/raptus/recipe/torii/README.txt
|
Supported options
=================
The recipe supports the following options:
.. Note to recipe author!
----------------------
For each option the recipe uses you should include a description
about the purpose of the option, the format and semantics of the
values it accepts, whether it is mandatory or optional and what the
default value is if it is omitted.
option1
Description for ``option1``...
option2
Description for ``option2``...
Example usage
=============
.. Note to recipe author!
----------------------
zc.buildout provides a nice testing environment which makes it
relatively easy to write doctests that both demonstrate the use of
the recipe and test it.
You can find examples of recipe doctests on PyPI, e.g.
http://pypi.python.org/pypi/zc.recipe.egg
The PyPI page for zc.buildout contains documentation about the test
environment.
http://pypi.python.org/pypi/zc.buildout#testing-support
Below is a skeleton doctest that you can start with when building
your own tests.
We'll start by creating a buildout that uses the recipe::
>>> write('buildout.cfg',
... """
... [buildout]
... parts = test1
...
... [test1]
... recipe = raptus.recipe.torii
... option1 = %(foo)s
... option2 = %(bar)s
... """ % { 'foo' : 'value1', 'bar' : 'value2'})
Running the buildout gives us::
>>> print 'start', system(buildout)
start...
Installing test1.
Unused options for test1: 'option2' 'option1'.
<BLANKLINE>
|
PypiClean
|
/altimate_dataminion-0.0.8-py3-none-any.whl/altimate_models/base/metadata_profiler.py
|
import decimal
from altimate_models.base.extractor import SQLAlchemyExtractor
from altimate_models.base.profiler import SQLAlchemyProfiler
from altimate_models.base.source import DataSource
from altimate_models.shared_models import Policy
from altimate_profiler.builders.metadata_query_builder import MetadataQueryBuilder
from altimate_profiler.builders.utils import infer_type, map_type_to_standard_types
from altimate_profiler.metrics.metadata_metric_type import MetadataMetricType
class SqlAlchemyMetadataProfiler(SQLAlchemyProfiler):
COMMON_METRICS = [
MetadataMetricType.CARDINALITY,
MetadataMetricType.NULL_PERCENTAGE,
]
NUMERIC_METRICS = [
MetadataMetricType.NUMERIC_MAX,
MetadataMetricType.NUMERIC_MIN,
MetadataMetricType.NUMERIC_MEAN,
MetadataMetricType.NUMERIC_STD,
]
TEXT_METRICS = [MetadataMetricType.MIN_LENGTH, MetadataMetricType.MAX_LENGTH]
def __init__(
self,
data_source: DataSource,
dialect,
policy: Policy,
resource_name: str,
):
self.resource_name = resource_name
self.metadata_query_builder = MetadataQueryBuilder(dialect)
super().__init__(
SQLAlchemyExtractor(data_source.get_connection_string()), policy, dialect
)
def get_row_count(self):
query = self.metadata_query_builder.row_count(self.resource_name)
metadata = self.extractor.run(query)
return metadata["rows"][0][metadata["columns"][0]]
def extract_result(self, result, column_result):
column_metrics = result["columns"]
for column_metric in column_metrics:
column, metric = column_metric.split("__")
column = column.lower()
metric = metric.lower()
for row in result["rows"]:
if isinstance(row[column_metric], decimal.Decimal):
column_result[column][metric] = round(float(row[column_metric]), 2)
elif isinstance(row[column_metric], float):
column_result[column][metric] = round(row[column_metric], 2)
else:
column_result[column][metric] = row[column_metric]
return column_result
def get_common_metadata(self, column_result: dict):
# Get CARDINALITY AND NULL PERCENTAGE
query = self.metadata_query_builder.get_metrics(
list(column_result.keys()), self.resource_name, self.COMMON_METRICS
)
return self.extract_result(self.extractor.run(query), column_result)
def get_numeric_metadata(
self, column_result: dict, column_with_inferred_types: dict
):
numeric_columns = [
col
for col, col_type in column_result.items()
if column_with_inferred_types[col] == "NUMERIC"
]
if len(numeric_columns) == 0:
return column_result
query = self.metadata_query_builder.get_metrics(
numeric_columns, self.resource_name, self.NUMERIC_METRICS
)
return self.extract_result(self.extractor.run(query), column_result)
def text_metadata(self, column_result: dict, column_with_inferred_types: dict):
text_columns = [
col
for col, col_type in column_result.items()
if column_with_inferred_types[col] == "TEXT"
]
if len(text_columns) == 0:
return column_result
query = self.metadata_query_builder.get_metrics(
text_columns, self.resource_name, self.TEXT_METRICS
)
return self.extract_result(self.extractor.run(query), column_result)
def get_metadata(self):
discovery_data = self.get_discovery_data()
column_with_inferred_types = {}
column_result = {}
resource_result = {}
for row in discovery_data["rows"]:
column_name = row["COL_NAME"].lower()
column_type = row["COL_TYPE"]
column_result[column_name] = {}
common_type = map_type_to_standard_types.get(column_type.upper(), "UNKNOWN")
column_with_inferred_types[column_name] = infer_type(
column_name, common_type
)
resource_result["row_count"] = self.get_row_count()
column_result = self.get_common_metadata(column_result)
column_result = self.get_numeric_metadata(
column_result, column_with_inferred_types
)
column_result = self.text_metadata(column_result, column_with_inferred_types)
return {
"columns": column_result,
"resource": resource_result,
}
def get_debugdata(self, debug_sql):
debug_data = self.get_debug_data(debug_sql)
return debug_data
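# Editor's note (assumption, not in the original source): get_metadata() returns a
# dict shaped roughly like
#   {"columns": {"<col>": {"<metric>": value, ...}, ...},
#    "resource": {"row_count": <int>}}
# where each "<metric>" key is the lower-cased metric part of the
# "<column>__<METRIC>" aliases produced by MetadataQueryBuilder.get_metrics().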
|
PypiClean
|
/skytime-0.16.1-py3-none-any.whl/sktime/registry/_base_classes.py
|
__author__ = ["fkiraly"]
import pandas as pd
from sktime.alignment.base import BaseAligner
from sktime.annotation.base import BaseSeriesAnnotator
from sktime.base import BaseEstimator, BaseObject
from sktime.classification.base import BaseClassifier
from sktime.classification.early_classification import BaseEarlyClassifier
from sktime.clustering.base import BaseClusterer
from sktime.dists_kernels._base import (
BasePairwiseTransformer,
BasePairwiseTransformerPanel,
)
from sktime.forecasting.base import BaseForecaster
from sktime.forecasting.model_selection._split import BaseSplitter
from sktime.networks.base import BaseDeepNetwork
from sktime.param_est.base import BaseParamFitter
from sktime.performance_metrics.base import BaseMetric
from sktime.regression.base import BaseRegressor
from sktime.transformations.base import (
BaseTransformer,
_PanelToPanelTransformer,
_PanelToTabularTransformer,
_SeriesToPrimitivesTransformer,
_SeriesToSeriesTransformer,
)
BASE_CLASS_REGISTER = [
("object", BaseObject, "object"),
("estimator", BaseEstimator, "estimator = object with fit"),
("aligner", BaseAligner, "time series aligner or sequence aligner"),
("classifier", BaseClassifier, "time series classifier"),
("clusterer", BaseClusterer, "time series clusterer"),
("early_classifier", BaseEarlyClassifier, "early time series classifier"),
("forecaster", BaseForecaster, "forecaster"),
("metric", BaseMetric, "performance metric"),
("network", BaseDeepNetwork, "deep learning network"),
("param_est", BaseParamFitter, "parameter fitting estimator"),
("regressor", BaseRegressor, "time series regressor"),
("series-annotator", BaseSeriesAnnotator, "time series annotator"),
("splitter", BaseSplitter, "time series splitter"),
("transformer", BaseTransformer, "time series transformer"),
(
"transformer-pairwise",
BasePairwiseTransformer,
"pairwise transformer for tabular data, distance or kernel",
),
(
"transformer-pairwise-panel",
BasePairwiseTransformerPanel,
"pairwise transformer for panel data, distance or kernel",
),
]
BASE_CLASS_SCITYPE_LIST = pd.DataFrame(BASE_CLASS_REGISTER)[0].tolist()
BASE_CLASS_LIST = pd.DataFrame(BASE_CLASS_REGISTER)[1].tolist()
BASE_CLASS_LOOKUP = dict(zip(BASE_CLASS_SCITYPE_LIST, BASE_CLASS_LIST))
TRANSFORMER_MIXIN_REGISTER = [
(
"series-to-primitive-trafo",
_SeriesToPrimitivesTransformer,
"time-series-to-primitives transformer",
),
(
"series-to-series-trafo",
_SeriesToSeriesTransformer,
"time-series-to-time-series transformer",
),
(
"panel-to-tabular-trafo",
_PanelToTabularTransformer,
"panel-to-tabular transformer",
),
("panel-to-panel-trafo", _PanelToPanelTransformer, "panel-to-panel transformer"),
]
TRANSFORMER_MIXIN_SCITYPE_LIST = pd.DataFrame(TRANSFORMER_MIXIN_REGISTER)[0].tolist()
TRANSFORMER_MIXIN_LIST = pd.DataFrame(TRANSFORMER_MIXIN_REGISTER)[1].tolist()
TRANSFORMER_MIXIN_LOOKUP = dict(
zip(TRANSFORMER_MIXIN_SCITYPE_LIST, TRANSFORMER_MIXIN_LIST)
)
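# Editor's sketch (not in the original module): the lookup dicts map scitype
# strings to base classes, so a registry consumer can resolve them directly, e.g.
#   BASE_CLASS_LOOKUP["forecaster"] is BaseForecaster                                  # True
#   TRANSFORMER_MIXIN_LOOKUP["series-to-series-trafo"] is _SeriesToSeriesTransformer   # True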
|
PypiClean
|
/amazon-textract-textractor-1.3.2.tar.gz/amazon-textract-textractor-1.3.2/textractor/utils/search_utils.py
|
try:
import numpy as np
except ImportError:
# No need to log it here as numpy is only used if SentenceTransformers is used
# The latter has numpy as dependency.
pass
import math
import editdistance
from textractor.data.constants import SimilarityMetric
from textractor.exceptions import MissingDependencyException
from textractor.data.constants import (
IS_COLUMN_HEAD,
IS_FOOTER_CELL,
IS_TITLE_CELL,
IS_SUMMARY_CELL,
IS_SECTION_TITLE_CELL,
CellTypes,
)
class SearchUtils:
model = None
util = None
model_string = "all-MiniLM-L6-v2"
@classmethod
def get_word_similarity(
cls, word_1: str, word_2: str, similarity_metric: SimilarityMetric
) -> float:
"""
Returns the extent of similarity between the input words using the similarity_metric input by the user.
:param word_1: First word to check for similarity
:type word_1: str
:param word_2: Second word to check for similarity
:type word_2: str
:param similarity_metric: The function supports one of 3 metrics \
* Levenshtein distance/ edit distance \
* Euclidean distance \
* Cosine distance
:type similarity_metric: str
:return: Returns the similarity measure calculated based on the metric for the 2 input words.
:rtype: float
"""
if cls.model is None and similarity_metric != SimilarityMetric.LEVENSHTEIN:
try:
from sentence_transformers import SentenceTransformer, util
except ImportError:
raise MissingDependencyException(
"sentence_transformers is not installed. Use SimilarityMetric.LEVENSHTEIN."
)
cls.model = SentenceTransformer(cls.model_string)
cls.util = util
if similarity_metric == SimilarityMetric.LEVENSHTEIN:
return normalized_edit_distance(
word_1.lower(), word_2.lower()
)
elif similarity_metric == SimilarityMetric.EUCLIDEAN:
ref_word_emb = cls.model.encode([word_1])
word_emb = cls.model.encode([word_2])
dist = np.linalg.norm(ref_word_emb - word_emb)
return dist
else:
ref_word_emb = cls.model.encode([word_1])
word_emb = cls.model.encode([word_2])
similarity = cls.util.cos_sim(ref_word_emb, word_emb)
return similarity.item()
def jaccard_similarity(list_1: list, list_2: list) -> float:
"""
Calculates Jaccard similarity between the 2 input lists.
:param list_1: First list to check for similarity
:type list_1: list
:param list_2: Second list to check for similarity
:type list_2: list
:return: Returns the similarity measure calculated for the 2 input lists.
:rtype: float
"""
set_1 = set(list_1)
set_2 = set(list_2)
return float(len(set_1.intersection(set_2)) / len(set_1.union(set_2)))
def get_metadata_attr_name(cell_atr):
"""
Returns metadata attribute mapping to the input CellType.
:param cell_atr: Input cell type
:type: enum
:return: Returns metadata attribute mapping to the input CellType.
:rtype: str
"""
cell_map = {
CellTypes.COLUMN_HEADER: IS_COLUMN_HEAD,
CellTypes.SECTION_TITLE: IS_SECTION_TITLE_CELL,
CellTypes.SUMMARY_CELL: IS_SUMMARY_CELL,
CellTypes.FLOATING_TITLE: IS_TITLE_CELL,
CellTypes.FLOATING_FOOTER: IS_FOOTER_CELL,
}
try:
return cell_map[cell_atr]
except KeyError:
return ""
def normalized_edit_distance(s1: str, s2: str):
"""
Returns the normalized edit distance from Lopresti et al.
:param s1: First string
:type s1: str
:param s2: Second string
:type s2: str
"""
dist = editdistance.eval(s1, s2)
if min(len(s1), len(s2)) - dist == 0:
return 0.0
return 1.0 / math.exp(dist / (min(len(s1), len(s2)) - dist))
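# Editor's sketch (not part of the original module): quick checks of the two
# pure-string helpers that do not need sentence-transformers.
if __name__ == "__main__":
    print(jaccard_similarity(["a", "b", "c"], ["b", "c", "d"]))  # 0.5
    print(normalized_edit_distance("kitten", "sitting"))         # ~0.37 (1.0 means identical)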
|
PypiClean
|
/dgl_cu92-0.7a210408-cp36-cp36m-manylinux1_x86_64.whl/dgl/dataloading/neighbor.py
|
from .dataloader import BlockSampler
from .. import sampling, subgraph, distributed
class MultiLayerNeighborSampler(BlockSampler):
"""Sampler that builds computational dependency of node representations via
neighbor sampling for multilayer GNN.
This sampler will make every node gather messages from a fixed number of neighbors
per edge type. The neighbors are picked uniformly.
Parameters
----------
fanouts : list[int] or list[dict[etype, int] or None]
List of neighbors to sample per edge type for each GNN layer, starting from the
first layer.
If the graph is homogeneous, only an integer is needed for each layer.
If None is provided for one layer, all neighbors will be included regardless of
edge types.
If -1 is provided for one edge type on one layer, then all inbound edges
of that edge type will be included.
replace : bool, default False
Whether to sample with replacement
return_eids : bool, default False
Whether to return the edge IDs involved in message passing in the MFG.
If True, the edge IDs will be stored as an edge feature named ``dgl.EID``.
Examples
--------
To train a 3-layer GNN for node classification on a set of nodes ``train_nid`` on
a homogeneous graph where each node takes messages from 5, 10, 15 neighbors for
the first, second, and third layer respectively (assuming the backend is PyTorch):
>>> sampler = dgl.dataloading.MultiLayerNeighborSampler([5, 10, 15])
>>> collator = dgl.dataloading.NodeCollator(g, train_nid, sampler)
>>> dataloader = torch.utils.data.DataLoader(
... collator.dataset, collate_fn=collator.collate,
... batch_size=1024, shuffle=True, drop_last=False, num_workers=4)
>>> for blocks in dataloader:
... train_on(blocks)
If training on a heterogeneous graph and you want different number of neighbors for each
edge type, one should instead provide a list of dicts. Each dict would specify the
number of neighbors to pick per edge type.
>>> sampler = dgl.dataloading.MultiLayerNeighborSampler([
... {('user', 'follows', 'user'): 5,
... ('user', 'plays', 'game'): 4,
... ('game', 'played-by', 'user'): 3}] * 3)
Notes
-----
For the concept of MFGs, please refer to
:ref:`User Guide Section 6 <guide-minibatch>` and
:doc:`Minibatch Training Tutorials <tutorials/large/L0_neighbor_sampling_overview>`.
"""
def __init__(self, fanouts, replace=False, return_eids=False):
super().__init__(len(fanouts), return_eids)
self.fanouts = fanouts
self.replace = replace
def sample_frontier(self, block_id, g, seed_nodes):
fanout = self.fanouts[block_id]
if isinstance(g, distributed.DistGraph):
if fanout is None:
# TODO(zhengda) There is a bug in the distributed version of in_subgraph.
# let's use sample_neighbors to replace in_subgraph for now.
frontier = distributed.sample_neighbors(g, seed_nodes, -1, replace=False)
else:
frontier = distributed.sample_neighbors(g, seed_nodes, fanout, replace=self.replace)
else:
if fanout is None:
frontier = subgraph.in_subgraph(g, seed_nodes)
else:
frontier = sampling.sample_neighbors(g, seed_nodes, fanout, replace=self.replace)
return frontier
class MultiLayerFullNeighborSampler(MultiLayerNeighborSampler):
"""Sampler that builds computational dependency of node representations by taking messages
from all neighbors for multilayer GNN.
This sampler will make every node gather messages from every single neighbor per edge type.
Parameters
----------
n_layers : int
The number of GNN layers to sample.
return_eids : bool, default False
Whether to return the edge IDs involved in message passing in the MFG.
If True, the edge IDs will be stored as an edge feature named ``dgl.EID``.
Examples
--------
To train a 3-layer GNN for node classification on a set of nodes ``train_nid`` on
a homogeneous graph where each node takes messages from all neighbors for the first,
second, and third layer respectively (assuming the backend is PyTorch):
>>> sampler = dgl.dataloading.MultiLayerFullNeighborSampler(3)
>>> collator = dgl.dataloading.NodeCollator(g, train_nid, sampler)
>>> dataloader = torch.utils.data.DataLoader(
... collator.dataset, collate_fn=collator.collate,
... batch_size=1024, shuffle=True, drop_last=False, num_workers=4)
>>> for blocks in dataloader:
... train_on(blocks)
Notes
-----
For the concept of MFGs, please refer to
:ref:`User Guide Section 6 <guide-minibatch>` and
:doc:`Minibatch Training Tutorials <tutorials/large/L0_neighbor_sampling_overview>`.
"""
def __init__(self, n_layers, return_eids=False):
super().__init__([None] * n_layers, return_eids=return_eids)
|
PypiClean
|
/z3c.widgets.flashupload-1.0c1.zip/z3c.widgets.flashupload-1.0c1/src/z3c/widgets/flashupload/resources/upload.js
|
function z3cFlashUploadStartBrowsing(){
// tells flash to start with browsing
if(window.fuploader){
window.document["fuploader"].SetVariable("startBrowse", "go");
}else if(document.fuploader){
document.fuploader.SetVariable("startBrowse", "go");
}
}
function z3cFlashUploadEnableBrowseButton(){
document.getElementById("flash.start.browsing").style.visibility = "visible";
document.getElementById("flash.start.browsing").disabled = false;
}
function z3cFlashUploadDisableBrowseButton(){
document.getElementById("flash.start.browsing").style.visibility = "hidden";
document.getElementById("flash.start.browsing").disabled = "disabled";
}
function z3cFlashUploadOnUploadCompleteFEvent(status){
// always fired from flash
if (typeof(z3cFlashUploadOnUploadComplete) == "function"){
z3cFlashUploadOnUploadComplete(status);
}
}
function z3cFlashUploadOnFileCompleteFEvent(filename){
// always fired from flash
if (typeof(z3cFlashUploadOnFileComplete) =="function"){
z3cFlashUploadOnFileComplete(filename);
}
}
/**
called when the user presses the cancel button while browsing
*/
function z3cFlashUploadOnCancelFEvent(){
if (typeof(z3cFlashUploadOnCancelEvent) =="function"){
z3cFlashUploadOnCancelEvent();
}
}
/**
called if an error occurred during the upload process
*/
function z3cFlashUploadOnErrorFEvent(error_str){
if (typeof(z3cFlashUploadOnErrorEvent) =="function"){
z3cFlashUploadOnErrorEvent(error_str);
}
}
function prepareUrlForFlash(url){
return escape(url).split("+").join("%2B");
}
/**
creates a instance of the multifile upload widget
insidde the target div.
Required global variable: swf_upload_target_path
*/
function createFlashUpload()
{
var so = new SWFObject(swf_upload_url, "fuploader", "100%", "100%", "8.0.33", "#f8f8f8");
so.addParam("allowScriptAccess", "sameDomain");
so.addParam("wmode", "transparent");
// we need to manually quote the "+" signs to make sure they do not
// result in a " " sign inside flash
so.addVariable("target_path", swf_upload_target_path);
so.addVariable("site_path", prepareUrlForFlash(swf_upload_site_url));
so.addVariable("config_path", prepareUrlForFlash(swf_upload_config_url));
var success = so.write("flashuploadtarget");
if (!success){
$("#flashuploadtarget").load("noflashupload.html")
}
}
if (window.addEventListener){
window.addEventListener('load', createFlashUpload, false);
}
else if(window.attachEvent){
window.attachEvent('onload', createFlashUpload);
}
|
PypiClean
|
/watchhub-2020.12.1.tar.gz/watchhub-2020.12.1/README.rst
|
python-watchhub
================
.. image:: https://travis-ci.com/imduffy15/python-watchhub.svg?branch=master
:target: https://travis-ci.com/imduffy15/python-watchhub
:alt: Build Status
.. image:: https://coveralls.io/repos/github/imduffy15/python-watchhub/badge.svg
:target: https://coveralls.io/github/imduffy15/python-watchhub
:alt: Coverage Status
.. image:: https://pepy.tech/badge/watchhub
:target: https://pepy.tech/project/watchhub
:alt: Downloads
Documentation for this package can be found at `https://watchhub.readthedocs.io <https://watchhub.readthedocs.io>`_.
``watchhub`` is a Python package for querying the strem.io Watchhub API. This API enables you to discover content IDs across many streaming services such as Netflix, Disney+, NOW TV, etc., from an IMDb ID.
Installation
------------
.. code-block::
pip install watchhub
To utilize the async version of this code, you must install into a Python 3.7+ environment via:
.. code-block::
pip install watchhub[async]
|
PypiClean
|
/ixnetwork_restpy-1.1.10.tar.gz/ixnetwork_restpy-1.1.10/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/ancpvlanrange_4792f9d284c8f7853c65149155d5c72e.py
|
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class AncpVlanRange(Base):
"""
The AncpVlanRange class encapsulates a required ancpVlanRange resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = "ancpVlanRange"
_SDM_ATT_MAP = {
"Enabled": "enabled",
"FirstId": "firstId",
"IdIncrMode": "idIncrMode",
"Increment": "increment",
"IncrementStep": "incrementStep",
"InnerEnable": "innerEnable",
"InnerFirstId": "innerFirstId",
"InnerIncrement": "innerIncrement",
"InnerIncrementStep": "innerIncrementStep",
"InnerPriority": "innerPriority",
"InnerTpid": "innerTpid",
"InnerUniqueCount": "innerUniqueCount",
"Name": "name",
"ObjectId": "objectId",
"Priority": "priority",
"Tpid": "tpid",
"UniqueCount": "uniqueCount",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
super(AncpVlanRange, self).__init__(parent, list_op)
@property
def VlanIdInfo(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.vlanidinfo_1231a21eba96242c5e969b4af1b88293.VlanIdInfo): An instance of the VlanIdInfo class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.vlanidinfo_1231a21eba96242c5e969b4af1b88293 import (
VlanIdInfo,
)
if len(self._object_properties) > 0:
if self._properties.get("VlanIdInfo", None) is not None:
return self._properties.get("VlanIdInfo")
return VlanIdInfo(self)
@property
def Enabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Disabled ranges won't be configured nor validated.
"""
return self._get_attribute(self._SDM_ATT_MAP["Enabled"])
@Enabled.setter
def Enabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Enabled"], value)
@property
def FirstId(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: The first ID to be used for the first VLAN tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["FirstId"])
@FirstId.setter
def FirstId(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["FirstId"], value)
@property
def IdIncrMode(self):
# type: () -> int
"""
Returns
-------
- number: Method used to increment VLAN IDs. May take the following values: 0 (First VLAN first), 1 (Last VLAN first), 2 (All).
"""
return self._get_attribute(self._SDM_ATT_MAP["IdIncrMode"])
@IdIncrMode.setter
def IdIncrMode(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["IdIncrMode"], value)
@property
def Increment(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Amount of increment per increment step for first VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
"""
return self._get_attribute(self._SDM_ATT_MAP["Increment"])
@Increment.setter
def Increment(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["Increment"], value)
@property
def IncrementStep(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Frequency of first VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
"""
return self._get_attribute(self._SDM_ATT_MAP["IncrementStep"])
@IncrementStep.setter
def IncrementStep(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["IncrementStep"], value)
@property
def InnerEnable(self):
# type: () -> bool
"""DEPRECATED
Returns
-------
- bool: Enable the inner VLAN.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerEnable"])
@InnerEnable.setter
def InnerEnable(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerEnable"], value)
@property
def InnerFirstId(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: The first ID to be used for the inner VLAN tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerFirstId"])
@InnerFirstId.setter
def InnerFirstId(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerFirstId"], value)
@property
def InnerIncrement(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Amount of increment per increment step for Inner VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerIncrement"])
@InnerIncrement.setter
def InnerIncrement(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerIncrement"], value)
@property
def InnerIncrementStep(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Frequency of inner VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerIncrementStep"])
@InnerIncrementStep.setter
def InnerIncrementStep(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerIncrementStep"], value)
@property
def InnerPriority(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: The 802.1Q priority to be used for the inner VLAN tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerPriority"])
@InnerPriority.setter
def InnerPriority(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerPriority"], value)
@property
def InnerTpid(self):
# type: () -> str
"""DEPRECATED
Returns
-------
- str: The TPID value in the inner VLAN Tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerTpid"])
@InnerTpid.setter
def InnerTpid(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerTpid"], value)
@property
def InnerUniqueCount(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Number of unique inner VLAN IDs to use.
"""
return self._get_attribute(self._SDM_ATT_MAP["InnerUniqueCount"])
@InnerUniqueCount.setter
def InnerUniqueCount(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["InnerUniqueCount"], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of range
"""
return self._get_attribute(self._SDM_ATT_MAP["Name"])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["Name"], value)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP["ObjectId"])
@property
def Priority(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: The 802.1Q priority to be used for the outer VLAN tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["Priority"])
@Priority.setter
def Priority(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["Priority"], value)
@property
def Tpid(self):
# type: () -> str
"""DEPRECATED
Returns
-------
- str: The TPID value in the outer VLAN Tag.
"""
return self._get_attribute(self._SDM_ATT_MAP["Tpid"])
@Tpid.setter
def Tpid(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["Tpid"], value)
@property
def UniqueCount(self):
# type: () -> int
"""DEPRECATED
Returns
-------
- number: Number of unique first VLAN IDs to use.
"""
return self._get_attribute(self._SDM_ATT_MAP["UniqueCount"])
@UniqueCount.setter
def UniqueCount(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["UniqueCount"], value)
def update(
self,
Enabled=None,
FirstId=None,
IdIncrMode=None,
Increment=None,
IncrementStep=None,
InnerEnable=None,
InnerFirstId=None,
InnerIncrement=None,
InnerIncrementStep=None,
InnerPriority=None,
InnerTpid=None,
InnerUniqueCount=None,
Name=None,
Priority=None,
Tpid=None,
UniqueCount=None,
):
# type: (bool, int, int, int, int, bool, int, int, int, int, str, int, str, int, str, int) -> AncpVlanRange
"""Updates ancpVlanRange resource on the server.
Args
----
- Enabled (bool): Disabled ranges won't be configured nor validated.
- FirstId (number): The first ID to be used for the first VLAN tag.
- IdIncrMode (number): Method used to increment VLAN IDs. May take the following values: 0 (First VLAN first), 1 (Last VLAN first), 2 (All).
- Increment (number): Amount of increment per increment step for first VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
- IncrementStep (number): Frequency of first VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
- InnerEnable (bool): Enable the inner VLAN.
- InnerFirstId (number): The first ID to be used for the inner VLAN tag.
- InnerIncrement (number): Amount of increment per increment step for Inner VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
- InnerIncrementStep (number): Frequency of inner VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
- InnerPriority (number): The 802.1Q priority to be used for the inner VLAN tag.
- InnerTpid (str): The TPID value in the inner VLAN Tag.
- InnerUniqueCount (number): Number of unique inner VLAN IDs to use.
- Name (str): Name of range
- Priority (number): The 802.1Q priority to be used for the outer VLAN tag.
- Tpid (str): The TPID value in the outer VLAN Tag.
- UniqueCount (number): Number of unique first VLAN IDs to use.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(
self,
Enabled=None,
FirstId=None,
IdIncrMode=None,
Increment=None,
IncrementStep=None,
InnerEnable=None,
InnerFirstId=None,
InnerIncrement=None,
InnerIncrementStep=None,
InnerPriority=None,
InnerTpid=None,
InnerUniqueCount=None,
Name=None,
ObjectId=None,
Priority=None,
Tpid=None,
UniqueCount=None,
):
# type: (bool, int, int, int, int, bool, int, int, int, int, str, int, str, str, int, str, int) -> AncpVlanRange
"""Finds and retrieves ancpVlanRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ancpVlanRange resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all ancpVlanRange resources from the server.
Args
----
- Enabled (bool): Disabled ranges won't be configured nor validated.
- FirstId (number): The first ID to be used for the first VLAN tag.
- IdIncrMode (number): Method used to increment VLAN IDs. May take the following values: 0 (First VLAN first), 1 (Last VLAN first), 2 (All).
- Increment (number): Amount of increment per increment step for first VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
- IncrementStep (number): Frequency of first VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
- InnerEnable (bool): Enable the inner VLAN.
- InnerFirstId (number): The first ID to be used for the inner VLAN tag.
- InnerIncrement (number): Amount of increment per increment step for Inner VLAN. E.g. increment step = 10 and increment = 2 means increment VLAN ID by 2 for every 10 IPs
- InnerIncrementStep (number): Frequency of inner VLAN ID increment. E.g., value of 10 means increment VLAN ID once for every 10 IP addresses.
- InnerPriority (number): The 802.1Q priority to be used for the inner VLAN tag.
- InnerTpid (str): The TPID value in the inner VLAN Tag.
- InnerUniqueCount (number): Number of unique inner VLAN IDs to use.
- Name (str): Name of range
- ObjectId (str): Unique identifier for this object
- Priority (number): The 802.1Q priority to be used for the outer VLAN tag.
- Tpid (str): The TPID value in the outer VLAN Tag.
- UniqueCount (number): Number of unique first VLAN IDs to use.
Returns
-------
- self: This instance with matching ancpVlanRange resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
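# Editor's sketch (not part of the generated SDK): find() filters server-side
# with regex, so an exact match on a range named "access" could look like
#     vlan_range = ancp_vlan_range.find(Name="^access$", Enabled=True)
# where ancp_vlan_range is a hypothetical AncpVlanRange instance obtained from
# the parent protocol stack.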
def read(self, href):
"""Retrieves a single instance of ancpVlanRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the ancpVlanRange resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def CustomProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the customProtocolStack operation on the server.
Create custom protocol stack under /vport/protocolStack
customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
---------------------------------------------------------------
- Arg2 (list(str)): List of plugin types to be added in the new custom stack
- Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"customProtocolStack", payload=payload, response_object=None
)
def DisableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the disableProtocolStack operation on the server.
Disable a protocol under protocolStack using the class name
disableProtocolStack(Arg2=string, async_operation=bool)string
-------------------------------------------------------------
- Arg2 (str): Protocol class name to disable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"disableProtocolStack", payload=payload, response_object=None
)
def EnableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the enableProtocolStack operation on the server.
Enable a protocol under protocolStack using the class name
enableProtocolStack(Arg2=string, async_operation=bool)string
------------------------------------------------------------
- Arg2 (str): Protocol class name to enable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"enableProtocolStack", payload=payload, response_object=None
)
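# Illustrative sketch (not part of the generated API): how the *args / **kwargs
# accepted by the exec methods above are folded into the request payload.
# Positional arguments become Arg2, Arg3, ... while keyword arguments such as
# async_operation keep their own names; Arg1 always carries the object (or its
# href) that the operation is executed against.
def _build_exec_payload(arg1, *args, **kwargs):
    payload = {"Arg1": arg1}
    for i, value in enumerate(args):
        payload["Arg%s" % (i + 2)] = value
    payload.update(kwargs)
    return payload
# _build_exec_payload("/api/.../ancpVlanRange", "kOverwrite", async_operation=True)
# -> {'Arg1': '/api/.../ancpVlanRange', 'Arg2': 'kOverwrite', 'async_operation': True}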
|
PypiClean
|
/smscx-0.1.11.tar.gz/smscx-0.1.11/smscx_client/model/new_group_response.py
|
import re # noqa: F401
import sys # noqa: F401
from smscx_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
ApiModel
)
from smscx_client.exceptions import ApiAttributeError
def lazy_import():
from smscx_client.model.info_new_group import InfoNewGroup
globals()['InfoNewGroup'] = InfoNewGroup
class NewGroupResponse(ModelNormal):
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def api_types():
"""
Returns
api_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'info': (InfoNewGroup,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'info': 'info', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_api_data(cls, info, *args, **kwargs): # noqa: E501
"""NewGroupResponse - a model
Args:
info (InfoNewGroup):
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(ApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.info = info
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, info, *args, **kwargs): # noqa: E501
"""NewGroupResponse - a model
Args:
info (InfoNewGroup):
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.info = info
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_api_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/righteous_fa-1.2.1-py3-none-any.whl/righteous/quantification.py
|
import numpy as np
import pandas as pd
def qvalues( pvalues, pi0=None ):
m = float(len(pvalues))
assert(m>0)
pvalues.sort()
if pi0 is None:
pi0 = 1.
num_p, p_sum, qs = 0, 0.0, []
for p,coord,coord2 in pvalues:
num_p += 1
p_sum += p
q = pi0*p*(m/float(num_p))
qs.append((q,p,coord,coord2))
qs.reverse()
old_q = 1.0
for ix in range(len(qs)):
q = min(old_q,qs[ix][0])
old_q = q
qs[ix] = (q,qs[ix][1],qs[ix][2],qs[ix][3])
qs.reverse()
return qs
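# Input/output sketch: qvalues() expects a list of (p-value, coord, coord2)
# tuples, e.g. [(0.001, "gene_a", ""), (0.04, "gene_b", ""), (0.9, "gene_c", "")],
# and returns one (q, p, coord, coord2) tuple per input, sorted by ascending
# p-value (the input list is sorted in place). The estimate q = pi0 * p * m / rank
# followed by the step-down monotonicity pass is a Benjamini-Hochberg style FDR.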
from scipy import stats
from statsmodels.stats.anova import anova_lm as anova
import statsmodels.api as sm
import patsy
def anova_test ( formula, group_expression_df, journal_df, test_type = 'random' ) :
type_d = { 'paired':1 , 'random':2 , 'fixed':1 }
formula = formula.replace(' ','')
tmp_df = pd.concat([ journal_df, group_expression_df ])
gname = tmp_df.index.tolist()[-1]
formula_l = formula.split('~')
rename = { gname:formula_l[0] }
tmp_df.rename( index=rename, inplace=True )
tdf = tmp_df.T.iloc[ :,[ col in formula for col in tmp_df.T.columns] ].apply( pd.to_numeric )
y, X = patsy.dmatrices( formula, tdf, return_type='dataframe')
model = sm.OLS(endog=y,exog=X).fit()
model.model.data.design_info = X.design_info
table = sm.stats.anova_lm(model,typ=type_d[test_type])
return table.iloc[ [(idx in formula) for idx in table.index],-1]
def parse_test ( statistical_formula, group_expression_df , journal_df , test_type = 'random' ) :
result = anova_test( statistical_formula, group_expression_df , journal_df , test_type=test_type )
return ( result )
def prune_journal ( journal_df , remove_units_on = '_' ) :
journal_df = journal_df.loc[ [ 'label' in idx.lower() or '[' in idx for idx in journal_df.index.values] , : ].copy()
bSel = [ ('label' in idx.lower() ) for idx in journal_df.index.values]
bool_dict = { False:0 , True:1 , 'False':0 , 'True':1 }
str_journal = journal_df.iloc[ bSel ]
journal_df = journal_df.replace({'ND':np.nan})
nmr_journal = journal_df.iloc[ [ not b for b in bSel ] ].replace(bool_dict).apply( pd.to_numeric )
if not remove_units_on is None :
nmr_journal.index = [ idx.split(remove_units_on)[0] for idx in nmr_journal.index ]
journal_df = pd.concat( [nmr_journal,str_journal] )
return( journal_df )
class RCA( object ) :
def __init__(self):
self.components_ = None
self.F_ = None
self.U_ , self.S_, self.V_ = None,None,None
self.evr_ = None
self.var_ = None
def fit_transform(self,X):
Xc = X - np.mean( X , 0 )
u, s, v = np.linalg.svd( Xc, full_matrices=False )
S = np.diag( s )
self.F_ = np.dot(u,S)
self.var_ = s ** 2 / Xc.shape[0]
self.explained_variance_ratio_ = self.var_/self.var_.sum()
self.U_, self.S_, self.V_ = u,s,v
self.components_ = self.V_
return(self.F_)
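def _rca_demo():
    # Illustrative only: RCA.fit_transform centres the data and returns the
    # SVD-based scores (u @ diag(s)); explained_variance_ratio_ mirrors what a
    # PCA fit on the same matrix would report.
    X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 7.0]])
    rca = RCA()
    scores = rca.fit_transform(X)
    return scores.shape, rca.explained_variance_ratio_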
def quantify ( analyte_df , journal_df , formula , grouping_file , synonyms = None ,
delimiter = '\t' , test_type = 'random' ,
split_id = None , skip_line_char = '#'
) :
from sklearn.decomposition import PCA
dimred = PCA()
statistical_formula = formula
if not split_id is None :
nidx = [ idx.split(split_id)[-1].replace(' ','') for idx in analyte_df.index.values ]
analyte_df.index = nidx
sidx = set( analyte_df.index.values ) ; nidx=len(sidx)
eval_df = None
with open ( grouping_file ) as input:
for line in input:
if line[0] == skip_line_char :
continue
vline = line.replace('\n','').split(delimiter)
gid,gdesc,analytes_ = vline[0],vline[1],vline[2:]
if not synonyms is None :
[ analytes_.append(synonyms[a]) for a in analytes_ if a in synonyms ]
try :
group = analyte_df.loc[[a for a in analytes_ if a in sidx] ].dropna( axis=0, how='any', thresh=analyte_df.shape[1]/2 ).drop_duplicates()
except KeyError as e :
continue
L_ = len( group ); str_analytes=','.join(group.index.values)
if L_>0 :
Xnew = dimred.fit_transform(group.T.values)
group_expression_df = pd.DataFrame([Xnew.T[0]],columns=analyte_df.columns.values,index=[gid])
rdf = pd.DataFrame( parse_test( statistical_formula, group_expression_df , journal_df , test_type=test_type )).T
rdf.columns = [ col+',p' if (not ',s' in col) else col+',s' for col in rdf.columns ]
rdf['description'] = gdesc+','+str(L_)
rdf['analytes'] = str_analytes
rdf.index = [ gid ] ; ndf = pd.concat([rdf.T,group_expression_df.T]).T
if eval_df is None :
eval_df = ndf
else :
eval_df = pd.concat([eval_df,ndf])
edf = eval_df.T
for col in eval_df.columns :
if ',p' in col :
pvals = [ (p,pidx,'') for (p,pidx) in zip( eval_df.loc[:,col].values,eval_df.index.values ) ]
q_d = { q_[2]:q_[0] for q_ in qvalues( pvals ) }
l = col.split(',')[0]+',q'
edf.loc[l] = [q_d[idx] for idx in eval_df.index.values ]
return ( edf.T )
def add_spearmanr( analyte_results_df, journal_df, what='M') :
if what in set( journal_df.index.values ) :
from scipy.stats import spearmanr
K = []
patients = [ c for c in analyte_results_df.columns if '_' in c ]
for idx in analyte_results_df.index :
y = journal_df.loc[what,patients].values
x = analyte_results_df.loc[[idx],patients].values[0] # IF DUPLICATE GET FIRST
k = spearmanr( x,y )
K.append( k )
analyte_results_df['Spearman'] = K
return ( analyte_results_df )
if __name__ == '__main__' :
# qvalues() expects (p-value, coord, coord2) tuples, so wrap the raw p-values first
pvs = [ (p,i,'') for i,p in enumerate([0.00001,0.01,0.0002,0.00005,0.01,0.1,0.2,0.4,0.5,0.6,0.7,0.8,0.9,0.99,0.0114,0.15,0.23,0.20]) ]
print ( [q for q in qvalues(pvs) ] )
"""
path_ = './'
analyte_file = path_ + 'fine.txt'
journal_file = path_ + 'coarse.txt'
grouping_file = path_ + 'groups.gmt'
analyte_df = pd.read_csv(analyte_file,'\t' , index_col=0 )
journal_df = prune_journal( pd.read_csv(journal_file,'\t', index_col=0 ) )
print ( quantify( analyte_df, journal_df, 'Group ~ Var + C(Cat) ', grouping_file ) )
"""
|
PypiClean
|
/packagetest109-0.109.tar.gz/packagetest109-0.109/devdoo/datafilter.py
|
import re
import ast
import bson
from check import Check
from convert import Convert
# TODO:: Completely refactor the DataFilter class
class DataFilter:
# --------------------------------
# __init__
# --------------------------------
def __init__(self, params, status):
list_params = self.__to_list(params)
self.status = status
self.labels = {"item": dict(), "list": dict()}
self.__params = dict()
self.__prepare(self.__params, list_params, "item")
self.__conflict()
# --------------------------------
# __str__
# --------------------------------
def __str__(self):
return Convert.to_str(self.__params)
# --------------------------------
# add
# --------------------------------
def add(self, field, value):
if type(value) == str:
prepare_filter = Convert.to_str(field + "(" + value + ")")
else:
prepare_filter = unicode(field) + u"(" + unicode(value) + u")"
self.__prepare(self.__params, [prepare_filter], 'item')
# --------------------------------
# check_options_values
# --------------------------------
# TODO:: Reconsider this method's name and where it should live
@staticmethod
def check_options_values(key, value):
options = ["in", "nin", "and", "or", "nor"]
if key in options:
if type(value) != list:
value = [value]
index = 0
for item in value:
# { name: { $regex: "s3", $options: "si" } }
# Check whether the item is a regex literal of the form /pattern/options
if (type(item) == str or type(item) == unicode) and re.match(r"/([\s\S]+)/([\s\S]+)\Z", item, re.IGNORECASE):
regex = re.match(r"/([\s\S]+)/([\s\S]+)\Z", item, re.IGNORECASE)
value[index] = {
"$regex": regex.group(1),
"$options": regex.group(2)
}
index = index + 1
return value
# --------------------------------
# to_list
# --------------------------------
def to_list(self):
return self.__params
# --------------------------------
# __conflict
# --------------------------------
def __conflict(self):
fields_errors = []
# Process the list of default fields
for item_check in self.labels["item"]:
# Take one item from the list to compare against the other items in the same list
for item_field in self.labels["item"]:
# If a field conflicts with other fields, it is added to the error list
if item_check + '.' in item_field:
fields_errors.append([item_check, item_field])
if len(fields_errors) > 0:
self.status.warn(3363569, "ERROR:: CAMPOS EM CONFLITOS" + Convert.to_str(fields_errors))
# --------------------------------
# __dot_to_dict
# --------------------------------
def __dot_to_dict(self, d, dot_string, value):
if dot_string == "id":
dot_string = "_id"
options = ["eq", "gt", "gte", "lt", "lte", "ne", "exists", "not", "type", "and", "or", "nor", "in", "nin"]
if "." in dot_string:
key, rest = dot_string.split(".", 1)
if key == "id":
key = "_id"
if key not in d:
key = key if key not in options else '$' + key
d[key] = dict()
self.__dot_to_dict(d[key], rest, value)
elif dot_string in options:
if type(d) == dict:
key = '$' + dot_string
d[key] = self.check_options_values(dot_string, value)
else:
d[dot_string] = value
# --------------------------------
# __prepare
# --------------------------------
def __prepare(self, group_params, list_params, type_params):
for param in list_params:
regex_comparison_query_operators = r"(?:([\s\S]*|\.(eq|gt|gte|lt|lte|ne|exists|not|in|nin|type))\(([\s\S]+)\))\Z"
match_comparison_query_operators = re.match(regex_comparison_query_operators, param, re.IGNORECASE)
regex_comparison_query_operators_and_nor = r"(?:(and|nor)\[([\s\S]*)\])\Z"
match_comparison_query_operators_and_nor = re.match(regex_comparison_query_operators_and_nor, param, re.IGNORECASE)
regex_comparison_query_operators_or = r"(or)\{([\s\S]*)\}\Z"
match_comparison_query_operators_or = re.match(regex_comparison_query_operators_or, param, re.IGNORECASE)
if match_comparison_query_operators:
group_filter = match_comparison_query_operators.group(1)
group_value = match_comparison_query_operators.group(3)
if re.match(r"^([a-z0-9_]+\.?)+$", group_filter, re.IGNORECASE):
self.labels[type_params][group_filter] = True
self.__dot_to_dict(group_params, group_filter, self.__values(group_value))
else:
self.status.warn(87541545, "ERROR - Filtro inválido" + Convert.to_str(group_filter))
elif match_comparison_query_operators_and_nor:
group_filter = match_comparison_query_operators_and_nor.group(1)
group_value = match_comparison_query_operators_and_nor.group(2)
if not re.match(r"(?:[a-z0-9]+\([\s\S]+\)[^+])\Z", group_value, re.IGNORECASE):
if re.match(r"^([a-z0-9_]+\.?)+$", group_filter, re.IGNORECASE):
group = group_params["$" + group_filter] = []
self.__values_list(group, group_value, "+")
else:
self.status.warn(523152, "ERROR FILTRO- de separador '|' nao encontrado" + Convert.to_str(group_filter))
else:
self.status.warn(523152, "ERROR FILTRO- de separador '|' nao encontrado" + Convert.to_str(group_filter))
elif match_comparison_query_operators_or:
group_filter = match_comparison_query_operators_or.group(1)
group_value = match_comparison_query_operators_or.group(2)
if not re.match(r"(?:[a-z0-9]+\([\s\S]+\)[^|])\Z", group_value, re.IGNORECASE):
if re.match(r"^([a-z0-9_]+\.?)+$", group_filter, re.IGNORECASE):
group = group_params["$" + group_filter] = []
self.__values_list(group, group_value, "|")
else:
self.status.warn(5454545454, "Filtro inválido" + Convert.to_str(group_filter))
else:
self.status.warn(523152, "ERROR FILTRO- de separador '|' nao encontrado" + Convert.to_str(group_filter))
else:
self.status.warn(5454545454, "Filtro inválido XXXXXXX")
# --------------------------------
# __to_list
# --------------------------------
def __to_list(self, params, sep=";"):
list_params = []
if type(params) == str or type(params) == unicode:
params = params.split(sep)
for item in params:
if Check.is_empty(item) is False:
list_params.append(item)
return list_params
# --------------------------------
# __values
# --------------------------------
def __values(self, value):
# Check whether the value is numeric
if value.isdigit() or value.replace('.', '', 1).isdigit():
value = ast.literal_eval(value)
# Check whether the value is an array
elif "," in value:
value = value.split(",")
i = 0
for item in value:
if item.isdigit() or item.replace('.', '', 1).isdigit():
value[i] = ast.literal_eval(item)
i += 1
# Check whether the value is a boolean
elif value.lower() in ["true", "1"]:
value = True
elif value.lower() in ["false", "0"]:
value = False
# Check whether the value is an ObjectId
elif re.match("^[0-9a-f]{24}$", value):
try:
value = bson.objectid.ObjectId(Convert.to_str(value))
except Exception as inst:
value = {"error": inst.args}
# Check whether the value is a date
# Check whether the value is a timestamp
# Check whether the value is a list
if type(value) == list:
index = 0
# Walk the list and convert any element that looks like an ObjectId
for item in value:
# Only string elements are ObjectId candidates
if (type(item) == str or type(item) == unicode) and re.match("^[0-9a-f]{24}$", item):
try:
# Convert the string into an ObjectId
value[index] = bson.objectid.ObjectId(Convert.to_str(item))
except Exception as inst:
value = {"error": inst.args}
# advance the index for every element, not only for converted ones
index = index + 1
return value
# --------------------------------
# __values_list
# --------------------------------
def __values_list(self, group_params, values, sep):
list_params = self.__to_list(values, sep)
if len(list_params) > 0:
for item in list_params:
item_filter = dict()
if re.match(r"(?:([\s\S]*|\.(eq|gt|gte|lt|lte|ne|exists|not|in|nin|type))\(([\s\S]+)\))\Z", item, re.IGNORECASE):
self.__prepare(item_filter, [item], "list")
elif re.match(r"(or){([\s\S]*)}\Z", item, re.IGNORECASE):
list_items = item.split(sep)
self.__prepare(item_filter, list_items, "list")
if len(item_filter.keys()) > 0:
group_params.append(item_filter)
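# Standalone sketch (illustration only, not used by DataFilter itself) of the
# dot-notation expansion performed by __dot_to_dict: trailing operator segments
# become MongoDB "$" operators, everything else nests as sub-documents.
_EXAMPLE_OPERATORS = {"eq", "gt", "gte", "lt", "lte", "ne", "exists", "not", "type", "and", "or", "nor", "in", "nin"}
def _example_dot_to_dict(target, dotted_key, value):
    if "." in dotted_key:
        head, rest = dotted_key.split(".", 1)
        target.setdefault(head, {})
        _example_dot_to_dict(target[head], rest, value)
    elif dotted_key in _EXAMPLE_OPERATORS:
        target["$" + dotted_key] = value
    else:
        target[dotted_key] = value
# _example_dot_to_dict({}, "age.gte", 18) fills the dict as {"age": {"$gte": 18}},
# which is the shape DataFilter produces for a query string such as "age.gte(18)".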
|
PypiClean
|
/django-bootstrap-modal-forms-3.0.4.tar.gz/django-bootstrap-modal-forms-3.0.4/bootstrap_modal_forms/static/js/bootstrap5.modal.forms.min.js
|
const modalFormCallback=function(e){let t=document.querySelector(e.modalID),n=t.querySelector(e.modalContent),o=bootstrap.Modal.getInstance(t);null===o&&(o=new bootstrap.Modal(t,{keyboard:!1})),fetch(e.formURL).then(e=>e.text()).then(e=>{n.innerHTML=e}).then(()=>{o.show();let n=t.querySelector(e.modalForm);n&&(n.setAttribute("action",e.formURL),addEventHandlers(t,n,e))})},addEventHandlers=function(e,t,n){t.addEventListener("submit",e=>{if(!1===n.isDeleteForm)return e.preventDefault(),isFormValid(n,submitForm),!1}),e.addEventListener("hidden.bs.modal",t=>{let o=e.querySelector(n.modalContent);for(;o.lastChild;)o.removeChild(o.lastChild)})},isFormValid=function(e,t){let n=document.querySelector(e.modalID),o=n.querySelector(e.modalForm),r=new Headers;r.append("X-Requested-With","XMLHttpRequest");n.querySelector('button[type="submit"]').disabled=!0,fetch(o.getAttribute("action"),{headers:r,method:o.getAttribute("method"),body:new FormData(o)}).then(e=>e.text()).then(r=>{if(r.includes(e.errorClass)){if(n.querySelector(e.modalContent).innerHTML=r,!(o=n.querySelector(e.modalForm))){console.error("no form present in response");return}o.setAttribute("action",e.formURL),addEventHandlers(n,o,e)}else t(e)})},submitForm=function(e){let t=document.querySelector(e.modalID),n=t.querySelector(e.modalForm);if(e.asyncUpdate){if(validateAsyncSettings(e.asyncSettings)){let o=e.asyncSettings,r=new FormData(n);r.append("asyncUpdate","True"),fetch(n.getAttribute("action"),{method:n.getAttribute("method"),body:r}).then(e=>e.text()).then(r=>{let a=document.body;if(void 0===a){console.error("django-bootstrap-modal-forms: <body> element missing in your html.");return}let s=new DOMParser().parseFromString(o.successMessage,"text/xml");a.insertBefore(s.firstChild,a.firstChild),o.dataUrl?fetch(o.dataUrl).then(e=>e.json()).then(r=>{let a=document.querySelector(o.dataElementId);a&&(a.innerHTML=r[o.dataKey]),o.addModalFormFunction&&o.addModalFormFunction(),o.closeOnSubmit?bootstrap.Modal.getInstance(t).hide():fetch(e.formURL).then(e=>e.text()).then(o=>{if(t.querySelector(e.modalContent).innerHTML=o,!(n=t.querySelector(e.modalForm))){console.error("no form present in response");return}n.setAttribute("action",e.formURL),addEventHandlers(t,n,e)})}):o.closeOnSubmit&&bootstrap.Modal.getInstance(t).hide()})}}else n.submit()},validateAsyncSettings=function(e){var t=[];return e.successMessage||(t.push("successMessage"),console.error("django-bootstrap-modal-forms: 'successMessage' in asyncSettings is missing.")),e.dataUrl||(t.push("dataUrl"),console.error("django-bootstrap-modal-forms: 'dataUrl' in asyncSettings is missing.")),e.dataElementId||(t.push("dataElementId"),console.error("django-bootstrap-modal-forms: 'dataElementId' in asyncSettings is missing.")),e.dataKey||(t.push("dataKey"),console.error("django-bootstrap-modal-forms: 'dataKey' in asyncSettings is missing.")),e.addModalFormFunction||(t.push("addModalFormFunction"),console.error("django-bootstrap-modal-forms: 'addModalFormFunction' in asyncSettings is missing.")),!(t.length>0)},modalForm=function(e,t){let n={modalID:"#modal",modalContent:".modal-content",modalForm:".modal-content form",formURL:null,isDeleteForm:!1,errorClass:"is-invalid",asyncUpdate:!1,asyncSettings:{closeOnSubmit:!1,successMessage:null,dataUrl:null,dataElementId:null,dataKey:null,addModalFormFunction:null},...t};return e.addEventListener("click",()=>{modalFormCallback(n)}),e};
|
PypiClean
|
/seanalgorithms2-0.2-py3-none-any.whl/seanalgorithms/divide_and_conquer/strassen_matrix_multiplication.py
|
from __future__ import annotations
import math
def default_matrix_multiplication(a: list, b: list) -> list:
"""
Multiplication only for 2x2 matrices
"""
if len(a) != 2 or len(a[0]) != 2 or len(b) != 2 or len(b[0]) != 2:
raise Exception("Matrices are not 2x2")
new_matrix = [
[a[0][0] * b[0][0] + a[0][1] * b[1][0], a[0][0] * b[0][1] + a[0][1] * b[1][1]],
[a[1][0] * b[0][0] + a[1][1] * b[1][0], a[1][0] * b[0][1] + a[1][1] * b[1][1]],
]
return new_matrix
def matrix_addition(matrix_a: list, matrix_b: list):
return [
[matrix_a[row][col] + matrix_b[row][col] for col in range(len(matrix_a[row]))]
for row in range(len(matrix_a))
]
def matrix_subtraction(matrix_a: list, matrix_b: list):
return [
[matrix_a[row][col] - matrix_b[row][col] for col in range(len(matrix_a[row]))]
for row in range(len(matrix_a))
]
def split_matrix(a: list) -> tuple[list, list, list, list]:
"""
Given an even length matrix, returns the top_left, top_right, bot_left, bot_right
quadrant.
>>> split_matrix([[4,3,2,4],[2,3,1,1],[6,5,4,3],[8,4,1,6]])
([[4, 3], [2, 3]], [[2, 4], [1, 1]], [[6, 5], [8, 4]], [[4, 3], [1, 6]])
>>> split_matrix([
... [4,3,2,4,4,3,2,4],[2,3,1,1,2,3,1,1],[6,5,4,3,6,5,4,3],[8,4,1,6,8,4,1,6],
... [4,3,2,4,4,3,2,4],[2,3,1,1,2,3,1,1],[6,5,4,3,6,5,4,3],[8,4,1,6,8,4,1,6]
... ]) # doctest: +NORMALIZE_WHITESPACE
([[4, 3, 2, 4], [2, 3, 1, 1], [6, 5, 4, 3], [8, 4, 1, 6]], [[4, 3, 2, 4],
[2, 3, 1, 1], [6, 5, 4, 3], [8, 4, 1, 6]], [[4, 3, 2, 4], [2, 3, 1, 1],
[6, 5, 4, 3], [8, 4, 1, 6]], [[4, 3, 2, 4], [2, 3, 1, 1], [6, 5, 4, 3],
[8, 4, 1, 6]])
"""
if len(a) % 2 != 0 or len(a[0]) % 2 != 0:
raise Exception("Odd matrices are not supported!")
matrix_length = len(a)
mid = matrix_length // 2
top_right = [[a[i][j] for j in range(mid, matrix_length)] for i in range(mid)]
bot_right = [
[a[i][j] for j in range(mid, matrix_length)] for i in range(mid, matrix_length)
]
top_left = [[a[i][j] for j in range(mid)] for i in range(mid)]
bot_left = [[a[i][j] for j in range(mid)] for i in range(mid, matrix_length)]
return top_left, top_right, bot_left, bot_right
def matrix_dimensions(matrix: list) -> tuple[int, int]:
return len(matrix), len(matrix[0])
def print_matrix(matrix: list) -> None:
for i in range(len(matrix)):
print(matrix[i])
def actual_strassen(matrix_a: list, matrix_b: list) -> list:
"""
Recursive function to calculate the product of two matrices, using the Strassen
Algorithm. It only supports even length matrices.
"""
if matrix_dimensions(matrix_a) == (2, 2):
return default_matrix_multiplication(matrix_a, matrix_b)
a, b, c, d = split_matrix(matrix_a)
e, f, g, h = split_matrix(matrix_b)
t1 = actual_strassen(a, matrix_subtraction(f, h))
t2 = actual_strassen(matrix_addition(a, b), h)
t3 = actual_strassen(matrix_addition(c, d), e)
t4 = actual_strassen(d, matrix_subtraction(g, e))
t5 = actual_strassen(matrix_addition(a, d), matrix_addition(e, h))
t6 = actual_strassen(matrix_subtraction(b, d), matrix_addition(g, h))
t7 = actual_strassen(matrix_subtraction(a, c), matrix_addition(e, f))
top_left = matrix_addition(matrix_subtraction(matrix_addition(t5, t4), t2), t6)
top_right = matrix_addition(t1, t2)
bot_left = matrix_addition(t3, t4)
bot_right = matrix_subtraction(matrix_subtraction(matrix_addition(t1, t5), t3), t7)
# construct the new matrix from our 4 quadrants
new_matrix = []
for i in range(len(top_right)):
new_matrix.append(top_left[i] + top_right[i])
for i in range(len(bot_right)):
new_matrix.append(bot_left[i] + bot_right[i])
return new_matrix
def strassen(matrix1: list, matrix2: list) -> list:
"""
>>> strassen([[2,1,3],[3,4,6],[1,4,2],[7,6,7]], [[4,2,3,4],[2,1,1,1],[8,6,4,2]])
[[34, 23, 19, 15], [68, 46, 37, 28], [28, 18, 15, 12], [96, 62, 55, 48]]
>>> strassen([[3,7,5,6,9],[1,5,3,7,8],[1,4,4,5,7]], [[2,4],[5,2],[1,7],[5,5],[7,8]])
[[139, 163], [121, 134], [100, 121]]
"""
if matrix_dimensions(matrix1)[1] != matrix_dimensions(matrix2)[0]:
raise Exception(
f"Unable to multiply these matrices, please check the dimensions. \n"
f"Matrix A:{matrix1} \nMatrix B:{matrix2}"
)
dimension1 = matrix_dimensions(matrix1)
dimension2 = matrix_dimensions(matrix2)
maximum = max(max(dimension1), max(dimension2))
maxim = int(math.pow(2, math.ceil(math.log2(maximum))))
# Work on copies so the zero-padding below does not mutate the caller's matrices
new_matrix1 = [row[:] for row in matrix1]
new_matrix2 = [row[:] for row in matrix2]
# Adding zeros to the matrices so that the arrays dimensions are the same and also
# power of 2
for i in range(0, maxim):
if i < dimension1[0]:
for j in range(dimension1[1], maxim):
new_matrix1[i].append(0)
else:
new_matrix1.append([0] * maxim)
if i < dimension2[0]:
for j in range(dimension2[1], maxim):
new_matrix2[i].append(0)
else:
new_matrix2.append([0] * maxim)
final_matrix = actual_strassen(new_matrix1, new_matrix2)
# Removing the additional zeros
for i in range(0, maxim):
if i < dimension1[0]:
for j in range(dimension2[1], maxim):
final_matrix[i].pop()
else:
final_matrix.pop()
return final_matrix
if __name__ == "__main__":
matrix1 = [
[2, 3, 4, 5],
[6, 4, 3, 1],
[2, 3, 6, 7],
[3, 1, 2, 4],
[2, 3, 4, 5],
[6, 4, 3, 1],
[2, 3, 6, 7],
[3, 1, 2, 4],
[2, 3, 4, 5],
[6, 2, 3, 1],
]
matrix2 = [[0, 2, 1, 1], [16, 2, 3, 3], [2, 2, 7, 7], [13, 11, 22, 4]]
print(strassen(matrix1, matrix2))
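# Optional sanity check (assumes numpy is installed; it is not otherwise required
# by this module): compare strassen() with numpy's reference matrix product.
import numpy as np
expected = (np.array(matrix1) @ np.array(matrix2)).tolist()
assert strassen(matrix1, matrix2) == expected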
|
PypiClean
|
/fastapi-crudrouter-mongodb-0.0.7.tar.gz/fastapi-crudrouter-mongodb-0.0.7/fastapi_crudrouter_mongodb/core/router/embed/CRUDEmbedRouterRepository.py
|
from bson import ObjectId
from ...models.mongo_model import MongoModel
async def get_all(db, id: str, parent_collection_name: str, embed_name: str, model: MongoModel) -> list:
"""
Get all embeded documents from the database
"""
try:
documents = db[parent_collection_name].aggregate([
{
'$match': {
'_id': ObjectId(id)
}
},
{
'$unwind': f'${embed_name}'
},
{
'$replaceRoot': {
'newRoot': f"${embed_name}"
}
}
])
models = []
async for document in documents:
models.append(model.from_mongo(document))
return models
except Exception as e:
return []
async def get_one(db, id: str, embed_id: str, parent_collection_name: str, embed_name: str, model: MongoModel) -> MongoModel:
"""
Get a document from the database
"""
try:
cursor = db[parent_collection_name].aggregate([
{
'$match': {
'_id': ObjectId(id)
}
},
{
'$unwind': f'${embed_name}'
},
{
'$replaceRoot': {
'newRoot': f"${embed_name}"
}
}, {
'$match': {
'_id': ObjectId(embed_id)
}
}
])
models = []
async for document in cursor:
models.append(model.from_mongo(document))
return models[0]
except Exception as e:
return None
async def create_one(db, id: str, parent_collection_name: str, embed_name: str, data: MongoModel, model: MongoModel) -> MongoModel:
"""
Create a new document in the database
"""
document_mongo = data.to_mongo(add_id=True)
document = await db[parent_collection_name].update_one(
{'_id': ObjectId(id)},
{'$push': {embed_name: document_mongo}}
)
return model.from_mongo(document_mongo)
async def update_one(db, id: str, embed_id: str, parent_collection_name: str, embed_name: str, data: MongoModel, model: MongoModel) -> MongoModel:
"""
Update a document in the database
"""
document_mongo = data.to_mongo(add_id=False)
document = await db[parent_collection_name].update_one(
{'_id': ObjectId(id), f'{embed_name}._id': ObjectId(embed_id)},
{'$set': {f'{embed_name}.$': document_mongo}}
)
return model.from_mongo(document_mongo)
async def delete_one(db, id: str, embed_id: str, parent_collection_name: str, embed_name: str, model: MongoModel) -> MongoModel:
"""
Delete a document in the database
"""
try:
document = await db[parent_collection_name].update_one(
{'_id': ObjectId(id)},
{'$pull': {f'{embed_name}': {'_id': ObjectId(embed_id)}}}
)
return {'deleted': True} if await get_one(db, id, embed_id, parent_collection_name, embed_name, model) is None else None
except Exception as e:
return None
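def _example_embed_pipeline(parent_id: str, embed_name: str) -> list:
    """Illustration only (not used by the router): the aggregation shape shared by
    get_all()/get_one() -- match the parent document, unwind the embedded array,
    then promote each embedded document to the root so it can be passed to
    model.from_mongo()."""
    return [
        {'$match': {'_id': ObjectId(parent_id)}},
        {'$unwind': f'${embed_name}'},
        {'$replaceRoot': {'newRoot': f'${embed_name}'}},
    ]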
|
PypiClean
|
/Dovetail-1.0beta2.tar.gz/Dovetail-1.0beta2/dovetail/util/exception.py
|
def pp_exception(exception):
"""Pretty-print an exception"""
return "{0}: {1}".format(type(exception).__name__, str(exception))
def stack_trace():
"""Obtains then prints a formatted stack-trace to std_err"""
from sys import exc_info, stderr
exc_type, exc_value, exc_traceback = exc_info()
from traceback import print_exception
print_exception(exc_type, exc_value, exc_traceback, file=stderr)
class DovetailException(Exception):
"""The superclass of all Dovetail exceptions"""
pass
class CircularTaskDependency(DovetailException):
"""An exception raised when a Task has a circular reference on another Task.
This is a condition that is detected at run-time.
The Exception contains two members that record the dependency:
* self.task: The Task on which the circular dependency was detected, and
* self.stack: The call-stack at the point of the detection. The call-stack
is a list of Tasks
"""
def __init__(self, task, stack, *kw, **kwargs):
DovetailException.__init__(self, *kw, **kwargs)
self.task = task
self.stack = list(stack)
class MissingRequirement(DovetailException):
"""An exception thrown when easy_install cannot resolve or install a requirement."""
pass
class NoSuchModule(DovetailException):
"""An exception raised when either a module cannot be found (has not been
loaded by Python, or when it is not a BuildModule"""
pass
class NoSuchTask(DovetailException):
"""An exception raised when a Task is referenced and it has not be loaded"""
pass
class NonZeroReturnCode(DovetailException):
"""An exception raised by @check_result when the task returns
with a non-zero value"""
pass
class Skipped(DovetailException):
"""An exception raised by the @fail_if_skipped directive if the Task was skipped:"""
pass
class FailIf(DovetailException):
"""An exception raised by @fail_if when the predicate is False"""
pass
class NoSuchDirectory(DovetailException):
"""An exception raised when a directive requires use of a directory which does
not exist or is not readable"""
pass
class CommandLineException(DovetailException):
"""The superclass of all exceptions that should be handled by the command-line
routine and for which no stack trace should be printed"""
def __init__(self, return_code, message):
DovetailException.__init__(self, message)
self.return_code = return_code
def additional_help(self):
"""Returns an additional help string to help the user localize the problem"""
from StringIO import StringIO
from dovetail.config import print_usage
string_buffer = StringIO()
print_usage(string_buffer)
message = string_buffer.getvalue()
string_buffer.close()
return message
class InvalidBuildFile(CommandLineException):
"""An exception raised when a build file is specified, but cannot be found,
is not readable or has the wrong extension"""
def __init__(self, message):
CommandLineException.__init__(self, 33, message)
class InvalidEnvironment(CommandLineException):
"""An exception stating that the specified environment is badly configured, inaccessible
or otherwise not valid"""
def __init__(self, message):
CommandLineException.__init__(self, 34, message)
class InvalidTask(CommandLineException):
"""An exception raised when a Task file is specified on the command line,
but cannot be found"""
def __init__(self, message):
CommandLineException.__init__(self, 35, message)
class UnknownReport(CommandLineException):
"""An exception raised if requested to generate a report that is
not known to it"""
def __init__(self, return_code, message):
CommandLineException.__init__(self, return_code, message)
class Terminate(CommandLineException):
"""An exception raised for a generic problem in parsing the command line
arguments or if Dovetail crashes during a build.
.. note::
This is not raised if a build task fails or throws an exception"""
def __init__(self, return_code, message):
CommandLineException.__init__(self, return_code, message)
def additional_help(self):
return "Exception thrown in engine, directives or predicates"
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/domain/KbdishInfo.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KbdishPracticeInfo import KbdishPracticeInfo
from alipay.aop.api.domain.KbdishSkuInfo import KbdishSkuInfo
from alipay.aop.api.domain.KbdishMaterialBindingInfo import KbdishMaterialBindingInfo
from alipay.aop.api.domain.KbdishPropertyInfo import KbdishPropertyInfo
class KbdishInfo(object):
def __init__(self):
self._catetory_big_id = None
self._catetory_small_id = None
self._create_user = None
self._cur_price_flag = None
self._default_in_carts = None
self._default_in_carts_detail = None
self._dish_cuisine = None
self._dish_id = None
self._dish_img = None
self._dish_name = None
self._dish_practice_list = None
self._dish_sku_list = None
self._dish_version = None
self._en_remember_code = None
self._ext_content = None
self._goods_id = None
self._material_binding_info_list = None
self._merchant_id = None
self._min_serving = None
self._mini_add_num = None
self._nb_remember_code = None
self._not_count_threshold = None
self._out_dish_id = None
self._property_info_list = None
self._remarks = None
self._shop_id = None
self._status = None
self._tags = None
self._type_big = None
self._type_small = None
self._unit_id = None
self._update_user = None
@property
def catetory_big_id(self):
return self._catetory_big_id
@catetory_big_id.setter
def catetory_big_id(self, value):
self._catetory_big_id = value
@property
def catetory_small_id(self):
return self._catetory_small_id
@catetory_small_id.setter
def catetory_small_id(self, value):
self._catetory_small_id = value
@property
def create_user(self):
return self._create_user
@create_user.setter
def create_user(self, value):
self._create_user = value
@property
def cur_price_flag(self):
return self._cur_price_flag
@cur_price_flag.setter
def cur_price_flag(self, value):
self._cur_price_flag = value
@property
def default_in_carts(self):
return self._default_in_carts
@default_in_carts.setter
def default_in_carts(self, value):
self._default_in_carts = value
@property
def default_in_carts_detail(self):
return self._default_in_carts_detail
@default_in_carts_detail.setter
def default_in_carts_detail(self, value):
self._default_in_carts_detail = value
@property
def dish_cuisine(self):
return self._dish_cuisine
@dish_cuisine.setter
def dish_cuisine(self, value):
self._dish_cuisine = value
@property
def dish_id(self):
return self._dish_id
@dish_id.setter
def dish_id(self, value):
self._dish_id = value
@property
def dish_img(self):
return self._dish_img
@dish_img.setter
def dish_img(self, value):
self._dish_img = value
@property
def dish_name(self):
return self._dish_name
@dish_name.setter
def dish_name(self, value):
self._dish_name = value
@property
def dish_practice_list(self):
return self._dish_practice_list
@dish_practice_list.setter
def dish_practice_list(self, value):
if isinstance(value, list):
self._dish_practice_list = list()
for i in value:
if isinstance(i, KbdishPracticeInfo):
self._dish_practice_list.append(i)
else:
self._dish_practice_list.append(KbdishPracticeInfo.from_alipay_dict(i))
@property
def dish_sku_list(self):
return self._dish_sku_list
@dish_sku_list.setter
def dish_sku_list(self, value):
if isinstance(value, list):
self._dish_sku_list = list()
for i in value:
if isinstance(i, KbdishSkuInfo):
self._dish_sku_list.append(i)
else:
self._dish_sku_list.append(KbdishSkuInfo.from_alipay_dict(i))
@property
def dish_version(self):
return self._dish_version
@dish_version.setter
def dish_version(self, value):
self._dish_version = value
@property
def en_remember_code(self):
return self._en_remember_code
@en_remember_code.setter
def en_remember_code(self, value):
self._en_remember_code = value
@property
def ext_content(self):
return self._ext_content
@ext_content.setter
def ext_content(self, value):
self._ext_content = value
@property
def goods_id(self):
return self._goods_id
@goods_id.setter
def goods_id(self, value):
self._goods_id = value
@property
def material_binding_info_list(self):
return self._material_binding_info_list
@material_binding_info_list.setter
def material_binding_info_list(self, value):
if isinstance(value, list):
self._material_binding_info_list = list()
for i in value:
if isinstance(i, KbdishMaterialBindingInfo):
self._material_binding_info_list.append(i)
else:
self._material_binding_info_list.append(KbdishMaterialBindingInfo.from_alipay_dict(i))
@property
def merchant_id(self):
return self._merchant_id
@merchant_id.setter
def merchant_id(self, value):
self._merchant_id = value
@property
def min_serving(self):
return self._min_serving
@min_serving.setter
def min_serving(self, value):
self._min_serving = value
@property
def mini_add_num(self):
return self._mini_add_num
@mini_add_num.setter
def mini_add_num(self, value):
self._mini_add_num = value
@property
def nb_remember_code(self):
return self._nb_remember_code
@nb_remember_code.setter
def nb_remember_code(self, value):
self._nb_remember_code = value
@property
def not_count_threshold(self):
return self._not_count_threshold
@not_count_threshold.setter
def not_count_threshold(self, value):
self._not_count_threshold = value
@property
def out_dish_id(self):
return self._out_dish_id
@out_dish_id.setter
def out_dish_id(self, value):
self._out_dish_id = value
@property
def property_info_list(self):
return self._property_info_list
@property_info_list.setter
def property_info_list(self, value):
if isinstance(value, list):
self._property_info_list = list()
for i in value:
if isinstance(i, KbdishPropertyInfo):
self._property_info_list.append(i)
else:
self._property_info_list.append(KbdishPropertyInfo.from_alipay_dict(i))
@property
def remarks(self):
return self._remarks
@remarks.setter
def remarks(self, value):
self._remarks = value
@property
def shop_id(self):
return self._shop_id
@shop_id.setter
def shop_id(self, value):
self._shop_id = value
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
@property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
self._tags = value
@property
def type_big(self):
return self._type_big
@type_big.setter
def type_big(self, value):
self._type_big = value
@property
def type_small(self):
return self._type_small
@type_small.setter
def type_small(self, value):
self._type_small = value
@property
def unit_id(self):
return self._unit_id
@unit_id.setter
def unit_id(self, value):
self._unit_id = value
@property
def update_user(self):
return self._update_user
@update_user.setter
def update_user(self, value):
self._update_user = value
def to_alipay_dict(self):
params = dict()
if self.catetory_big_id:
if hasattr(self.catetory_big_id, 'to_alipay_dict'):
params['catetory_big_id'] = self.catetory_big_id.to_alipay_dict()
else:
params['catetory_big_id'] = self.catetory_big_id
if self.catetory_small_id:
if hasattr(self.catetory_small_id, 'to_alipay_dict'):
params['catetory_small_id'] = self.catetory_small_id.to_alipay_dict()
else:
params['catetory_small_id'] = self.catetory_small_id
if self.create_user:
if hasattr(self.create_user, 'to_alipay_dict'):
params['create_user'] = self.create_user.to_alipay_dict()
else:
params['create_user'] = self.create_user
if self.cur_price_flag:
if hasattr(self.cur_price_flag, 'to_alipay_dict'):
params['cur_price_flag'] = self.cur_price_flag.to_alipay_dict()
else:
params['cur_price_flag'] = self.cur_price_flag
if self.default_in_carts:
if hasattr(self.default_in_carts, 'to_alipay_dict'):
params['default_in_carts'] = self.default_in_carts.to_alipay_dict()
else:
params['default_in_carts'] = self.default_in_carts
if self.default_in_carts_detail:
if hasattr(self.default_in_carts_detail, 'to_alipay_dict'):
params['default_in_carts_detail'] = self.default_in_carts_detail.to_alipay_dict()
else:
params['default_in_carts_detail'] = self.default_in_carts_detail
if self.dish_cuisine:
if hasattr(self.dish_cuisine, 'to_alipay_dict'):
params['dish_cuisine'] = self.dish_cuisine.to_alipay_dict()
else:
params['dish_cuisine'] = self.dish_cuisine
if self.dish_id:
if hasattr(self.dish_id, 'to_alipay_dict'):
params['dish_id'] = self.dish_id.to_alipay_dict()
else:
params['dish_id'] = self.dish_id
if self.dish_img:
if hasattr(self.dish_img, 'to_alipay_dict'):
params['dish_img'] = self.dish_img.to_alipay_dict()
else:
params['dish_img'] = self.dish_img
if self.dish_name:
if hasattr(self.dish_name, 'to_alipay_dict'):
params['dish_name'] = self.dish_name.to_alipay_dict()
else:
params['dish_name'] = self.dish_name
if self.dish_practice_list:
if isinstance(self.dish_practice_list, list):
for i in range(0, len(self.dish_practice_list)):
element = self.dish_practice_list[i]
if hasattr(element, 'to_alipay_dict'):
self.dish_practice_list[i] = element.to_alipay_dict()
if hasattr(self.dish_practice_list, 'to_alipay_dict'):
params['dish_practice_list'] = self.dish_practice_list.to_alipay_dict()
else:
params['dish_practice_list'] = self.dish_practice_list
if self.dish_sku_list:
if isinstance(self.dish_sku_list, list):
for i in range(0, len(self.dish_sku_list)):
element = self.dish_sku_list[i]
if hasattr(element, 'to_alipay_dict'):
self.dish_sku_list[i] = element.to_alipay_dict()
if hasattr(self.dish_sku_list, 'to_alipay_dict'):
params['dish_sku_list'] = self.dish_sku_list.to_alipay_dict()
else:
params['dish_sku_list'] = self.dish_sku_list
if self.dish_version:
if hasattr(self.dish_version, 'to_alipay_dict'):
params['dish_version'] = self.dish_version.to_alipay_dict()
else:
params['dish_version'] = self.dish_version
if self.en_remember_code:
if hasattr(self.en_remember_code, 'to_alipay_dict'):
params['en_remember_code'] = self.en_remember_code.to_alipay_dict()
else:
params['en_remember_code'] = self.en_remember_code
if self.ext_content:
if hasattr(self.ext_content, 'to_alipay_dict'):
params['ext_content'] = self.ext_content.to_alipay_dict()
else:
params['ext_content'] = self.ext_content
if self.goods_id:
if hasattr(self.goods_id, 'to_alipay_dict'):
params['goods_id'] = self.goods_id.to_alipay_dict()
else:
params['goods_id'] = self.goods_id
if self.material_binding_info_list:
if isinstance(self.material_binding_info_list, list):
for i in range(0, len(self.material_binding_info_list)):
element = self.material_binding_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.material_binding_info_list[i] = element.to_alipay_dict()
if hasattr(self.material_binding_info_list, 'to_alipay_dict'):
params['material_binding_info_list'] = self.material_binding_info_list.to_alipay_dict()
else:
params['material_binding_info_list'] = self.material_binding_info_list
if self.merchant_id:
if hasattr(self.merchant_id, 'to_alipay_dict'):
params['merchant_id'] = self.merchant_id.to_alipay_dict()
else:
params['merchant_id'] = self.merchant_id
if self.min_serving:
if hasattr(self.min_serving, 'to_alipay_dict'):
params['min_serving'] = self.min_serving.to_alipay_dict()
else:
params['min_serving'] = self.min_serving
if self.mini_add_num:
if hasattr(self.mini_add_num, 'to_alipay_dict'):
params['mini_add_num'] = self.mini_add_num.to_alipay_dict()
else:
params['mini_add_num'] = self.mini_add_num
if self.nb_remember_code:
if hasattr(self.nb_remember_code, 'to_alipay_dict'):
params['nb_remember_code'] = self.nb_remember_code.to_alipay_dict()
else:
params['nb_remember_code'] = self.nb_remember_code
if self.not_count_threshold:
if hasattr(self.not_count_threshold, 'to_alipay_dict'):
params['not_count_threshold'] = self.not_count_threshold.to_alipay_dict()
else:
params['not_count_threshold'] = self.not_count_threshold
if self.out_dish_id:
if hasattr(self.out_dish_id, 'to_alipay_dict'):
params['out_dish_id'] = self.out_dish_id.to_alipay_dict()
else:
params['out_dish_id'] = self.out_dish_id
if self.property_info_list:
if isinstance(self.property_info_list, list):
for i in range(0, len(self.property_info_list)):
element = self.property_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.property_info_list[i] = element.to_alipay_dict()
if hasattr(self.property_info_list, 'to_alipay_dict'):
params['property_info_list'] = self.property_info_list.to_alipay_dict()
else:
params['property_info_list'] = self.property_info_list
if self.remarks:
if hasattr(self.remarks, 'to_alipay_dict'):
params['remarks'] = self.remarks.to_alipay_dict()
else:
params['remarks'] = self.remarks
if self.shop_id:
if hasattr(self.shop_id, 'to_alipay_dict'):
params['shop_id'] = self.shop_id.to_alipay_dict()
else:
params['shop_id'] = self.shop_id
if self.status:
if hasattr(self.status, 'to_alipay_dict'):
params['status'] = self.status.to_alipay_dict()
else:
params['status'] = self.status
if self.tags:
if hasattr(self.tags, 'to_alipay_dict'):
params['tags'] = self.tags.to_alipay_dict()
else:
params['tags'] = self.tags
if self.type_big:
if hasattr(self.type_big, 'to_alipay_dict'):
params['type_big'] = self.type_big.to_alipay_dict()
else:
params['type_big'] = self.type_big
if self.type_small:
if hasattr(self.type_small, 'to_alipay_dict'):
params['type_small'] = self.type_small.to_alipay_dict()
else:
params['type_small'] = self.type_small
if self.unit_id:
if hasattr(self.unit_id, 'to_alipay_dict'):
params['unit_id'] = self.unit_id.to_alipay_dict()
else:
params['unit_id'] = self.unit_id
if self.update_user:
if hasattr(self.update_user, 'to_alipay_dict'):
params['update_user'] = self.update_user.to_alipay_dict()
else:
params['update_user'] = self.update_user
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KbdishInfo()
if 'catetory_big_id' in d:
o.catetory_big_id = d['catetory_big_id']
if 'catetory_small_id' in d:
o.catetory_small_id = d['catetory_small_id']
if 'create_user' in d:
o.create_user = d['create_user']
if 'cur_price_flag' in d:
o.cur_price_flag = d['cur_price_flag']
if 'default_in_carts' in d:
o.default_in_carts = d['default_in_carts']
if 'default_in_carts_detail' in d:
o.default_in_carts_detail = d['default_in_carts_detail']
if 'dish_cuisine' in d:
o.dish_cuisine = d['dish_cuisine']
if 'dish_id' in d:
o.dish_id = d['dish_id']
if 'dish_img' in d:
o.dish_img = d['dish_img']
if 'dish_name' in d:
o.dish_name = d['dish_name']
if 'dish_practice_list' in d:
o.dish_practice_list = d['dish_practice_list']
if 'dish_sku_list' in d:
o.dish_sku_list = d['dish_sku_list']
if 'dish_version' in d:
o.dish_version = d['dish_version']
if 'en_remember_code' in d:
o.en_remember_code = d['en_remember_code']
if 'ext_content' in d:
o.ext_content = d['ext_content']
if 'goods_id' in d:
o.goods_id = d['goods_id']
if 'material_binding_info_list' in d:
o.material_binding_info_list = d['material_binding_info_list']
if 'merchant_id' in d:
o.merchant_id = d['merchant_id']
if 'min_serving' in d:
o.min_serving = d['min_serving']
if 'mini_add_num' in d:
o.mini_add_num = d['mini_add_num']
if 'nb_remember_code' in d:
o.nb_remember_code = d['nb_remember_code']
if 'not_count_threshold' in d:
o.not_count_threshold = d['not_count_threshold']
if 'out_dish_id' in d:
o.out_dish_id = d['out_dish_id']
if 'property_info_list' in d:
o.property_info_list = d['property_info_list']
if 'remarks' in d:
o.remarks = d['remarks']
if 'shop_id' in d:
o.shop_id = d['shop_id']
if 'status' in d:
o.status = d['status']
if 'tags' in d:
o.tags = d['tags']
if 'type_big' in d:
o.type_big = d['type_big']
if 'type_small' in d:
o.type_small = d['type_small']
if 'unit_id' in d:
o.unit_id = d['unit_id']
if 'update_user' in d:
o.update_user = d['update_user']
return o
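if __name__ == "__main__":
    # Illustrative round trip (not part of the SDK): from_alipay_dict() builds a
    # KbdishInfo from a plain dict and to_alipay_dict() serialises the populated
    # fields back; attributes that were never set are simply omitted.
    sample = {"dish_id": "D001", "dish_name": "Mapo Tofu", "shop_id": "S42"}
    dish = KbdishInfo.from_alipay_dict(sample)
    assert dish.dish_name == "Mapo Tofu"
    assert dish.to_alipay_dict() == sample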
|
PypiClean
|
/tb-rest-client-3.5.tar.gz/tb-rest-client-3.5/tb_rest_client/models/models_pe/page_data_contact_basedobject.py
|
# Copyright 2023. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pprint
import re # noqa: F401
import six
class PageDataContactBasedobject(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'data': 'list[ContactBasedobject]',
'total_pages': 'int',
'total_elements': 'int',
'has_next': 'bool'
}
attribute_map = {
'data': 'data',
'total_pages': 'totalPages',
'total_elements': 'totalElements',
'has_next': 'hasNext'
}
def __init__(self, data=None, total_pages=None, total_elements=None, has_next=None): # noqa: E501
"""PageDataContactBasedobject - a model defined in Swagger""" # noqa: E501
self._data = None
self._total_pages = None
self._total_elements = None
self._has_next = None
self.discriminator = None
if data is not None:
self.data = data
if total_pages is not None:
self.total_pages = total_pages
if total_elements is not None:
self.total_elements = total_elements
if has_next is not None:
self.has_next = has_next
@property
def data(self):
"""Gets the data of this PageDataContactBasedobject. # noqa: E501
Array of the entities # noqa: E501
:return: The data of this PageDataContactBasedobject. # noqa: E501
:rtype: list[ContactBasedobject]
"""
return self._data
@data.setter
def data(self, data):
"""Sets the data of this PageDataContactBasedobject.
Array of the entities # noqa: E501
:param data: The data of this PageDataContactBasedobject. # noqa: E501
:type: list[ContactBasedobject]
"""
self._data = data
@property
def total_pages(self):
"""Gets the total_pages of this PageDataContactBasedobject. # noqa: E501
Total number of available pages. Calculated based on the 'pageSize' request parameter and total number of entities that match search criteria # noqa: E501
:return: The total_pages of this PageDataContactBasedobject. # noqa: E501
:rtype: int
"""
return self._total_pages
@total_pages.setter
def total_pages(self, total_pages):
"""Sets the total_pages of this PageDataContactBasedobject.
Total number of available pages. Calculated based on the 'pageSize' request parameter and total number of entities that match search criteria # noqa: E501
:param total_pages: The total_pages of this PageDataContactBasedobject. # noqa: E501
:type: int
"""
self._total_pages = total_pages
@property
def total_elements(self):
"""Gets the total_elements of this PageDataContactBasedobject. # noqa: E501
Total number of elements in all available pages # noqa: E501
:return: The total_elements of this PageDataContactBasedobject. # noqa: E501
:rtype: int
"""
return self._total_elements
@total_elements.setter
def total_elements(self, total_elements):
"""Sets the total_elements of this PageDataContactBasedobject.
Total number of elements in all available pages # noqa: E501
:param total_elements: The total_elements of this PageDataContactBasedobject. # noqa: E501
:type: int
"""
self._total_elements = total_elements
@property
def has_next(self):
"""Gets the has_next of this PageDataContactBasedobject. # noqa: E501
'false' value indicates the end of the result set # noqa: E501
:return: The has_next of this PageDataContactBasedobject. # noqa: E501
:rtype: bool
"""
return self._has_next
@has_next.setter
def has_next(self, has_next):
"""Sets the has_next of this PageDataContactBasedobject.
'false' value indicates the end of the result set # noqa: E501
:param has_next: The has_next of this PageDataContactBasedobject. # noqa: E501
:type: bool
"""
self._has_next = has_next
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PageDataContactBasedobject, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PageDataContactBasedobject):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
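if __name__ == "__main__":
    # Illustration only (not part of the generated client): attributes use
    # snake_case on the Python side, while attribute_map records the camelCase
    # keys used on the wire; to_dict() reports the Python-side names.
    page = PageDataContactBasedobject(data=[], total_pages=1, total_elements=0, has_next=False)
    print(page.to_dict())
    # -> {'data': [], 'total_pages': 1, 'total_elements': 0, 'has_next': False}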
|
PypiClean
|
/tulip-1.3.0.tar.gz/tulip-1.3.0/doc/index.rst
|
TuLiP User's Guide
==================
Welcome to TuLiP, the Temporal Logic Planning Toolbox. This user's guide is
intended for people who are interested in making use of TuLiP to synthesize
reactive controllers for hybrid systems and perform related functions.
Contents:
.. toctree::
:numbered:
:maxdepth: 2
intro
install
tutorial
specifications
transys
codegen
gridworlds
benchmarks
formulations
dev_guide
bibliography
Special pages:
* :ref:`genindex`
* :ref:`search`
|
PypiClean
|
/overcooked_ai-1.1.0.tar.gz/overcooked_ai-1.1.0/README.md
|
  [](https://badge.fury.io/py/overcooked-ai) [](https://github.com/HumanCompatibleAI/minerl/overcooked_ai) [](https://github.com/HumanCompatibleAI/overcooked_ai/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+label%3Abug) [](https://pepy.tech/project/overcooked-ai)
# Overcooked-AI
<p align="center">
<!-- <img src="overcooked_ai_js/images/screenshot.png" width="350"> -->
<img src="./images/layouts.gif" width="100%">
<i>5 of the available layouts. New layouts are easy to hardcode or generate programmatically.</i>
</p>
## Introduction
Overcooked-AI is a benchmark environment for fully cooperative human-AI task performance, based on the wildly popular video game [Overcooked](http://www.ghosttowngames.com/overcooked/).
The goal of the game is to deliver soups as fast as possible. Each soup requires placing up to 3 ingredients in a pot, waiting for the soup to cook, and then having an agent pick up the soup and delivering it. The agents should split up tasks on the fly and coordinate effectively in order to achieve high reward.
You can **try out the game [here](https://humancompatibleai.github.io/overcooked-demo/)** (playing with some previously trained DRL agents). To play with your own trained agents using this interface, you can use [this repo](https://github.com/HumanCompatibleAI/overcooked-demo). To run human-AI experiments, check out [this repo](https://github.com/HumanCompatibleAI/overcooked-hAI-exp). You can find some human-human gameplay data already collected [here](https://github.com/HumanCompatibleAI/human_aware_rl/tree/master/human_aware_rl/data/human/anonymized).
Check out [this repo](https://github.com/HumanCompatibleAI/human_aware_rl) for the DRL implementations compatible with the environment and reproducible results to our paper: *[On the Utility of Learning about Humans for Human-AI Coordination](https://arxiv.org/abs/1910.05789)* (also see our [blog post](https://bair.berkeley.edu/blog/2019/10/21/coordination/)).
## Installation
### Installing from PyPI
You can install the pre-compiled wheel file using pip.
```
pip install overcooked-ai
```
Note that PyPI releases are stable but infrequent. For the most up-to-date development features, build from source
### Building from source
It is useful to set up a conda environment with Python 3.7 (virtualenv works too):
```
conda create -n overcooked_ai python=3.7
conda activate overcooked_ai
```
Clone the repo
```
git clone https://github.com/HumanCompatibleAI/overcooked_ai.git
```
Finally, use Python setuptools to install it locally:
```
pip install -e overcooked_ai/
```
### Verifying Installation
When building from source, you can verify the installation by running the Overcooked unit test suite. The following commands should all be run from the `overcooked_ai` project root directory:
```
python testing/overcooked_test.py
```
If you're thinking of using the planning code extensively, you should run the full testing suite that verifies all of the Overcooked accessory tools (this can take 5-10 mins):
```
python -m unittest discover -s testing/ -p "*_test.py"
```
## Code Structure Overview
`overcooked_ai_py` contains the following modules (a minimal usage sketch follows this overview):
`mdp/`:
- `overcooked_mdp.py`: main Overcooked game logic
- `overcooked_env.py`: environment classes built on top of the Overcooked mdp
- `layout_generator.py`: functions to generate random layouts programmatically
`agents/`:
- `agent.py`: location of agent classes
- `benchmarking.py`: sample trajectories of agents (both trained and planners) and load various models
`planning/`:
- `planners.py`: near-optimal agent planning logic
- `search.py`: A* search and shortest path logic
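If you just want to poke at the environment from Python, a minimal sketch along these lines should work. The module paths follow the layout above; the class names, `from_layout_name`/`from_mdp` constructors, and the `horizon` argument are assumptions based on recent releases and may differ between versions:
```
from overcooked_ai_py.mdp.overcooked_mdp import OvercookedGridworld
from overcooked_ai_py.mdp.overcooked_env import OvercookedEnv
# Build the MDP for one of the bundled layouts, then wrap it in an environment.
mdp = OvercookedGridworld.from_layout_name("cramped_room")
env = OvercookedEnv.from_mdp(mdp, horizon=400)
print(env)  # recent versions render the current state as text
```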
## Python Visualizations
You can adapt a version of [this file](https://github.com/HumanCompatibleAI/human_aware_rl/blob/master/human_aware_rl/overcooked_interactive.py) to play games in terminal graphics with custom-defined agents.
## Further Issues and questions
If you have issues or questions, don't hesitate to contact either [Micah Carroll](https://micahcarroll.github.io) at [email protected] or [Nathan Miller](https://github.com/nathan-miller23) at [email protected]
|
PypiClean
|
/verbcl_graph4nlp_cuda102-0.1.0-py3-none-any.whl/graph4nlp/pytorch/modules/evaluation/bleu.py
|
from graph4nlp.pytorch.modules.evaluation.base import EvaluationMetricBase
from graph4nlp.pytorch.modules.evaluation.bleu_tool.bleu import Bleu
class BLEU(EvaluationMetricBase):
"""
The BLEU evaluation metric class.
Parameters
----------
n_grams: list[int]
The BLEU n-gram orders to compute. Results are returned for each entry of ``n_grams``, in order.
verbase: int, default = 0
The verbosity indicator. If set to 0, no logs are produced.
"""
def __init__(self, n_grams, verbase=0):
super(BLEU, self).__init__()
max_gram = self._check_available(n_grams)
self.scorer = Bleu(max_gram, verbase=verbase)
self.metrics = n_grams
def calculate_scores(self, ground_truth, predict):
"""
The BLEU calculation function. It will compute the BLEU scores.
Parameters
----------
ground_truth: list[string]
The ground truth (correct) target values. It is a list of strings.
predict: list[string]
The predicted target values. It is a list of strings.
Returns
-------
score: list[float]
The corpus-level BLEU_n results, one value per entry in ``n_grams``.
scores: list[list[float]]
The per-sentence BLEU_n results for each requested ``n_grams`` entry.
"""
ref_list = [list(map(str.strip, refs)) for refs in zip(ground_truth)]
refs = {idx: strippedlines for (idx, strippedlines) in enumerate(ref_list)}
hyps = {idx: [lines.strip()] for (idx, lines) in enumerate(predict)}
assert len(refs) == len(hyps)
score, scores = self.scorer.compute_score(refs, hyps)
score_ret = [score[i - 1] for i in self.metrics]
scores_ret = [scores[i - 1] for i in self.metrics]
return score_ret, scores_ret
@staticmethod
def _check_available(n_grams):
"""
The function to check the parameters.
If all checks pass, it returns the maximum value in ``n_grams``.
Parameters
----------
n_grams: list[int]
Returns
-------
max_n_grams_value: int
"""
n_grams_ok = True
if isinstance(n_grams, list):
for i in n_grams:
if not isinstance(i, int):
n_grams_ok = False
else:
n_grams_ok = False
if not n_grams_ok:
raise TypeError("argument n_grams must be in list of int.")
return max(n_grams)
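# A minimal usage sketch (illustrative only, not part of the original module):
# score one hypothesis against one reference for BLEU-1..4, using the
# calculate_scores API documented above.
if __name__ == "__main__":
    metric = BLEU(n_grams=[1, 2, 3, 4])
    corpus_scores, sentence_scores = metric.calculate_scores(
        ground_truth=["the cat sat on the mat"],
        predict=["the cat sat on a mat"],
    )
    print(corpus_scores)  # one corpus-level BLEU value per requested n-gram order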
|
PypiClean
|
/django-quickbooks-0.6.4.2.tar.gz/django-quickbooks-0.6.4.2/django_quickbooks/validators.py
|
from django_quickbooks import QUICKBOOKS_ENUMS
from django_quickbooks.exceptions import VALIDATION_MESSAGES, ValidationCode
from django_quickbooks.exceptions import ValidationOptionNotFound, ValidationError
def obj_type_validator(value):
from django_quickbooks.objects.base import BaseObject
return isinstance(value, BaseObject)
def is_primitive(value):
from django_quickbooks.objects.base import BaseObject
return isinstance(type(value), type) and not isinstance(value, (BaseObject, list))
def operation_type(value):
for key, val in value.fields.items():
if val['validator']['type'] == SchemeValidator.IDTYPE:
has_attr = hasattr(value, key)
if 'Txn' in key and has_attr:
return QUICKBOOKS_ENUMS.OPP_MOD
if 'Txn' in key and not has_attr:
return QUICKBOOKS_ENUMS.OPP_ADD
if not has_attr:
return QUICKBOOKS_ENUMS.OBJ_REF
if has_attr:
return QUICKBOOKS_ENUMS.OPP_ADD
return ''
def is_list(value):
return isinstance(value, list)
def str_type_validator(value):
if not isinstance(value, str):
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), str),
ValidationCode.INVALID_TYPE)
def es_type_validator(value):
if not isinstance(value, str) or not value.isnumeric():
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), str),
ValidationCode.INVALID_TYPE)
def id_type_validator(value):
if not isinstance(value, str):
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), str),
ValidationCode.INVALID_TYPE)
def bool_type_validator(value):
if value not in [1, 0, 'true', 'false', '1', '0']:
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), bool),
ValidationCode.INVALID_TYPE)
def min_length_validator(value, length):
if len(value) < length:
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.MIN_LENGTH] % length, ValidationCode.MIN_LENGTH)
def max_length_validator(value, length):
if len(value) > length:
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.MAX_LENGTH] % length, ValidationCode.MAX_LENGTH)
def float_type_validator(value):
if not isinstance(value, float):
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), float),
ValidationCode.INVALID_TYPE)
def required_validator(value, required=False):
if not value and required:
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.REQUIRED], ValidationCode.REQUIRED)
def many_validator(value, many=False):
if not isinstance(value, list) and many:
raise ValidationError(VALIDATION_MESSAGES[ValidationCode.INVALID_TYPE] % (type(value), list),
ValidationCode.INVALID_TYPE)
class SchemeValidator:
STRTYPE = 'STRTYPE'
ESTYPE = 'ESTYPE'
IDTYPE = 'IDTYPE'
BOOLTYPE = 'BOOLTYPE'
OBJTYPE = 'OBJTYPE'
FLOATTYPE = 'FLOATTYPE'
type_validators = dict(
STRTYPE=str_type_validator,
ESTYPE=es_type_validator,
IDTYPE=id_type_validator,
BOOLTYPE=bool_type_validator,
OBJTYPE=obj_type_validator,
FLOATTYPE=float_type_validator,
)
option_validators = dict(
min_length=min_length_validator,
max_length=max_length_validator,
)
def validate(self, field_name, value, **options):
errors = []
required = options.pop('required', False)
try:
required_validator(value, required)
except ValidationError as exc:
errors.append(exc.detail)
many = options.pop('many', False)
try:
# raises when a list was expected (many=True) but a non-list value was given
many_validator(value, many)
except ValidationError as exc:
errors.append(exc.detail)
if many:
for single_value in value:
try:
self.validate(field_name, single_value, **options)
except ValidationError as exc:
errors.append(exc.detail)
if errors:
raise ValidationError(errors, field_name)
validator = options.pop('validator')
typ = validator['type']
try:
self.type_validators[typ](value)
except ValidationError as exc:
errors.append(exc.detail)
for option, option_value in options.items():
if option not in self.option_validators:
raise ValidationOptionNotFound
try:
self.option_validators[option](value, option_value)
except ValidationError as exc:
errors.append(exc.detail)
if errors:
raise ValidationError(errors, field_name)
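# A minimal usage sketch (illustrative only, not part of the original module);
# it assumes a configured Django project, since the package-level imports above
# require one. The field name and value are made up for the example.
if __name__ == '__main__':
    validator = SchemeValidator()
    # Passes silently; raises ValidationError if any check fails.
    validator.validate(
        'Name', 'ACME Corp',
        required=True,
        validator={'type': SchemeValidator.STRTYPE},
        max_length=41,
    )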
|
PypiClean
|
/gym-robotics-1.0.1.tar.gz/gym-robotics-1.0.1/gym_robotics/envs/hand/reach.py
|
import os
from typing import Union
import numpy as np
from gym.utils.ezpickle import EzPickle
from gym_robotics.envs.hand_env import MujocoHandEnv, MujocoPyHandEnv
FINGERTIP_SITE_NAMES = [
"robot0:S_fftip",
"robot0:S_mftip",
"robot0:S_rftip",
"robot0:S_lftip",
"robot0:S_thtip",
]
DEFAULT_INITIAL_QPOS = {
"robot0:WRJ1": -0.16514339750464327,
"robot0:WRJ0": -0.31973286565062153,
"robot0:FFJ3": 0.14340512546557435,
"robot0:FFJ2": 0.32028208333591573,
"robot0:FFJ1": 0.7126053607727917,
"robot0:FFJ0": 0.6705281001412586,
"robot0:MFJ3": 0.000246444303701037,
"robot0:MFJ2": 0.3152655251085491,
"robot0:MFJ1": 0.7659800313729842,
"robot0:MFJ0": 0.7323156897425923,
"robot0:RFJ3": 0.00038520700007378114,
"robot0:RFJ2": 0.36743546201985233,
"robot0:RFJ1": 0.7119514095008576,
"robot0:RFJ0": 0.6699446327514138,
"robot0:LFJ4": 0.0525442258033891,
"robot0:LFJ3": -0.13615534724474673,
"robot0:LFJ2": 0.39872030433433003,
"robot0:LFJ1": 0.7415570009679252,
"robot0:LFJ0": 0.704096378652974,
"robot0:THJ4": 0.003673823825070126,
"robot0:THJ3": 0.5506291436028695,
"robot0:THJ2": -0.014515151997119306,
"robot0:THJ1": -0.0015229223564485414,
"robot0:THJ0": -0.7894883021600622,
}
# Ensure we get the path separator correct on windows
MODEL_XML_PATH = os.path.join("hand", "reach.xml")
def goal_distance(goal_a, goal_b):
assert goal_a.shape == goal_b.shape
return np.linalg.norm(goal_a - goal_b, axis=-1)
def get_base_hand_reach_env(HandEnvClass: Union[MujocoHandEnv, MujocoPyHandEnv]):
class BaseHandReachEnv(HandEnvClass, EzPickle):
def __init__(
self,
distance_threshold=0.01,
n_substeps=20,
relative_control=False,
initial_qpos=DEFAULT_INITIAL_QPOS,
reward_type="sparse",
**kwargs,
):
self.distance_threshold = distance_threshold
self.reward_type = reward_type
HandEnvClass.__init__(
self,
model_path=MODEL_XML_PATH,
n_substeps=n_substeps,
initial_qpos=initial_qpos,
relative_control=relative_control,
**kwargs,
)
EzPickle.__init__(
self,
distance_threshold,
n_substeps,
relative_control,
initial_qpos,
reward_type,
**kwargs,
)
# GoalEnv methods
# ----------------------------
def compute_reward(self, achieved_goal, goal, info):
d = goal_distance(achieved_goal, goal)
if self.reward_type == "sparse":
return -(d > self.distance_threshold).astype(np.float32)
else:
return -d
def _sample_goal(self):
thumb_name = "robot0:S_thtip"
finger_names = [name for name in FINGERTIP_SITE_NAMES if name != thumb_name]
finger_name = self.np_random.choice(finger_names)
thumb_idx = FINGERTIP_SITE_NAMES.index(thumb_name)
finger_idx = FINGERTIP_SITE_NAMES.index(finger_name)
assert thumb_idx != finger_idx
# Pick a meeting point above the hand.
meeting_pos = self.palm_xpos + np.array([0.0, -0.09, 0.05])
meeting_pos += self.np_random.normal(scale=0.005, size=meeting_pos.shape)
# Slightly move the meeting goal towards the respective finger so that the
# goals do not overlap.
goal = self.initial_goal.copy().reshape(-1, 3)
for idx in [thumb_idx, finger_idx]:
offset_direction = meeting_pos - goal[idx]
offset_direction /= np.linalg.norm(offset_direction)
goal[idx] = meeting_pos - 0.005 * offset_direction
if self.np_random.uniform() < 0.1:
# With some probability, ask all fingers to move back to the origin.
# This prevents the thumb from constantly staying near the goal position.
goal = self.initial_goal.copy()
return goal.flatten()
def _is_success(self, achieved_goal, desired_goal):
d = goal_distance(achieved_goal, desired_goal)
return (d < self.distance_threshold).astype(np.float32)
def _get_achieved_goal(self):
raise NotImplementedError
return BaseHandReachEnv
class MujocoHandReachEnv(get_base_hand_reach_env(MujocoHandEnv)):
def _get_achieved_goal(self):
goal = [
self._utils.get_site_xpos(self.model, self.data, name)
for name in FINGERTIP_SITE_NAMES
]
return np.array(goal).flatten()
# RobotEnv methods
# ----------------------------
def _env_setup(self, initial_qpos):
for name, value in initial_qpos.items():
self._utils.set_joint_qpos(self.model, self.data, name, value)
self._mujoco.mj_forward(self.model, self.data)
self.initial_goal = self._get_achieved_goal().copy()
self.palm_xpos = self.data.xpos[
self._model_names.body_name2id["robot0:palm"]
].copy()
def _get_obs(self):
robot_qpos, robot_qvel = self._utils.robot_get_obs(
self.model, self.data, self._model_names.joint_names
)
achieved_goal = self._get_achieved_goal().ravel()
observation = np.concatenate([robot_qpos, robot_qvel, achieved_goal])
return {
"observation": observation.copy(),
"achieved_goal": achieved_goal.copy(),
"desired_goal": self.goal.copy(),
}
def _render_callback(self):
# Visualize targets.
sites_offset = (self.data.site_xpos - self.model.site_pos).copy()
goal = self.goal.reshape(5, 3)
for finger_idx in range(5):
site_name = f"target{finger_idx}"
site_id = self._model_names.site_name2id[site_name]
self.model.site_pos[site_id] = goal[finger_idx] - sites_offset[site_id]
# Visualize finger positions.
achieved_goal = self._get_achieved_goal().reshape(5, 3)
for finger_idx in range(5):
site_name = f"finger{finger_idx}"
site_id = self._model_names.site_name2id[site_name]
self.model.site_pos[site_id] = (
achieved_goal[finger_idx] - sites_offset[site_id]
)
self._mujoco.mj_forward(self.model, self.data)
class MujocoPyHandReachEnv(get_base_hand_reach_env(MujocoPyHandEnv)):
def _get_achieved_goal(self):
goal = [self.sim.data.get_site_xpos(name) for name in FINGERTIP_SITE_NAMES]
return np.array(goal).flatten()
# RobotEnv methods
# ----------------------------
def _env_setup(self, initial_qpos):
for name, value in initial_qpos.items():
self.sim.data.set_joint_qpos(name, value)
self.sim.forward()
self.initial_goal = self._get_achieved_goal().copy()
self.palm_xpos = self.sim.data.body_xpos[
self.sim.model.body_name2id("robot0:palm")
].copy()
def _get_obs(self):
robot_qpos, robot_qvel = self._utils.robot_get_obs(self.sim)
achieved_goal = self._get_achieved_goal().ravel()
observation = np.concatenate([robot_qpos, robot_qvel, achieved_goal])
return {
"observation": observation.copy(),
"achieved_goal": achieved_goal.copy(),
"desired_goal": self.goal.copy(),
}
def _render_callback(self):
# Visualize targets.
sites_offset = (self.sim.data.site_xpos - self.sim.model.site_pos).copy()
goal = self.goal.reshape(5, 3)
for finger_idx in range(5):
site_name = f"target{finger_idx}"
site_id = self.sim.model.site_name2id(site_name)
self.sim.model.site_pos[site_id] = goal[finger_idx] - sites_offset[site_id]
# Visualize finger positions.
achieved_goal = self._get_achieved_goal().reshape(5, 3)
for finger_idx in range(5):
site_name = f"finger{finger_idx}"
site_id = self.sim.model.site_name2id(site_name)
self.sim.model.site_pos[site_id] = (
achieved_goal[finger_idx] - sites_offset[site_id]
)
self.sim.forward()
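# A minimal usage sketch (illustrative only, not part of the original module);
# it assumes a working MuJoCo installation. In normal use the environment is
# created through gym.make() with the id registered by gym_robotics.
if __name__ == "__main__":
    env = MujocoHandReachEnv(reward_type="sparse")
    print(env.observation_space)  # Dict with observation/achieved_goal/desired_goal
    print(env.action_space)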
|
PypiClean
|
/xdgappdirs-1.4.4.3.tar.gz/xdgappdirs-1.4.4.3/README.rst
|
============
xdgappdirs
============
This is a fork and almost drop-in replacement of `appdirs
<https://pypi.org/project/appdirs/>`_ that follows the XDG BaseDir Spec on macOS
when the relevant ``XDG_*`` environment variables are available. For instance,
on macOS, when ``XDG_CONFIG_HOME`` is set to ``/Users/steve/.config``,
``user_config_dir('foo')`` evaluates to ``/Users/steve/.config/foo``, whereas
when ``XDG_CONFIG_HOME`` is not set or empty, it evaluates to
``/Users/steve/Library/Application Support/foo``. This gives XDG fans a choice
while not mandating ``.config`` for everyone else, especially for GUI apps.
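For example, on macOS (a doctest-style sketch of the behaviour just described;
the exact fallback path depends on the user account)::
>>> import os
>>> from xdgappdirs import user_config_dir
>>> os.environ['XDG_CONFIG_HOME'] = '/Users/steve/.config'
>>> user_config_dir('foo')
'/Users/steve/.config/foo'
>>> del os.environ['XDG_CONFIG_HOME']
>>> user_config_dir('foo')
'/Users/steve/Library/Application Support/foo'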
Other changes:
- Reverts `ActiveState/appdirs#100
<https://github.com/ActiveState/appdirs/pull/100>`_. On macOS,
``user_config_dir`` and ``site_config_dir`` evaluate to subdirs of
``~/Library/Application Support`` and ``/Library/Application Support`` (unless
the relevant ``XDG_*`` env vars are set and non-empty), rather than subdirs of
``~/Library/Preferences`` and ``/Library/Preferences``, which are specifically
for plists and not suitable for anything else. You don't need ``appdirs`` to
tell you where to write plists.
- Properly handle empty ``XDG_*`` env vars. According to XDG BaseDir Spec,
defaults should be used when the env vars are empty.
The original README for ``appdirs`` follows.
the problem
===========
What directory should your app use for storing user data? If running on macOS, you
should use::
~/Library/Application Support/<AppName>
If on Windows (at least English Win XP) that should be::
C:\Documents and Settings\<User>\Application Data\Local Settings\<AppAuthor>\<AppName>
or possibly::
C:\Documents and Settings\<User>\Application Data\<AppAuthor>\<AppName>
for `roaming profiles <https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-vista/cc766489(v=ws.10)>`_ but that is another story.
On Linux (and other Unices) the dir, according to the `XDG
spec <https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_, is::
~/.local/share/<AppName>
``appdirs`` to the rescue
=========================
This kind of thing is what the ``appdirs`` module is for. ``appdirs`` will
help you choose an appropriate:
- user data dir (``user_data_dir``)
- user config dir (``user_config_dir``)
- user cache dir (``user_cache_dir``)
- site data dir (``site_data_dir``)
- site config dir (``site_config_dir``)
- user log dir (``user_log_dir``)
and also:
- is a single module so other Python packages can include their own private copy
- is slightly opinionated on the directory names used. Look for "OPINION" in
documentation and code for when an opinion is being applied.
some example output
===================
On macOS::
>>> from appdirs import *
>>> appname = "SuperApp"
>>> appauthor = "Acme"
>>> user_data_dir(appname, appauthor)
'/Users/trentm/Library/Application Support/SuperApp'
>>> site_data_dir(appname, appauthor)
'/Library/Application Support/SuperApp'
>>> user_cache_dir(appname, appauthor)
'/Users/trentm/Library/Caches/SuperApp'
>>> user_log_dir(appname, appauthor)
'/Users/trentm/Library/Logs/SuperApp'
On Windows 7::
>>> from appdirs import *
>>> appname = "SuperApp"
>>> appauthor = "Acme"
>>> user_data_dir(appname, appauthor)
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp'
>>> user_data_dir(appname, appauthor, roaming=True)
'C:\\Users\\trentm\\AppData\\Roaming\\Acme\\SuperApp'
>>> user_cache_dir(appname, appauthor)
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Cache'
>>> user_log_dir(appname, appauthor)
'C:\\Users\\trentm\\AppData\\Local\\Acme\\SuperApp\\Logs'
On Linux::
>>> from appdirs import *
>>> appname = "SuperApp"
>>> appauthor = "Acme"
>>> user_data_dir(appname, appauthor)
'/home/trentm/.local/share/SuperApp'
>>> site_data_dir(appname, appauthor)
'/usr/local/share/SuperApp'
>>> site_data_dir(appname, appauthor, multipath=True)
'/usr/local/share/SuperApp:/usr/share/SuperApp'
>>> user_cache_dir(appname, appauthor)
'/home/trentm/.cache/SuperApp'
>>> user_log_dir(appname, appauthor)
'/home/trentm/.cache/SuperApp/log'
>>> user_config_dir(appname)
'/home/trentm/.config/SuperApp'
>>> site_config_dir(appname)
'/etc/xdg/SuperApp'
>>> os.environ['XDG_CONFIG_DIRS'] = '/etc:/usr/local/etc'
>>> site_config_dir(appname, multipath=True)
'/etc/SuperApp:/usr/local/etc/SuperApp'
``AppDirs`` for convenience
===========================
::
>>> from appdirs import AppDirs
>>> dirs = AppDirs("SuperApp", "Acme")
>>> dirs.user_data_dir
'/Users/trentm/Library/Application Support/SuperApp'
>>> dirs.site_data_dir
'/Library/Application Support/SuperApp'
>>> dirs.user_cache_dir
'/Users/trentm/Library/Caches/SuperApp'
>>> dirs.user_log_dir
'/Users/trentm/Library/Logs/SuperApp'
Per-version isolation
=====================
If you have multiple versions of your app in use that you want to be
able to run side-by-side, then you may want version-isolation for these
dirs::
>>> from appdirs import AppDirs
>>> dirs = AppDirs("SuperApp", "Acme", version="1.0")
>>> dirs.user_data_dir
'/Users/trentm/Library/Application Support/SuperApp/1.0'
>>> dirs.site_data_dir
'/Library/Application Support/SuperApp/1.0'
>>> dirs.user_cache_dir
'/Users/trentm/Library/Caches/SuperApp/1.0'
>>> dirs.user_log_dir
'/Users/trentm/Library/Logs/SuperApp/1.0'
|
PypiClean
|
/eyes17-1.0.27-py3-none-any.whl/eyes17GUI/MPU6050.py
|
import sys, time, math, os.path
from . import utils
from .QtVersion import *
import pyqtgraph as pg
import numpy as np
from . eyes17 import eyemath17 as em
from eyes17.SENSORS import MPU6050
from eyes17.SENSORS.supported import supported,nameMap
from eyes17.sensorlist import sensors as sensorHints
class Expt(QWidget):
TIMER = 10
RPWIDTH = 300
RPGAP = 4
running = False
sensor = None
VMIN = -5
VMAX = 5
TMIN = 0
TMAX = 5
TGAP = 10
MAXCHAN = 7
dataVals = [[] for x in range(MAXCHAN)]
timeVal = []
sensorNames = [
QT_TRANSLATE_NOOP('Expt','Ax'),
QT_TRANSLATE_NOOP('Expt','Ay'),
QT_TRANSLATE_NOOP('Expt','Az'),
QT_TRANSLATE_NOOP('Expt','Temperature'),
QT_TRANSLATE_NOOP('Expt','Vx'),
QT_TRANSLATE_NOOP('Expt','Vy'),
QT_TRANSLATE_NOOP('Expt','Vz')
]
sensorSelectCB = [None]*MAXCHAN
sensorFlags = [False]*MAXCHAN
dataTraces = [None]*MAXCHAN
history = [] # Data store
chanpens = ['y','g','r','m', 'c','w','b'] # pyqtgraph pen colors
def __init__(self, device=None):
QWidget.__init__(self)
self.p = device # connection to the device hardware
self.pwin = pg.PlotWidget() # pyqtgraph window
self.pwin.showGrid(x=True, y=True) # with grid
ax = self.pwin.getAxis('bottom')
ax.setLabel(self.tr('Time (mS)'))
ax = self.pwin.getAxis('left')
ax.setLabel(self.tr('Value'))
#self.pwin.disableAutoRange()
self.pwin.setXRange(self.TMIN, self.TMAX)
#self.pwin.setYRange(self.VMIN, self.VMAX)
#self.pwin.hideButtons() # Do not show the 'A' button of pg
right = QVBoxLayout() # right side vertical layout
right.setAlignment(Qt.AlignmentFlag(0x0020)) #Qt.AlignTop
right.setSpacing(self.RPGAP)
for k in range(self.MAXCHAN):
self.dataTraces[k] = self.pwin.plot([0,0],[0,0], pen = self.chanpens[k])
self.sensorSelectCB[k] = QCheckBox(self.tr(self.sensorNames[k]))
right.addWidget(self.sensorSelectCB[k])
self.sensorSelectCB[3].setChecked(True) # Temperature is enabled by default
H = QHBoxLayout()
l = QLabel(text=self.tr('Duration'))
l.setMaximumWidth(80)
H.addWidget(l)
self.TMAXtext = utils.lineEdit(40, self.TMAX, 6, None)
H.addWidget(self.TMAXtext)
l = QLabel(text=self.tr('Seconds'))
l.setMaximumWidth(60)
H.addWidget(l)
right.addLayout(H)
H = QHBoxLayout()
l = QLabel(text=self.tr('Read every'))
l.setMaximumWidth(80)
H.addWidget(l)
self.TGAPtext = utils.lineEdit(40, self.TGAP, 6, None)
H.addWidget(self.TGAPtext)
l = QLabel(text=self.tr('mS'))
l.setMaximumWidth(60)
H.addWidget(l)
right.addLayout(H)
b = QPushButton(self.tr("Start"))
right.addWidget(b)
b.clicked.connect(self.start)
b = QPushButton(self.tr("Stop"))
right.addWidget(b)
b.clicked.connect(self.stop)
b = QPushButton(self.tr("Clear Traces"))
right.addWidget(b)
b.clicked.connect(self.clear)
self.SaveButton = QPushButton(self.tr("Save Data"))
self.SaveButton.clicked.connect(self.save_data)
right.addWidget(self.SaveButton)
#------------------------end of right panel ----------------
top = QHBoxLayout()
top.addWidget(self.pwin)
top.addLayout(right)
full = QVBoxLayout()
full.addLayout(top)
self.msgwin = QLabel(text='')
full.addWidget(self.msgwin)
self.setLayout(full)
self.timer = QTimer()
self.timer.timeout.connect(self.update)
self.timer.start(self.TIMER)
#----------------------------- end of init ---------------
def update(self):
if self.running == False:
return
try:
senData = self.sensor.getRaw()
except:
self.msg(self.tr('I2C device communication error'))
return
#print senData
if self.timeVal == []:
self.start_time = time.time()
elapsed = 0
else:
elapsed = time.time() - self.start_time
self.timeVal.append(elapsed)
for k in range(self.MAXCHAN):
if self.sensorFlags[k] == True:
self.dataVals[k].append(senData[k])
if elapsed > self.TMAX:
self.running = False
self.msg(self.tr('MPU6050 data plot completed'))
return
if len(self.timeVal) > 1: # Draw the traces
for k in range(self.MAXCHAN):
if self.sensorFlags[k] == True:
self.dataTraces[k].setData(self.timeVal, self.dataVals[k])
def start(self):
if self.running == True: return
try:
self.TMAX = float(self.TMAXtext.text())
self.TGAP = float(self.TGAPtext.text())
except:
self.msg(self.tr('Invalid Duration or Time between reads (> 10 mSec)'))
return
self.timer.stop()
self.timer.start(self.TGAP)
lst = self.p.I2C.scan()
for a in lst:
sen = sensorHints.get(a,['unknown'])[0]
if 'MPU-6050' in sen:
self.sensor = supported[a].connect(self.p.I2C,address = a)
break
for k in range(self.MAXCHAN):
self.dataVals[k] = [] # Clear data and traces
self.dataTraces[k].setData([0,0],[0,0])
if self.sensorSelectCB[k].isChecked() == True:
self.sensorFlags[k] = True
else:
self.sensorFlags[k] = False
self.timeVal = []
self.pwin.setXRange(self.TMIN, self.TMAX)
#self.pwin.setYRange(self.VMIN, self.VMAX)
self.running = True
self.msg(self.tr('Started Measurements'))
def stop(self):
self.running = False
self.msg(self.tr('User Stopped'))
def clear(self):
self.timeVal = []
for k in range(self.MAXCHAN):
self.dataVals[k] = [] # Clear data and traces
self.dataTraces[k].setData([0,0],[0,0])
self.msg(self.tr('Cleared Traces and Data'))
def save_data(self):
if self.timeVal == []:
self.msg(self.tr('No Traces available for saving'))
return
fn = QFileDialog.getSaveFileName()
if fn != '':
data = []
for k in range(self.MAXCHAN):
if self.sensorFlags[k] == True:
data.append([self.timeVal, self.dataVals[k]])
self.p.save(data, fn)
self.msg(self.tr('Traces saved to ') + fn)
def msg(self, m):
self.msgwin.setText(self.tr(m))
def comerr(self):
self.msgwin.setText('<font color="red">' + self.tr('Error. Try Device->Reconnect'))
if __name__ == '__main__':
from . eyes17 import eyes
dev = eyes.open()
app = QApplication(sys.argv)
# translation stuff
lang=QLocale.system().name()
t=QTranslator()
t.load("lang/"+lang, os.path.dirname(__file__))
app.installTranslator(t)
t1=QTranslator()
t1.load("qt_"+lang,
QLibraryInfo.location(QLibraryInfo.TranslationsPath))
app.installTranslator(t1)
mw = Expt(dev)
mw.show()
sys.exit(app.exec_())
|
PypiClean
|
/cyscs-0.1.tar.gz/cyscs-0.1/c/scs/linsys/direct/external/AMD_README.txt
|
AMD, Copyright (c) 2009-2012 by Timothy A. Davis (http://www.suitesparse.com),
Patrick R. Amestoy, and Iain S. Duff. All Rights Reserved. AMD is available
under alternate licences; contact T. Davis for details.
AMD: a set of routines for permuting sparse matrices prior to
factorization. Includes a version in C, a version in Fortran, and a MATLAB
mexFunction.
Requires SuiteSparse_config, in the ../SuiteSparse_config directory relative to
this directory.
Quick start (Unix, or Windows with Cygwin):
To compile, test, and install AMD, you may wish to first configure the
installation by editing the ../SuiteSparse_config/SuiteSparse_config.mk
file. Next, cd to this directory (AMD) and type "make" (or "make lib" if
you do not have MATLAB). To compile and run a demo program for the Fortran
version, type "make fortran". When done, type "make clean" to remove
unused *.o files (keeps the compiled libraries and demo programs). See the
User Guide (Doc/AMD_UserGuide.pdf), or
../SuiteSparse_config/SuiteSparse_config.mk for more details.
Quick start (for MATLAB users):
To compile, test, and install the AMD mexFunction, cd to the
AMD/MATLAB directory and type amd_make at the MATLAB prompt.
-------------------------------------------------------------------------------
AMD License:
Your use or distribution of AMD or any modified version of
AMD implies that you agree to this License.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
USA
Permission is hereby granted to use or copy this program under the
terms of the GNU LGPL, provided that the Copyright, this License,
and the Availability of the original version is retained on all copies.
User documentation of any code that uses this code or any modified
version of this code must cite the Copyright, this License, the
Availability note, and "Used by permission." Permission to modify
the code and to distribute modified code is granted, provided the
Copyright, this License, and the Availability note are retained,
and a notice that the code was modified is included.
Availability:
http://www.suitesparse.com
-------------------------------------------------------------------------------
This is the AMD README file. It is a terse overview of AMD.
Refer to the User Guide (Doc/AMD_UserGuide.pdf) for how to install
and use AMD.
Description:
AMD is a set of routines for pre-ordering sparse matrices prior to Cholesky
or LU factorization, using the approximate minimum degree ordering
algorithm. Written in ANSI/ISO C with a MATLAB interface, and in
Fortran 77.
Authors:
Timothy A. Davis ([email protected])
Patrick R. Amestoy, ENSEEIHT, Toulouse, France.
Iain S. Duff, Rutherford Appleton Laboratory, UK.
Acknowledgements:
This work was supported by the National Science Foundation, under
grants DMS-9504974, DMS-9803599, and CCR-0203270.
Portions of this work were done while on sabbatical at Stanford University
and Lawrence Berkeley National Laboratory (with funding from the SciDAC
program). I would like to thank Gene Golub, Esmond Ng, and Horst Simon
for making this sabbatical possible.
-------------------------------------------------------------------------------
Files and directories in the AMD distribution:
-------------------------------------------------------------------------------
---------------------------------------------------------------------------
Subdirectories of the AMD directory:
---------------------------------------------------------------------------
Doc documentation
Source primary source code
Include include file for use in your code that calls AMD
Demo demo programs. also serves as test of the AMD installation.
MATLAB AMD mexFunction for MATLAB, and supporting m-files
Lib where the compiled C-callable and Fortran-callable
AMD libraries are placed.
---------------------------------------------------------------------------
Files in the AMD directory:
---------------------------------------------------------------------------
Makefile top-level Makefile for GNU make or original make.
Windows users would require Cygwin to use "make"
README.txt this file
---------------------------------------------------------------------------
Doc directory: documentation
---------------------------------------------------------------------------
ChangeLog change log
License the AMD License
Makefile for creating the documentation
AMD_UserGuide.bib AMD User Guide (references)
AMD_UserGuide.tex AMD User Guide (LaTeX)
AMD_UserGuide.pdf AMD User Guide (PDF)
lesser.txt the GNU LGPL license
---------------------------------------------------------------------------
Source directory:
---------------------------------------------------------------------------
amd_order.c user-callable, primary AMD ordering routine
amd_control.c user-callable, prints the control parameters
amd_defaults.c user-callable, sets default control parameters
amd_info.c user-callable, prints the statistics from AMD
amd_1.c non-user-callable, construct A+A'
amd_2.c user-callable, primary ordering kernel
(a C version of amd.f and amdbar.f, with
post-ordering added)
amd_aat.c non-user-callable, computes nnz (A+A')
amd_dump.c non-user-callable, debugging routines
amd_postorder.c non-user-callable, postorder
amd_post_tree.c non-user-callable, postorder just one tree
amd_valid.c non-user-callable, verifies a matrix
amd_preprocess.c non-user-callable, computes A', removes duplicate entries
amd.f user-callable Fortran 77 version
amdbar.f user-callable Fortran 77 version
---------------------------------------------------------------------------
Include directory:
---------------------------------------------------------------------------
amd.h include file for C programs that use AMD
amd_internal.h non-user-callable, include file for AMD
---------------------------------------------------------------------------
Demo directory:
---------------------------------------------------------------------------
Makefile for GNU make or original make
amd_demo.c C demo program for AMD
amd_demo.out output of amd_demo.c
amd_demo2.c C demo program for AMD, jumbled matrix
amd_demo2.out output of amd_demo2.c
amd_l_demo.c C demo program for AMD (long integer version)
amd_l_demo.out output of amd_l_demo.c
amd_simple.c simple C demo program for AMD
amd_simple.out output of amd_simple.c
amd_f77demo.f Fortran 77 demo program for AMD
amd_f77demo.out output of amd_f77demo.f
amd_f77simple.c simple Fortran 77 demo program for AMD
amd_f77simple.out output of amd_f77simple.f
amd_f77cross.f Fortran 77 demo, calls the C version of AMD
amd_f77cross.out output of amd_f77cross.f
amd_f77wrapper.c Fortran-callable wrapper for C version of AMD
---------------------------------------------------------------------------
MATLAB directory:
---------------------------------------------------------------------------
GNUmakefile a nice Makefile, for GNU make
Makefile an ugly Unix Makefile (for older make's)
Contents.m for "help amd2" listing of toolbox contents
amd2.m MATLAB help file for AMD
amd_make.m MATLAB m-file for compiling AMD mexFunction
amd_install.m compile and install the AMD mexFunction
amd_mex.c AMD mexFunction for MATLAB
amd_demo.m MATLAB demo for AMD
amd_demo.m.out diary output of amd_demo.m
can_24.mat input file for AMD demo
---------------------------------------------------------------------------
Lib directory: libamd.a and libamdf77.a libraries placed here
---------------------------------------------------------------------------
GNUmakefile a nice Makefile, for GNU make
Makefile an ugly Unix Makefile (for older make's)
libamd.def AMD definitions for Windows
|
PypiClean
|
/coriolis_eda-2.5.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/coriolis/designflow/technos.py
|
import sys
import os
import socket
from pathlib import Path
from .task import ShellEnv
__all__ = [ 'Where', 'setupCMOS', 'setupCMOS45' ]
class Where ( object ):
coriolisTop = None
allianceTop = None
cellsTop = None
checkToolkit = None
def __init__ ( self, checkToolkit=None ):
if 'CORIOLIS_TOP' in os.environ: Where.coriolisTop = Path( os.environ['CORIOLIS_TOP'] )
if 'ALLIANCE_TOP' in os.environ: Where.allianceTop = Path( os.environ['ALLIANCE_TOP'] )
if 'CELLS_TOP' in os.environ: Where.cellsTop = Path( os.environ['CELLS_TOP'] )
if Where.coriolisTop and not Where.allianceTop: Where.allianceTop = Where.coriolisTop
#print( Where.coriolisTop, Where.allianceTop )
if not Where.coriolisTop:
print( 'technos.Where.__init__(): Unable to locate Coriolis top.' )
if checkToolkit is None:
checkToolkit = Path.home() / 'coriolis-2.x' / 'src' / 'alliance-check-toolkit'
else:
if isinstance(checkToolkit,str):
checkToolkit = Path( checkToolkit )
if not Where.cellsTop:
Where.cellsTop = checkToolkit / 'cells'
Where.checkToolkit = checkToolkit
if not Where.cellsTop and Where.allianceTop:
Where.cellsTop = Where.allianceTop / 'cells'
ShellEnv.ALLIANCE_TOP = Where.allianceTop.as_posix()
def __repr__ ( self ):
if not Where.coriolisTop:
return '<Where coriolisTop=NOT_FOUND>'
return '<Where coriolisTop="{}">'.format( Where.coriolisTop.as_posix() )
def setupCMOS ( checkToolkit=None ):
Where( checkToolkit )
ShellEnv().export()
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import overlay, l, u, n
from .yosys import Yosys
import coriolis.technos.symbolic.cmos
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = True
cfg.misc.verboseLevel2 = True
cfg.misc.minTraceLevel = 1900
cfg.misc.maxTraceLevel = 3000
cfg.katana.eventsLimit = 1000000
cfg.katana.termSatReservedLocal = 6
cfg.katana.termSatThreshold = 9
Viewer.Graphics.setStyle( 'Alliance.Classic [black]' )
af = CRL.AllianceFramework.get()
env = af.getEnvironment()
env.setCLOCK( '^ck$|m_clock|^clk$' )
Yosys.setLiberty( Where.cellsTop / 'sxlib' / 'sxlib.lib' )
ShellEnv.RDS_TECHNO_NAME = (Where.allianceTop / 'etc' / 'cmos.rds').as_posix()
path = None
for pathVar in [ 'PATH', 'path' ]:
if pathVar in os.environ:
path = os.environ[ pathVar ]
os.environ[ pathVar ] = path + ':' + (Where.allianceTop / 'bin').as_posix()
break
def setupLCMOS ( checkToolkit=None ):
Where( checkToolkit )
ShellEnv().export()
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import overlay, l, u, n
from .yosys import Yosys
import coriolis.technos.symbolic.lcmos
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = True
cfg.misc.verboseLevel2 = True
cfg.misc.minTraceLevel = 1900
cfg.misc.maxTraceLevel = 3000
cfg.katana.eventsLimit = 1000000
cfg.katana.termSatReservedLocal = 6
cfg.katana.termSatThreshold = 9
Viewer.Graphics.setStyle( 'Alliance.Classic [black]' )
af = CRL.AllianceFramework.get()
env = af.getEnvironment()
env.setCLOCK( '^ck$|m_clock|^clk$' )
Yosys.setLiberty( Where.checkToolkit / 'cells' / 'lsxlib' / 'lsxlib.lib' )
ShellEnv.RDS_TECHNO_NAME = (Where.allianceTop / 'etc' / 'cmos.rds').as_posix()
path = None
for pathVar in [ 'PATH', 'path' ]:
if pathVar in os.environ:
path = os.environ[ pathVar ]
os.environ[ pathVar ] = path + ':' + (Where.allianceTop / 'bin').as_posix()
break
def setupCMOS45 ( useNsxlib=False, checkToolkit=None, cellsTop=None ):
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import overlay, l, u, n
from .yosys import Yosys
import coriolis.technos.symbolic.cmos45
Where( checkToolkit )
if cellsTop is None:
cellsTop = Where.cellsTop
else:
if isinstance(cellsTop,str):
cellsTop = Path( cellsTop )
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = True
cfg.misc.verboseLevel2 = True
cfg.misc.minTraceLevel = 1900
cfg.misc.maxTraceLevel = 3000
cfg.katana.eventsLimit = 1000000
cfg.etesian.graphics = 3
cfg.etesian.spaceMargin = 0.05
cfg.etesian.aspectRatio = 1.0
cfg.anabatic.edgeLenght = 24
cfg.anabatic.edgeWidth = 8
if useNsxlib:
cfg.anabatic.routingGauge = 'msxlib4'
cfg.anabatic.topRoutingLayer = 'METAL4'
cfg.katana.termSatReservedLocal = 6
cfg.katana.termSatThreshold = 9
Viewer.Graphics.setStyle( 'Alliance.Classic [black]' )
af = CRL.AllianceFramework.get()
env = af.getEnvironment()
env.setCLOCK( '^ck$|m_clock|^clk$' )
sxlib = cellsTop / 'nsxlib'
iolib = cellsTop / 'niolib'
liberty = sxlib / 'nsxlib.lib'
env.addSYSTEM_LIBRARY( library=iolib.as_posix(), mode=CRL.Environment.Prepend )
env.addSYSTEM_LIBRARY( library=sxlib.as_posix(), mode=CRL.Environment.Prepend )
if not sxlib.is_dir():
print( '[ERROR] technos.setupCMOS45(): sxlib directory does *not* exist:' )
print( ' "{}"'.format(sxlib.as_posix()) )
Yosys.setLiberty( liberty )
ShellEnv.RDS_TECHNO_NAME = (Where.checkToolkit / 'etc' / 'FreePDK45.rds').as_posix()
path = None
for pathVar in [ 'PATH', 'path' ]:
if pathVar in os.environ:
path = os.environ[ pathVar ]
os.environ[ pathVar ] = path + ':' + (Where.allianceTop / 'bin').as_posix()
break
def setupSky130_c4m ( checkToolkit=None, pdkMasterTop=None ):
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import setNdaTopDir, overlay, l, u, n
from .yosys import Yosys
if isinstance(pdkMasterTop,str):
pdkMasterTop = Path( pdkMasterTop )
ndaDirectory = None
if pdkMasterTop:
ndaDirectory = pdkMasterTop / 'libs.tech' / 'coriolis' / 'techno'
elif not ndaDirectory:
hostname = socket.gethostname()
if hostname.startswith('lepka'):
ndaDirectory = Path( '/dsk/l1/jpc/crypted/soc/techno' )
if not ndaDirectory.is_dir():
print ('[ERROR] You forgot to mount the NDA encrypted directory, stupid!')
else:
ndaDirectory = Path( '/users/soft/techno/techno' )
pdkMasterTop = ndaDirectory
setNdaTopDir( ndaDirectory.as_posix() )
if not pdkMasterTop.is_dir():
print( '[ERROR] technos.setupSky130_c4m(): pdkMasterTop directory does *not* exist:' )
print( ' "{}"'.format(pdkMasterTop.as_posix()) )
Where( checkToolkit )
from node130.sky130 import techno, StdCellLib #, LibreSOCIO
techno.setup()
StdCellLib.setup()
#LibreSOCIO.setup()
cellsTop = pdkMasterTop / 'libs.ref'
liberty = cellsTop / 'StdCellLib' / 'liberty' / 'StdCellLib_nom.lib'
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.minTraceLevel = 12300
cfg.misc.maxTraceLevel = 12400
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = False
cfg.misc.verboseLevel2 = False
cfg.viewer.pixelThreshold = 5
cfg.etesian.graphics = 2
cfg.anabatic.topRoutingLayer = 'm4'
cfg.katana.eventsLimit = 4000000
af = CRL.AllianceFramework.get()
lg5 = af.getRoutingGauge( 'StdCellLib' ).getLayerGauge( 5 )
lg5.setType( CRL.RoutingLayerGauge.PowerSupply )
env = af.getEnvironment()
env.setCLOCK( '^sys_clk$|^ck|^jtag_tck$' )
Yosys.setLiberty( liberty )
ShellEnv.CHECK_TOOLKIT = Where.checkToolkit.as_posix()
def setupFreePDK45_c4m ( checkToolkit=None, pdkMasterTop=None ):
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import setNdaTopDir, overlay, l, u, n
from .yosys import Yosys
if isinstance(pdkMasterTop,str):
pdkMasterTop = Path( pdkMasterTop )
if not pdkMasterTop.is_dir():
print( '[ERROR] technos.setupFreePDK45_c4m(): pdkMasterTop directory does *not* exist:' )
print( ' "{}"'.format(pdkMasterTop.as_posix()) )
sys.path.append( (pdkMasterTop / 'coriolis' / 'techno' / 'etc' / 'coriolis2').resolve().as_posix() )
Where( checkToolkit )
from NDA.node45.freepdk45_c4m import techno, FlexLib, LibreSOCIO
techno.setup()
FlexLib.setup()
LibreSOCIO.setup()
liberty = pdkMasterTop / 'views' / 'FreePDK45' / 'FlexLib' / 'liberty' / 'FlexLib_nom.lib'
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.minTraceLevel = 12300
cfg.misc.maxTraceLevel = 12400
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = True
cfg.misc.verboseLevel2 = True
cfg.etesian.graphics = 3
cfg.etesian.spaceMargin = 0.10
cfg.anabatic.topRoutingLayer = 'metal6'
cfg.katana.eventsLimit = 4000000
af = CRL.AllianceFramework.get()
lg5 = af.getRoutingGauge('FlexLib').getLayerGauge( 5 )
lg5.setType( CRL.RoutingLayerGauge.PowerSupply )
env = af.getEnvironment()
env.setCLOCK( '^sys_clk$|^ck|^jtag_tck$' )
Yosys.setLiberty( liberty )
ShellEnv.CHECK_TOOLKIT = Where.checkToolkit.as_posix()
def setupTSMC_c180_c4m ( checkToolkit=None, ndaTop=None ):
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import setNdaTopDir, overlay, l, u, n
from .yosys import Yosys
ndaDirectory = None
if ndaTop is not None:
if not isinstance(ndaTop,Path):
ndaDirectory = Path( ndaTop )
else:
ndaDirectory = ndaTop
if not ndaDirectory:
hostname = socket.gethostname()
if hostname.startswith('lepka'):
ndaDirectory = Path( '/dsk/l1/jpc/crypted/soc/techno' )
if not ndaDirectory.is_dir():
print( '[ERROR] You forgot to mount the NDA encrypted directory, stupid!' )
else:
ndaDirectory = Path( '/users/soft/techno/techno' )
setNdaTopDir( ndaDirectory )
Where( checkToolkit )
from NDA.node180.tsmc_c018 import techno, FlexLib, LibreSOCIO, LibreSOCMem #, pll
techno.setup()
FlexLib.setup()
LibreSOCIO.setup()
LibreSOCMem.setup()
#pll.setup()
liberty = ndaDirectory / 'etc' / 'coriolis2' / 'NDA' / 'node180' / 'tsmc_c018' / 'FlexLib.lib'
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.minTraceLevel = 10100
cfg.misc.maxTraceLevel = 10200
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = True
cfg.misc.verboseLevel2 = True
cfg.etesian.graphics = 3
cfg.etesian.densityVariation = 0.04
cfg.etesian.spaceMargin = 0.04
cfg.katana.eventsLimit = 4000000
af = CRL.AllianceFramework.get()
env = af.getEnvironment()
env.setCLOCK( '^clk|^reset' )
Yosys.setLiberty( liberty )
ShellEnv.CHECK_TOOLKIT = Where.checkToolkit.as_posix()
def setupGF180MCU_GF ( checkToolkit=None, pdkTop=None ):
from .. import Cfg
from .. import Viewer
from .. import CRL
from ..helpers import setNdaTopDir, overlay, l, u, n
from .yosys import Yosys
if isinstance(pdkTop,str):
pdkTop = Path( pdkTop )
if not pdkTop:
print( '[ERROR] technos.setupGF180MCU_GF(): pdkTop directory has *not* been set.' )
if not pdkTop.is_dir():
print( '[ERROR] technos.setupGF180MCU_GF(): pdkTop directory does *not* exist:' )
print( ' "{}"'.format(pdkTop.as_posix()) )
Where( checkToolkit )
cellsTop = pdkTop / 'libraries' / 'gf180mcu_fd_sc_mcu9t5v0' / 'latest' / 'cells'
#liberty = pdkTop / 'libraries' / 'gf180mcu_fd_sc_mcu9t5v0' / 'latest' / 'liberty' / 'gf180mcu_fd_sc_mcu9t5v0__tt_025C_5v00.lib'
liberty = pdkTop / 'FULL.lib'
from coriolis.technos.node180.gf180mcu import techno
from coriolis.technos.node180.gf180mcu import mcu9t5v0
techno.setup()
mcu9t5v0.setup( cellsTop )
with overlay.CfgCache(priority=Cfg.Parameter.Priority.UserFile) as cfg:
cfg.misc.catchCore = False
cfg.misc.minTraceLevel = 12300
cfg.misc.maxTraceLevel = 12400
cfg.misc.info = False
cfg.misc.paranoid = False
cfg.misc.bug = False
cfg.misc.logMode = True
cfg.misc.verboseLevel1 = False
cfg.misc.verboseLevel2 = False
cfg.etesian.graphics = 2
cfg.anabatic.topRoutingLayer = 'm4'
cfg.katana.eventsLimit = 4000000
af = CRL.AllianceFramework.get()
#lg5 = af.getRoutingGauge( 'mcu9t' ).getLayerGauge( 5 )
#lg5.setType( CRL.RoutingLayerGauge.PowerSupply )
env = af.getEnvironment()
env.setCLOCK( '^sys_clk$|^ck|^jtag_tck$' )
Yosys.setLiberty( liberty )
ShellEnv.CHECK_TOOLKIT = Where.checkToolkit.as_posix()
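# A minimal usage sketch (illustrative only, not part of the original module):
# a design-flow script typically calls exactly one of the setup functions,
# once, before any Coriolis tool is invoked. The toolkit path below is only a
# placeholder and a full Coriolis/Alliance installation is assumed.
if __name__ == '__main__':
    setupCMOS( checkToolkit='/path/to/alliance-check-toolkit' )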
|
PypiClean
|
/xkcd.py-1.0.4-py3-none-any.whl/xkcd/comic.py
|
from datetime import datetime
from html import unescape
from os.path import split
from random import randint
from urllib.parse import urlparse
from typing import Optional
from requests import get
XKCD_BASE_URL = "https://xkcd.com/"
XKCD_WIKI_BASE_URL = "https://explainxkcd.com/"
class Comic:
"""
A class that represents a comic.
:param number: The comic's number.
:type number: Optional[:class:`int`]
:param random: Whether to choose a random comic, or not.
:type random: Optional[:class:`bool`]
.. note::
If ``random`` is ``True``, ``number`` must not be specified.
:ivar date: The comic's date.
:ivar image: The URL of the comic's image.
:ivar number: The number of the comic.
:ivar title: The comic's title.
:ivar safe_title: A safe form of the comic's title.
:ivar transcript: The transcript of the comic.
:ivar wiki_url: The URL of the comic's wiki.
:ivar url: The comic's URL.
"""
class Image:
"""
A class that represents an image.
:ivar url: The image's URL.
:ivar title: The image's title (Alt Text).
:ivar filename: The filename of the image.
"""
def __init__(self, _url: str, _title: str) -> None:
self.url = _url
self.title = _title
self.filename = split(urlparse(self.url).path)[1]
def __init__(self, number: Optional[int] = None, *, random: Optional[bool] = False) -> None:
if random and number:
raise ValueError("If 'random' is 'True', 'number' must not be specified.")
if random:
response = get(f"{XKCD_BASE_URL}info.0.json").json()
latest = int(response["num"])
self.number = randint(1, latest)
response = get(f"{XKCD_BASE_URL}{self.number}/info.0.json").json()
else:
request_url = f"{XKCD_BASE_URL}info.0.json" if number is None else f"{XKCD_BASE_URL}{number}/info.0.json"
response = get(request_url).json()
self.number = int(response["num"])
self.date = datetime(int(response["year"]), int(response["month"]), int(response["day"]))
self.safe_title = response["safe_title"]
self.title = response["title"]
self.transcript = unescape(response["transcript"])
self.image = self.Image(response["img"], response["alt"])
self.wiki_url = f"{XKCD_WIKI_BASE_URL}{self.number}"
self.url = f"{XKCD_BASE_URL}{self.number}"
|
PypiClean
|
/django-thesaurus-0.0.2.tar.gz/django-thesaurus-0.0.2/thesaurus/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import parler.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Collection',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
options={
'abstract': False,
},
bases=(parler.models.TranslatableModelMixin, models.Model),
),
migrations.CreateModel(
name='CollectionTranslation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language_code', models.CharField(db_index=True, max_length=15, verbose_name='Language')),
('label', models.CharField(blank=True, max_length=255, null=True)),
('master', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='translations', to='thesaurus.Collection')),
],
options={
'verbose_name': 'collection Translation',
'db_table': 'thesaurus_collection_translation',
'db_tablespace': '',
'managed': True,
'default_permissions': (),
},
),
migrations.CreateModel(
name='Concept',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(blank=True, max_length=255, null=True, unique=True)),
],
options={
'abstract': False,
},
bases=(parler.models.TranslatableModelMixin, models.Model),
),
migrations.CreateModel(
name='ConceptTranslation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language_code', models.CharField(db_index=True, max_length=15, verbose_name='Language')),
('label', models.CharField(blank=True, max_length=255, null=True)),
('master', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='translations', to='thesaurus.Concept')),
],
options={
'verbose_name': 'concept Translation',
'db_table': 'thesaurus_concept_translation',
'db_tablespace': '',
'managed': True,
'default_permissions': (),
},
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('collection', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='thesaurus.Collection')),
('concept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='thesaurus.Concept')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='thesaurus.Member')),
],
),
migrations.CreateModel(
name='Vocabulary',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('prefix', models.CharField(blank=True, max_length=255, null=True)),
('uri', models.CharField(blank=True, max_length=255, null=True, unique=True)),
('collection', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='thesaurus.Collection')),
],
options={
'abstract': False,
},
bases=(parler.models.TranslatableModelMixin, models.Model),
),
migrations.CreateModel(
name='VocabularyTranslation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language_code', models.CharField(db_index=True, max_length=15, verbose_name='Language')),
('label', models.CharField(blank=True, max_length=255, null=True)),
('master', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='translations', to='thesaurus.Vocabulary')),
],
options={
'verbose_name': 'vocabulary Translation',
'db_table': 'thesaurus_vocabulary_translation',
'db_tablespace': '',
'managed': True,
'default_permissions': (),
},
),
migrations.AddField(
model_name='concept',
name='collections',
field=models.ManyToManyField(related_name='concepts', through='thesaurus.Member', to='thesaurus.Collection'),
),
migrations.AddField(
model_name='concept',
name='vocabulary',
field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='thesaurus.Vocabulary'),
),
migrations.AlterUniqueTogether(
name='vocabularytranslation',
unique_together=set([('language_code', 'master')]),
),
migrations.AlterUniqueTogether(
name='concepttranslation',
unique_together=set([('language_code', 'master')]),
),
migrations.AlterUniqueTogether(
name='collectiontranslation',
unique_together=set([('language_code', 'master')]),
),
]
|
PypiClean
|
/waymo_open_dataset_tf_2_1_0-1.3.1-cp36-cp36m-manylinux2010_x86_64.whl/waymo_open_dataset/utils/transform_utils.py
|
"""Utils to manage geometry transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
__all__ = [
'get_yaw_rotation', 'get_yaw_rotation_2d', 'get_rotation_matrix',
'get_transform'
]
def get_yaw_rotation(yaw, name=None):
"""Gets a rotation matrix given yaw only.
Args:
yaw: z-rotation in radians. This tensor can be any shape except an empty
one.
name: the op name.
Returns:
A rotation tensor with the same data type of the input. Its shape is
[input_shape, 3 ,3].
"""
with tf.compat.v1.name_scope(name, 'GetYawRotation', [yaw]):
cos_yaw = tf.cos(yaw)
sin_yaw = tf.sin(yaw)
ones = tf.ones_like(yaw)
zeros = tf.zeros_like(yaw)
return tf.stack([
tf.stack([cos_yaw, -1.0 * sin_yaw, zeros], axis=-1),
tf.stack([sin_yaw, cos_yaw, zeros], axis=-1),
tf.stack([zeros, zeros, ones], axis=-1),
],
axis=-2)
def get_yaw_rotation_2d(yaw):
"""Gets a rotation matrix given yaw only for 2d.
Args:
yaw: z-rotation in radians. This tensor can be any shape except an empty
one.
Returns:
A rotation tensor with the same data type of the input. Its shape is
[input_shape, 2, 2].
"""
with tf.name_scope('GetYawRotation2D'):
cos_yaw = tf.cos(yaw)
sin_yaw = tf.sin(yaw)
return tf.stack([
tf.stack([cos_yaw, -1.0 * sin_yaw], axis=-1),
tf.stack([sin_yaw, cos_yaw], axis=-1),
],
axis=-2)
def get_rotation_matrix(roll, pitch, yaw, name=None):
"""Gets a rotation matrix given roll, pitch, yaw.
roll-pitch-yaw is z-y'-x'' intrinsic rotation which means we need to apply
x(roll) rotation first, then y(pitch) rotation, then z(yaw) rotation.
https://en.wikipedia.org/wiki/Euler_angles
http://planning.cs.uiuc.edu/node102.html
Args:
roll : x-rotation in radians.
pitch: y-rotation in radians. The shape must be the same as roll.
yaw: z-rotation in radians. The shape must be the same as roll.
name: the op name.
Returns:
A rotation tensor with the same data type of the input. Its shape is
[input_shape_of_yaw, 3 ,3].
"""
with tf.compat.v1.name_scope(name, 'GetRotationMatrix', [yaw, pitch, roll]):
cos_roll = tf.cos(roll)
sin_roll = tf.sin(roll)
cos_yaw = tf.cos(yaw)
sin_yaw = tf.sin(yaw)
cos_pitch = tf.cos(pitch)
sin_pitch = tf.sin(pitch)
ones = tf.ones_like(yaw)
zeros = tf.zeros_like(yaw)
r_roll = tf.stack([
tf.stack([ones, zeros, zeros], axis=-1),
tf.stack([zeros, cos_roll, -1.0 * sin_roll], axis=-1),
tf.stack([zeros, sin_roll, cos_roll], axis=-1),
],
axis=-2)
r_pitch = tf.stack([
tf.stack([cos_pitch, zeros, sin_pitch], axis=-1),
tf.stack([zeros, ones, zeros], axis=-1),
tf.stack([-1.0 * sin_pitch, zeros, cos_pitch], axis=-1),
],
axis=-2)
r_yaw = tf.stack([
tf.stack([cos_yaw, -1.0 * sin_yaw, zeros], axis=-1),
tf.stack([sin_yaw, cos_yaw, zeros], axis=-1),
tf.stack([zeros, zeros, ones], axis=-1),
],
axis=-2)
return tf.matmul(r_yaw, tf.matmul(r_pitch, r_roll))
def get_transform(rotation, translation):
"""Combines NxN rotation and Nx1 translation to (N+1)x(N+1) transform.
Args:
rotation: [..., N, N] rotation tensor.
translation: [..., N] translation tensor. This must have the same type as
rotation.
Returns:
transform: [..., (N+1), (N+1)] transform tensor. This has the same type as
rotation.
"""
with tf.name_scope('GetTransform'):
# [..., N, 1]
translation_n_1 = translation[..., tf.newaxis]
# [..., N, N+1]
transform = tf.concat([rotation, translation_n_1], axis=-1)
# [..., N]
last_row = tf.zeros_like(translation)
# [..., N+1]
last_row = tf.concat([last_row, tf.ones_like(last_row[..., 0:1])], axis=-1)
# [..., N+1, N+1]
transform = tf.concat([transform, last_row[..., tf.newaxis, :]], axis=-2)
return transform
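# A minimal usage sketch (not part of the original module): compose a 4x4 rigid
# transform from Euler angles and a translation, then apply it to homogeneous
# points. The values below are illustrative and TF 2.x eager execution is
# assumed.
if __name__ == '__main__':
    rotation = get_rotation_matrix(
        roll=tf.constant(0.0), pitch=tf.constant(0.0), yaw=tf.constant(0.5))
    pose = get_transform(rotation, tf.constant([1.0, 2.0, 0.0]))  # [4, 4]
    points = tf.constant([[0.0, 0.0, 0.0, 1.0]])  # [N, 4] homogeneous points
    transformed = tf.linalg.matmul(points, pose, transpose_b=True)  # [N, 4]
    print(transformed)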
/safegate_pro-2021.7.6-py3-none-any.whl/homeassistant/components/numato/sensor.py
import logging
from numato_gpio import NumatoGpioError
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import CONF_ID, CONF_NAME, CONF_SENSORS
from . import (
CONF_DEVICES,
CONF_DST_RANGE,
CONF_DST_UNIT,
CONF_PORTS,
CONF_SRC_RANGE,
DATA_API,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:gauge"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the configured Numato USB GPIO ADC sensor ports."""
if discovery_info is None:
return
api = hass.data[DOMAIN][DATA_API]
sensors = []
devices = hass.data[DOMAIN][CONF_DEVICES]
for device in [d for d in devices if CONF_SENSORS in d]:
device_id = device[CONF_ID]
ports = device[CONF_SENSORS][CONF_PORTS]
for port, adc_def in ports.items():
try:
api.setup_input(device_id, port)
except NumatoGpioError as err:
_LOGGER.error(
"Failed to initialize sensor '%s' on Numato device %s port %s: %s",
adc_def[CONF_NAME],
device_id,
port,
err,
)
continue
sensors.append(
NumatoGpioAdc(
adc_def[CONF_NAME],
device_id,
port,
adc_def[CONF_SRC_RANGE],
adc_def[CONF_DST_RANGE],
adc_def[CONF_DST_UNIT],
api,
)
)
add_entities(sensors, True)
class NumatoGpioAdc(SensorEntity):
"""Represents an ADC port of a Numato USB GPIO expander."""
def __init__(self, name, device_id, port, src_range, dst_range, dst_unit, api):
"""Initialize the sensor."""
self._name = name
self._device_id = device_id
self._port = port
self._src_range = src_range
self._dst_range = dst_range
self._state = None
self._unit_of_measurement = dst_unit
self._api = api
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the state."""
try:
adc_val = self._api.read_adc_input(self._device_id, self._port)
adc_val = self._clamp_to_source_range(adc_val)
self._state = self._linear_scale_to_dest_range(adc_val)
except NumatoGpioError as err:
self._state = None
_LOGGER.error(
"Failed to update Numato device %s ADC-port %s: %s",
self._device_id,
self._port,
err,
)
def _clamp_to_source_range(self, val):
# clamp to source range
val = max(val, self._src_range[0])
val = min(val, self._src_range[1])
return val
def _linear_scale_to_dest_range(self, val):
# linear scale to dest range
src_len = self._src_range[1] - self._src_range[0]
adc_val_rel = val - self._src_range[0]
ratio = float(adc_val_rel) / float(src_len)
dst_len = self._dst_range[1] - self._dst_range[0]
dest_val = self._dst_range[0] + ratio * dst_len
return dest_val
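# Worked example of the two helpers above (values are illustrative, not taken
# from any real configuration): with src_range = (0, 1024) and
# dst_range = (0.0, 5.0), a raw ADC reading of 512 is first clamped into
# [0, 1024] and then rescaled linearly to 0.0 + (512 / 1024) * 5.0 == 2.5
# in the configured destination unit.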
/diffusersTangkhode-0.0.1.tar.gz/diffusersTangkhode-0.0.1/src/diffusers/pipelines/semantic_stable_diffusion/pipeline_semantic_stable_diffusion.py
import inspect
from itertools import repeat
from typing import Callable, List, Optional, Union
import torch
from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer
from ...models import AutoencoderKL, UNet2DConditionModel
from ...pipeline_utils import DiffusionPipeline
from ...pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker
from ...schedulers import KarrasDiffusionSchedulers
from ...utils import logging, randn_tensor
from . import SemanticStableDiffusionPipelineOutput
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
EXAMPLE_DOC_STRING = """
Examples:
```py
>>> import torch
>>> from diffusers import SemanticStableDiffusionPipeline
>>> pipe = SemanticStableDiffusionPipeline.from_pretrained(
... "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
... )
>>> pipe = pipe.to("cuda")
>>> out = pipe(
... prompt="a photo of the face of a woman",
... num_images_per_prompt=1,
... guidance_scale=7,
... editing_prompt=[
... "smiling, smile", # Concepts to apply
... "glasses, wearing glasses",
... "curls, wavy hair, curly hair",
... "beard, full beard, mustache",
... ],
... reverse_editing_direction=[
... False,
... False,
... False,
... False,
... ], # Direction of guidance i.e. increase all concepts
... edit_warmup_steps=[10, 10, 10, 10], # Warmup period for each concept
... edit_guidance_scale=[4, 5, 5, 5.4], # Guidance scale for each concept
... edit_threshold=[
... 0.99,
... 0.975,
... 0.925,
... 0.96,
... ], # Threshold for each concept. Threshold equals the percentile of the latent space that will be discarded. I.e. threshold=0.99 uses 1% of the latent dimensions
... edit_momentum_scale=0.3, # Momentum scale that will be added to the latent guidance
... edit_mom_beta=0.6, # Momentum beta
... edit_weights=[1, 1, 1, 1, 1], # Weights of the individual concepts against each other
... )
>>> image = out.images[0]
```
"""
class SemanticStableDiffusionPipeline(DiffusionPipeline):
r"""
Pipeline for text-to-image generation with latent editing.
This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)
This model builds on the implementation of ['StableDiffusionPipeline']
Args:
vae ([`AutoencoderKL`]):
Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.
text_encoder ([`CLIPTextModel`]):
Frozen text-encoder. Stable Diffusion uses the text portion of
[CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically
the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.
tokenizer (`CLIPTokenizer`):
Tokenizer of class
[CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.
scheduler ([`SchedulerMixin`]):
A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of
[`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].
safety_checker ([`Q16SafetyChecker`]):
Classification module that estimates whether generated images could be considered offensive or harmful.
Please, refer to the [model card](https://huggingface.co/CompVis/stable-diffusion-v1-4) for details.
feature_extractor ([`CLIPImageProcessor`]):
Model that extracts features from generated images to be used as inputs for the `safety_checker`.
"""
_optional_components = ["safety_checker", "feature_extractor"]
def __init__(
self,
vae: AutoencoderKL,
text_encoder: CLIPTextModel,
tokenizer: CLIPTokenizer,
unet: UNet2DConditionModel,
scheduler: KarrasDiffusionSchedulers,
safety_checker: StableDiffusionSafetyChecker,
feature_extractor: CLIPImageProcessor,
requires_safety_checker: bool = True,
):
super().__init__()
if safety_checker is None and requires_safety_checker:
logger.warning(
f"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure"
" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered"
" results in services or applications open to the public. Both the diffusers team and Hugging Face"
" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling"
" it only for use-cases that involve analyzing network behavior or auditing its results. For more"
" information, please have a look at https://github.com/huggingface/diffusers/pull/254 ."
)
if safety_checker is not None and feature_extractor is None:
raise ValueError(
"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety"
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
)
self.register_modules(
vae=vae,
text_encoder=text_encoder,
tokenizer=tokenizer,
unet=unet,
scheduler=scheduler,
safety_checker=safety_checker,
feature_extractor=feature_extractor,
)
self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)
self.register_to_config(requires_safety_checker=requires_safety_checker)
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents
def decode_latents(self, latents):
latents = 1 / self.vae.config.scaling_factor * latents
image = self.vae.decode(latents).sample
image = (image / 2 + 0.5).clamp(0, 1)
# we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16
image = image.cpu().permute(0, 2, 3, 1).float().numpy()
return image
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs
def prepare_extra_step_kwargs(self, generator, eta):
# prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
# eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
# eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
# and should be between [0, 1]
accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
extra_step_kwargs = {}
if accepts_eta:
extra_step_kwargs["eta"] = eta
# check if the scheduler accepts generator
accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys())
if accepts_generator:
extra_step_kwargs["generator"] = generator
return extra_step_kwargs
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs
def check_inputs(
self,
prompt,
height,
width,
callback_steps,
negative_prompt=None,
prompt_embeds=None,
negative_prompt_embeds=None,
):
if height % 8 != 0 or width % 8 != 0:
raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.")
if (callback_steps is None) or (
callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)
):
raise ValueError(
f"`callback_steps` has to be a positive integer but is {callback_steps} of type"
f" {type(callback_steps)}."
)
if prompt is not None and prompt_embeds is not None:
raise ValueError(
f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to"
" only forward one of the two."
)
elif prompt is None and prompt_embeds is None:
raise ValueError(
"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined."
)
elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):
raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}")
if negative_prompt is not None and negative_prompt_embeds is not None:
raise ValueError(
f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:"
f" {negative_prompt_embeds}. Please make sure to only forward one of the two."
)
if prompt_embeds is not None and negative_prompt_embeds is not None:
if prompt_embeds.shape != negative_prompt_embeds.shape:
raise ValueError(
"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but"
f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`"
f" {negative_prompt_embeds.shape}."
)
# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents
def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):
shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)
if isinstance(generator, list) and len(generator) != batch_size:
raise ValueError(
f"You have passed a list of generators of length {len(generator)}, but requested an effective batch"
f" size of {batch_size}. Make sure the batch size matches the length of the generators."
)
if latents is None:
latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)
else:
latents = latents.to(device)
# scale the initial noise by the standard deviation required by the scheduler
latents = latents * self.scheduler.init_noise_sigma
return latents
@torch.no_grad()
def __call__(
self,
prompt: Union[str, List[str]],
height: Optional[int] = None,
width: Optional[int] = None,
num_inference_steps: int = 50,
guidance_scale: float = 7.5,
negative_prompt: Optional[Union[str, List[str]]] = None,
num_images_per_prompt: int = 1,
eta: float = 0.0,
generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
latents: Optional[torch.FloatTensor] = None,
output_type: Optional[str] = "pil",
return_dict: bool = True,
callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
callback_steps: int = 1,
editing_prompt: Optional[Union[str, List[str]]] = None,
editing_prompt_embeddings: Optional[torch.Tensor] = None,
reverse_editing_direction: Optional[Union[bool, List[bool]]] = False,
edit_guidance_scale: Optional[Union[float, List[float]]] = 5,
edit_warmup_steps: Optional[Union[int, List[int]]] = 10,
edit_cooldown_steps: Optional[Union[int, List[int]]] = None,
edit_threshold: Optional[Union[float, List[float]]] = 0.9,
edit_momentum_scale: Optional[float] = 0.1,
edit_mom_beta: Optional[float] = 0.4,
edit_weights: Optional[List[float]] = None,
sem_guidance: Optional[List[torch.Tensor]] = None,
):
r"""
Function invoked when calling the pipeline for generation.
Args:
prompt (`str` or `List[str]`):
The prompt or prompts to guide the image generation.
height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
The height in pixels of the generated image.
width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):
The width in pixels of the generated image.
num_inference_steps (`int`, *optional*, defaults to 50):
The number of denoising steps. More denoising steps usually lead to a higher quality image at the
expense of slower inference.
guidance_scale (`float`, *optional*, defaults to 7.5):
Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
`guidance_scale` is defined as `w` of equation 2 of [Imagen
Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,
usually at the expense of lower image quality.
negative_prompt (`str` or `List[str]`, *optional*):
The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
if `guidance_scale` is less than `1`).
num_images_per_prompt (`int`, *optional*, defaults to 1):
The number of images to generate per prompt.
eta (`float`, *optional*, defaults to 0.0):
Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
[`schedulers.DDIMScheduler`], will be ignored for others.
generator (`torch.Generator`, *optional*):
One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
to make generation deterministic.
latents (`torch.FloatTensor`, *optional*):
Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
tensor will be generated by sampling using the supplied random `generator`.
output_type (`str`, *optional*, defaults to `"pil"`):
The output format of the generate image. Choose between
[PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
return_dict (`bool`, *optional*, defaults to `True`):
Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
plain tuple.
callback (`Callable`, *optional*):
A function that will be called every `callback_steps` steps during inference. The function will be
called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
callback_steps (`int`, *optional*, defaults to 1):
The frequency at which the `callback` function will be called. If not specified, the callback will be
called at every step.
editing_prompt (`str` or `List[str]`, *optional*):
The prompt or prompts to use for Semantic guidance. Semantic guidance is disabled by setting
`editing_prompt = None`. Guidance direction of prompt should be specified via
`reverse_editing_direction`.
editing_prompt_embeddings (`torch.Tensor>`, *optional*):
Pre-computed embeddings to use for semantic guidance. Guidance direction of embedding should be
specified via `reverse_editing_direction`.
reverse_editing_direction (`bool` or `List[bool]`, *optional*, defaults to `False`):
Whether the corresponding prompt in `editing_prompt` should be increased or decreased.
edit_guidance_scale (`float` or `List[float]`, *optional*, defaults to 5):
Guidance scale for semantic guidance. If provided as list values should correspond to `editing_prompt`.
`edit_guidance_scale` is defined as `s_e` of equation 6 of [SEGA
Paper](https://arxiv.org/pdf/2301.12247.pdf).
edit_warmup_steps (`float` or `List[float]`, *optional*, defaults to 10):
Number of diffusion steps (for each prompt) for which semantic guidance will not be applied. Momentum
will still be calculated for those steps and applied once all warmup periods are over.
`edit_warmup_steps` is defined as `delta` (δ) of [SEGA Paper](https://arxiv.org/pdf/2301.12247.pdf).
edit_cooldown_steps (`float` or `List[float]`, *optional*, defaults to `None`):
Number of diffusion steps (for each prompt) after which semantic guidance will no longer be applied.
edit_threshold (`float` or `List[float]`, *optional*, defaults to 0.9):
Threshold of semantic guidance.
edit_momentum_scale (`float`, *optional*, defaults to 0.1):
Scale of the momentum to be added to the semantic guidance at each diffusion step. If set to 0.0
momentum will be disabled. Momentum is already built up during warmup, i.e. for diffusion steps smaller
than `sld_warmup_steps`. Momentum will only be added to latent guidance once all warmup periods are
finished. `edit_momentum_scale` is defined as `s_m` of equation 7 of [SEGA
Paper](https://arxiv.org/pdf/2301.12247.pdf).
edit_mom_beta (`float`, *optional*, defaults to 0.4):
Defines how semantic guidance momentum builds up. `edit_mom_beta` indicates how much of the previous
momentum will be kept. Momentum is already built up during warmup, i.e. for diffusion steps smaller
than `edit_warmup_steps`. `edit_mom_beta` is defined as `beta_m` (β) of equation 8 of [SEGA
Paper](https://arxiv.org/pdf/2301.12247.pdf).
edit_weights (`List[float]`, *optional*, defaults to `None`):
Indicates how much each individual concept should influence the overall guidance. If no weights are
provided all concepts are applied equally. `edit_mom_beta` is defined as `g_i` of equation 9 of [SEGA
Paper](https://arxiv.org/pdf/2301.12247.pdf).
sem_guidance (`List[torch.Tensor]`, *optional*):
List of pre-generated guidance vectors to be applied at generation. Length of the list has to
correspond to `num_inference_steps`.
Returns:
[`~pipelines.semantic_stable_diffusion.SemanticStableDiffusionPipelineOutput`] or `tuple`:
[`~pipelines.semantic_stable_diffusion.SemanticStableDiffusionPipelineOutput`] if `return_dict` is True,
otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images, and the
second element is a list of `bool`s denoting whether the corresponding generated image likely represents
"not-safe-for-work" (nsfw) content, according to the `safety_checker`.
"""
# 0. Default height and width to unet
height = height or self.unet.config.sample_size * self.vae_scale_factor
width = width or self.unet.config.sample_size * self.vae_scale_factor
# 1. Check inputs. Raise error if not correct
self.check_inputs(prompt, height, width, callback_steps)
# 2. Define call parameters
batch_size = 1 if isinstance(prompt, str) else len(prompt)
if editing_prompt:
enable_edit_guidance = True
if isinstance(editing_prompt, str):
editing_prompt = [editing_prompt]
enabled_editing_prompts = len(editing_prompt)
elif editing_prompt_embeddings is not None:
enable_edit_guidance = True
enabled_editing_prompts = editing_prompt_embeddings.shape[0]
else:
enabled_editing_prompts = 0
enable_edit_guidance = False
# get prompt text embeddings
text_inputs = self.tokenizer(
prompt,
padding="max_length",
max_length=self.tokenizer.model_max_length,
return_tensors="pt",
)
text_input_ids = text_inputs.input_ids
if text_input_ids.shape[-1] > self.tokenizer.model_max_length:
removed_text = self.tokenizer.batch_decode(text_input_ids[:, self.tokenizer.model_max_length :])
logger.warning(
"The following part of your input was truncated because CLIP can only handle sequences up to"
f" {self.tokenizer.model_max_length} tokens: {removed_text}"
)
text_input_ids = text_input_ids[:, : self.tokenizer.model_max_length]
text_embeddings = self.text_encoder(text_input_ids.to(self.device))[0]
# duplicate text embeddings for each generation per prompt, using mps friendly method
bs_embed, seq_len, _ = text_embeddings.shape
text_embeddings = text_embeddings.repeat(1, num_images_per_prompt, 1)
text_embeddings = text_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)
if enable_edit_guidance:
# get safety text embeddings
if editing_prompt_embeddings is None:
edit_concepts_input = self.tokenizer(
[x for item in editing_prompt for x in repeat(item, batch_size)],
padding="max_length",
max_length=self.tokenizer.model_max_length,
return_tensors="pt",
)
edit_concepts_input_ids = edit_concepts_input.input_ids
if edit_concepts_input_ids.shape[-1] > self.tokenizer.model_max_length:
removed_text = self.tokenizer.batch_decode(
edit_concepts_input_ids[:, self.tokenizer.model_max_length :]
)
logger.warning(
"The following part of your input was truncated because CLIP can only handle sequences up to"
f" {self.tokenizer.model_max_length} tokens: {removed_text}"
)
edit_concepts_input_ids = edit_concepts_input_ids[:, : self.tokenizer.model_max_length]
edit_concepts = self.text_encoder(edit_concepts_input_ids.to(self.device))[0]
else:
edit_concepts = editing_prompt_embeddings.to(self.device).repeat(batch_size, 1, 1)
# duplicate text embeddings for each generation per prompt, using mps friendly method
bs_embed_edit, seq_len_edit, _ = edit_concepts.shape
edit_concepts = edit_concepts.repeat(1, num_images_per_prompt, 1)
edit_concepts = edit_concepts.view(bs_embed_edit * num_images_per_prompt, seq_len_edit, -1)
# here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)
# of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
# corresponds to doing no classifier free guidance.
do_classifier_free_guidance = guidance_scale > 1.0
# get unconditional embeddings for classifier free guidance
if do_classifier_free_guidance:
uncond_tokens: List[str]
if negative_prompt is None:
uncond_tokens = [""]
elif type(prompt) is not type(negative_prompt):
raise TypeError(
f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
f" {type(prompt)}."
)
elif isinstance(negative_prompt, str):
uncond_tokens = [negative_prompt]
elif batch_size != len(negative_prompt):
raise ValueError(
f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
" the batch size of `prompt`."
)
else:
uncond_tokens = negative_prompt
max_length = text_input_ids.shape[-1]
uncond_input = self.tokenizer(
uncond_tokens,
padding="max_length",
max_length=max_length,
truncation=True,
return_tensors="pt",
)
uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device))[0]
# duplicate unconditional embeddings for each generation per prompt, using mps friendly method
seq_len = uncond_embeddings.shape[1]
uncond_embeddings = uncond_embeddings.repeat(batch_size, num_images_per_prompt, 1)
uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1)
# For classifier free guidance, we need to do two forward passes.
# Here we concatenate the unconditional and text embeddings into a single batch
# to avoid doing two forward passes
if enable_edit_guidance:
text_embeddings = torch.cat([uncond_embeddings, text_embeddings, edit_concepts])
else:
text_embeddings = torch.cat([uncond_embeddings, text_embeddings])
# get the initial random noise unless the user supplied it
# 4. Prepare timesteps
self.scheduler.set_timesteps(num_inference_steps, device=self.device)
timesteps = self.scheduler.timesteps
# 5. Prepare latent variables
num_channels_latents = self.unet.in_channels
latents = self.prepare_latents(
batch_size * num_images_per_prompt,
num_channels_latents,
height,
width,
text_embeddings.dtype,
self.device,
generator,
latents,
)
# 6. Prepare extra step kwargs.
extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)
# Initialize edit_momentum to None
edit_momentum = None
self.uncond_estimates = None
self.text_estimates = None
self.edit_estimates = None
self.sem_guidance = None
for i, t in enumerate(self.progress_bar(timesteps)):
# expand the latents if we are doing classifier free guidance
latent_model_input = (
torch.cat([latents] * (2 + enabled_editing_prompts)) if do_classifier_free_guidance else latents
)
latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
# predict the noise residual
noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=text_embeddings).sample
# perform guidance
if do_classifier_free_guidance:
noise_pred_out = noise_pred.chunk(2 + enabled_editing_prompts) # [b,4, 64, 64]
noise_pred_uncond, noise_pred_text = noise_pred_out[0], noise_pred_out[1]
noise_pred_edit_concepts = noise_pred_out[2:]
# default text guidance
noise_guidance = guidance_scale * (noise_pred_text - noise_pred_uncond)
# noise_guidance = (noise_pred_text - noise_pred_edit_concepts[0])
if self.uncond_estimates is None:
self.uncond_estimates = torch.zeros((num_inference_steps + 1, *noise_pred_uncond.shape))
self.uncond_estimates[i] = noise_pred_uncond.detach().cpu()
if self.text_estimates is None:
self.text_estimates = torch.zeros((num_inference_steps + 1, *noise_pred_text.shape))
self.text_estimates[i] = noise_pred_text.detach().cpu()
if self.edit_estimates is None and enable_edit_guidance:
self.edit_estimates = torch.zeros(
(num_inference_steps + 1, len(noise_pred_edit_concepts), *noise_pred_edit_concepts[0].shape)
)
if self.sem_guidance is None:
self.sem_guidance = torch.zeros((num_inference_steps + 1, *noise_pred_text.shape))
if edit_momentum is None:
edit_momentum = torch.zeros_like(noise_guidance)
if enable_edit_guidance:
concept_weights = torch.zeros(
(len(noise_pred_edit_concepts), noise_guidance.shape[0]),
device=self.device,
dtype=noise_guidance.dtype,
)
noise_guidance_edit = torch.zeros(
(len(noise_pred_edit_concepts), *noise_guidance.shape),
device=self.device,
dtype=noise_guidance.dtype,
)
# noise_guidance_edit = torch.zeros_like(noise_guidance)
warmup_inds = []
for c, noise_pred_edit_concept in enumerate(noise_pred_edit_concepts):
self.edit_estimates[i, c] = noise_pred_edit_concept
if isinstance(edit_guidance_scale, list):
edit_guidance_scale_c = edit_guidance_scale[c]
else:
edit_guidance_scale_c = edit_guidance_scale
if isinstance(edit_threshold, list):
edit_threshold_c = edit_threshold[c]
else:
edit_threshold_c = edit_threshold
if isinstance(reverse_editing_direction, list):
reverse_editing_direction_c = reverse_editing_direction[c]
else:
reverse_editing_direction_c = reverse_editing_direction
if edit_weights:
edit_weight_c = edit_weights[c]
else:
edit_weight_c = 1.0
if isinstance(edit_warmup_steps, list):
edit_warmup_steps_c = edit_warmup_steps[c]
else:
edit_warmup_steps_c = edit_warmup_steps
if isinstance(edit_cooldown_steps, list):
edit_cooldown_steps_c = edit_cooldown_steps[c]
elif edit_cooldown_steps is None:
edit_cooldown_steps_c = i + 1
else:
edit_cooldown_steps_c = edit_cooldown_steps
if i >= edit_warmup_steps_c:
warmup_inds.append(c)
if i >= edit_cooldown_steps_c:
noise_guidance_edit[c, :, :, :, :] = torch.zeros_like(noise_pred_edit_concept)
continue
noise_guidance_edit_tmp = noise_pred_edit_concept - noise_pred_uncond
# tmp_weights = (noise_pred_text - noise_pred_edit_concept).sum(dim=(1, 2, 3))
tmp_weights = (noise_guidance - noise_pred_edit_concept).sum(dim=(1, 2, 3))
tmp_weights = torch.full_like(tmp_weights, edit_weight_c) # * (1 / enabled_editing_prompts)
if reverse_editing_direction_c:
noise_guidance_edit_tmp = noise_guidance_edit_tmp * -1
concept_weights[c, :] = tmp_weights
noise_guidance_edit_tmp = noise_guidance_edit_tmp * edit_guidance_scale_c
# torch.quantile function expects float32
if noise_guidance_edit_tmp.dtype == torch.float32:
tmp = torch.quantile(
torch.abs(noise_guidance_edit_tmp).flatten(start_dim=2),
edit_threshold_c,
dim=2,
keepdim=False,
)
else:
tmp = torch.quantile(
torch.abs(noise_guidance_edit_tmp).flatten(start_dim=2).to(torch.float32),
edit_threshold_c,
dim=2,
keepdim=False,
).to(noise_guidance_edit_tmp.dtype)
noise_guidance_edit_tmp = torch.where(
torch.abs(noise_guidance_edit_tmp) >= tmp[:, :, None, None],
noise_guidance_edit_tmp,
torch.zeros_like(noise_guidance_edit_tmp),
)
noise_guidance_edit[c, :, :, :, :] = noise_guidance_edit_tmp
# noise_guidance_edit = noise_guidance_edit + noise_guidance_edit_tmp
warmup_inds = torch.tensor(warmup_inds).to(self.device)
if len(noise_pred_edit_concepts) > warmup_inds.shape[0] > 0:
concept_weights = concept_weights.to("cpu") # Offload to cpu
noise_guidance_edit = noise_guidance_edit.to("cpu")
concept_weights_tmp = torch.index_select(concept_weights.to(self.device), 0, warmup_inds)
concept_weights_tmp = torch.where(
concept_weights_tmp < 0, torch.zeros_like(concept_weights_tmp), concept_weights_tmp
)
concept_weights_tmp = concept_weights_tmp / concept_weights_tmp.sum(dim=0)
# concept_weights_tmp = torch.nan_to_num(concept_weights_tmp)
noise_guidance_edit_tmp = torch.index_select(
noise_guidance_edit.to(self.device), 0, warmup_inds
)
noise_guidance_edit_tmp = torch.einsum(
"cb,cbijk->bijk", concept_weights_tmp, noise_guidance_edit_tmp
)
noise_guidance_edit_tmp = noise_guidance_edit_tmp
noise_guidance = noise_guidance + noise_guidance_edit_tmp
self.sem_guidance[i] = noise_guidance_edit_tmp.detach().cpu()
del noise_guidance_edit_tmp
del concept_weights_tmp
concept_weights = concept_weights.to(self.device)
noise_guidance_edit = noise_guidance_edit.to(self.device)
concept_weights = torch.where(
concept_weights < 0, torch.zeros_like(concept_weights), concept_weights
)
concept_weights = torch.nan_to_num(concept_weights)
noise_guidance_edit = torch.einsum("cb,cbijk->bijk", concept_weights, noise_guidance_edit)
noise_guidance_edit = noise_guidance_edit + edit_momentum_scale * edit_momentum
edit_momentum = edit_mom_beta * edit_momentum + (1 - edit_mom_beta) * noise_guidance_edit
if warmup_inds.shape[0] == len(noise_pred_edit_concepts):
noise_guidance = noise_guidance + noise_guidance_edit
self.sem_guidance[i] = noise_guidance_edit.detach().cpu()
if sem_guidance is not None:
edit_guidance = sem_guidance[i].to(self.device)
noise_guidance = noise_guidance + edit_guidance
noise_pred = noise_pred_uncond + noise_guidance
# compute the previous noisy sample x_t -> x_t-1
latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample
# call the callback, if provided
if callback is not None and i % callback_steps == 0:
callback(i, t, latents)
# 8. Post-processing
image = self.decode_latents(latents)
if self.safety_checker is not None:
safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors="pt").to(
self.device
)
image, has_nsfw_concept = self.safety_checker(
images=image, clip_input=safety_checker_input.pixel_values.to(text_embeddings.dtype)
)
else:
has_nsfw_concept = None
if output_type == "pil":
image = self.numpy_to_pil(image)
if not return_dict:
return (image, has_nsfw_concept)
return SemanticStableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)
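# Illustrative simplification (an assumption-labeled sketch, not code used by the
# pipeline above): for a single editing concept, the semantic-guidance term built
# inside `__call__` amounts to scaling the concept direction, zeroing everything
# below a per-sample quantile threshold, and blending in momentum. Tensor names
# and default values here are chosen for clarity only.
def _sega_single_concept_sketch(noise_pred_uncond, noise_pred_concept, momentum,
                                guidance_scale=5.0, threshold=0.9,
                                momentum_scale=0.1, mom_beta=0.4):
    # concept direction, analogous to `noise_guidance_edit_tmp` in the loop above
    direction = (noise_pred_concept - noise_pred_uncond) * guidance_scale
    # keep only the strongest latent dimensions per sample/channel (edit_threshold)
    quantile = torch.quantile(
        direction.abs().flatten(start_dim=2).float(), threshold, dim=2)
    mask = direction.abs() >= quantile[:, :, None, None].to(direction.dtype)
    guidance = torch.where(mask, direction, torch.zeros_like(direction))
    # add momentum and update it (edit_momentum_scale / edit_mom_beta)
    guidance = guidance + momentum_scale * momentum
    momentum = mom_beta * momentum + (1 - mom_beta) * guidance
    return guidance, momentum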
/yuuno-core-1.3.1.zip/yuuno-core-1.3.1/yuuno/yuuno.py
# -*- coding: utf-8 -*-
# Yuuno - IPython + VapourSynth
# Copyright (C) 2017 StuxCrystal (Roland Netzsch <[email protected]>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Union, Sequence, Type, TypeVar, Optional
from traitlets.utils.importstring import import_item
from traitlets import Instance, List
from traitlets import default
from yuuno.core.environment import Environment
from yuuno.core.extension import Extension
from yuuno.core.namespace import Namespace
from yuuno.core.settings import Settings
from yuuno.output import YuunoImageOutput
T = TypeVar("T")
class Yuuno(Settings):
"""
Main-instance of Yuuno.
"""
environment: Environment = Instance(Environment)
extensions: Sequence[Extension] = List(Instance(Extension))
output: YuunoImageOutput = Instance(YuunoImageOutput)
namespace: Namespace = Instance(Namespace)
@default("output")
def _default_output(self):
return YuunoImageOutput(yuuno=self)
@default("namespace")
def _default_namespace(self):
return Namespace()
def _actual_extensions(self):
return self.extension_types + self.environment.additional_extensions()
def _load_extensions(self) -> Sequence[Extension]:
exts = []
for extension in self._actual_extensions():
if callable(extension):
ext_cls = extension
else:
ext_cls = import_item(extension)
if not ext_cls.is_supported():
self.log.info(f"Yuuno-Extension {ext_cls.get_name()} reported that it is not supported on this system.")
continue
else:
self.log.debug(f"Yuuno-Extension {ext_cls.get_name()} loaded.")
exts.append(ext_cls(parent=self))
return exts
def _initialize_extensions(self) -> None:
self.extensions = self._load_extensions()
self.environment.post_extension_load()
failed_extensions = []
for extension in self.extensions:
try:
extension.initialize()
except Exception as e:
failed_extensions.append(extension)
import traceback
traceback.print_exception(type(e), e, e.__traceback__)
else:
self.log.debug(f"Yuuno-Extension {extension.get_name()} initialized.")
for extension in failed_extensions:
self.extensions.remove(extension)
def _deinitialize_extensions(self) -> None:
for extension in reversed(self.extensions):
extension.deinitialize()
def get_extension(self, cls: Union[Type[T], str]) -> Optional[T]:
"""
Returns the loaded extension given by type.
:param cls: The class of the object.
:return: The given extension or None
"""
for extension in self.extensions:
if isinstance(cls, str):
if extension.get_name() == cls:
return extension
continue
if isinstance(extension, cls):
return extension
return None
def start(self) -> None:
"""
Initializes the environment and loads all extensions.
"""
self._initialize_extensions()
self.environment.initialize()
def stop(self) -> None:
"""
Deinitializes the environment and extensions and clears the singleton instance.
"""
self.environment.deinitialize()
self._deinitialize_extensions()
self.clear_instance()
def wrap(self, obj: object) -> object:
"""
Create the clip-wrapper for a specific object.
"""
return self.registry.wrap(obj)
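# Minimal usage sketch (assumption: `yuuno` is an already-configured Yuuno
# instance whose `environment` trait has been set by the hosting integration;
# "inline" is an illustrative extension name only).
#
# yuuno.start()                        # load and initialize extensions
# ext = yuuno.get_extension("inline")  # by name, or pass the extension class
# if ext is not None:
#     ...                              # interact with the extension
# yuuno.stop()                         # deinitialize and clear the singleton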
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/users/item/people/item/person_item_request_builder.py
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from .....models import person
from .....models.o_data_errors import o_data_error
class PersonItemRequestBuilder():
"""
Provides operations to manage the people property of the microsoft.graph.user entity.
"""
def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
"""
Instantiates a new PersonItemRequestBuilder and sets the default values.
Args:
pathParameters: The raw url or the Url template parameters for the request.
requestAdapter: The request adapter to use to execute the requests.
"""
if path_parameters is None:
raise Exception("path_parameters cannot be undefined")
if request_adapter is None:
raise Exception("request_adapter cannot be undefined")
# Url template to use to build the URL for the current request builder
self.url_template: str = "{+baseurl}/users/{user%2Did}/people/{person%2Did}{?%24select}"
url_tpl_params = get_path_parameters(path_parameters)
self.path_parameters = url_tpl_params
self.request_adapter = request_adapter
async def get(self,request_configuration: Optional[PersonItemRequestBuilderGetRequestConfiguration] = None) -> Optional[person.Person]:
"""
Read-only. The most relevant people to the user. The collection is ordered by their relevance to the user, which is determined by the user's communication, collaboration and business relationships. A person is an aggregation of information from across mail, contacts and social networks.
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: Optional[person.Person]
"""
request_info = self.to_get_request_information(
request_configuration
)
from .....models.o_data_errors import o_data_error
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
from .....models import person
return await self.request_adapter.send_async(request_info, person.Person, error_mapping)
def to_get_request_information(self,request_configuration: Optional[PersonItemRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
"""
Read-only. The most relevant people to the user. The collection is ordered by their relevance to the user, which is determined by the user's communication, collaboration and business relationships. A person is an aggregation of information from across mail, contacts and social networks.
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: RequestInformation
"""
request_info = RequestInformation()
request_info.url_template = self.url_template
request_info.path_parameters = self.path_parameters
request_info.http_method = Method.GET
request_info.headers["Accept"] = ["application/json"]
if request_configuration:
request_info.add_request_headers(request_configuration.headers)
request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
request_info.add_request_options(request_configuration.options)
return request_info
@dataclass
class PersonItemRequestBuilderGetQueryParameters():
"""
Read-only. The most relevant people to the user. The collection is ordered by their relevance to the user, which is determined by the user's communication, collaboration and business relationships. A person is an aggregation of information from across mail, contacts and social networks.
"""
def get_query_parameter(self,original_name: Optional[str] = None) -> str:
"""
Maps the query parameters names to their encoded names for the URI template parsing.
Args:
originalName: The original query parameter name in the class.
Returns: str
"""
if original_name is None:
raise Exception("original_name cannot be undefined")
if original_name == "select":
return "%24select"
return original_name
# Select properties to be returned
select: Optional[List[str]] = None
@dataclass
class PersonItemRequestBuilderGetRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, Union[str, List[str]]]] = None
# Request options
options: Optional[List[RequestOption]] = None
# Request query parameters
query_parameters: Optional[PersonItemRequestBuilder.PersonItemRequestBuilderGetQueryParameters] = None
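# Usage sketch (assumptions: `adapter` is an already-authenticated kiota
# RequestAdapter and the ids in the raw URL are placeholders, not real
# directory objects).
async def _example_get_person(adapter: RequestAdapter):
    builder = PersonItemRequestBuilder(
        adapter,
        "https://graph.microsoft.com/beta/users/USER_ID/people/PERSON_ID",
    )
    return await builder.get()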
/dials-0.0.1.tar.gz/dials-0.0.1/algorithms/integration/image_integrator.py
from __future__ import absolute_import, division, print_function
import logging
logger = logging.getLogger(__name__)
class TimingInfo(object):
"""
A class to contain timing info.
"""
def __init__(self):
self.read = 0
self.initialize = 0
self.process = 0
self.finalize = 0
self.total = 0
self.user = 0
def __str__(self):
""" Convert to string. """
from libtbx.table_utils import format as table
rows = [
["Read time", "%.2f seconds" % (self.read)],
["Pre-process time", "%.2f seconds" % (self.initialize)],
["Process time", "%.2f seconds" % (self.process)],
["Post-process time", "%.2f seconds" % (self.finalize)],
["Total time", "%.2f seconds" % (self.total)],
["User time", "%.2f seconds" % (self.user)],
]
return table(rows, justify="right", prefix=" ")
class ProcessorImageBase(object):
""" Processor interface class. """
def __init__(self, manager):
"""
Initialise the processor.
The processor requires a manager class implementing the Manager interface.
This class executes all the workers in separate threads and accumulates the
results to expose to the user.
:param manager: The processing manager
:param params: The phil parameters
"""
self.manager = manager
@property
def executor(self):
"""
Get the executor
:return: The executor
"""
return self.manager.executor
@executor.setter
def executor(self, function):
"""
Set the executor
:param function: The executor
"""
self.manager.executor = function
def process(self):
"""
Do all the processing tasks.
:return: The processing results
"""
from time import time
from dials.util.mp import multi_node_parallel_map
import platform
start_time = time()
self.manager.initialize()
mp_method = self.manager.params.integration.mp.method
mp_njobs = self.manager.params.integration.mp.njobs
mp_nproc = min(len(self.manager), self.manager.params.integration.mp.nproc)
if (
mp_nproc > 1 and platform.system() == "Windows"
): # Python multiprocessing relies on fork, which is unavailable on Windows
logger.warning(
"\n"
+ "*" * 80
+ "\n"
+ "Multiprocessing is not available on windows. Setting nproc = 1\n"
+ "*" * 80
+ "\n"
)
mp_nproc = 1
assert mp_nproc > 0, "Invalid number of processors"
logger.info(self.manager.summary())
logger.info(" Using %s with %d parallel job(s)\n" % (mp_method, mp_nproc))
if mp_nproc > 1:
def process_output(result):
for message in result[1]:
logger.log(message.levelno, message.msg)
self.manager.accumulate(result[0])
result[0].reflections = None
result[0].data = None
def execute_task(task):
from dials.util import log
log.config_simple_cached()
result = task()
handlers = logging.getLogger("dials").handlers
assert len(handlers) == 1, "Invalid number of logging handlers"
return result, handlers[0].messages()
multi_node_parallel_map(
func=execute_task,
iterable=list(self.manager.tasks()),
njobs=mp_njobs,
nproc=mp_nproc,
callback=process_output,
method=mp_method,
preserve_order=True,
preserve_exception_message=True,
)
else:
for task in self.manager.tasks():
self.manager.accumulate(task())
self.manager.finalize()
end_time = time()
self.manager.time.user_time = end_time - start_time
result = self.manager.result()
return result, self.manager.time
class Result(object):
"""
A class representing a processing result.
"""
def __init__(self, index, reflections):
"""
Initialise the data.
:param index: The processing job index
:param reflections: The processed reflections
:param data: Other processed data
"""
self.index = index
self.reflections = reflections
class Dataset(object):
def __init__(self, frames, size):
from dials.array_family import flex
self.frames = frames
nframes = frames[1] - frames[0]
self.data = []
self.mask = []
for sz in size:
self.data.append(flex.double(flex.grid(nframes, sz[0], sz[1])))
self.mask.append(flex.bool(flex.grid(nframes, sz[0], sz[1])))
def set_image(self, index, data, mask):
from dials.array_family import flex
for d1, d2 in zip(self.data, data):
h, w = d2.all()
d2.reshape(flex.grid(1, h, w))
d1[index : index + 1, :, :] = d2.as_double()
for m1, m2 in zip(self.mask, mask):
h, w = m2.all()
m2.reshape(flex.grid(1, h, w))
m1[index : index + 1, :, :] = m2
class Task(object):
"""
A class to perform a null task.
"""
def __init__(self, index, frames, reflections, experiments, params, executor):
"""
Initialise the task
:param index: The index of the processing job
:param frames: The frames to process
:param experiments: The list of experiments
:param reflections: The list of reflections
:param params The processing parameters
:param executor: The executor class
"""
self.index = index
self.frames = frames
self.experiments = experiments
self.reflections = reflections
self.params = params
self.executor = executor
def __call__(self):
"""
Do the processing.
:return: The processed data
"""
from dials.model.data import make_image
from dials.model.data import MultiPanelImageVolume
from dials.model.data import ImageVolume
from dials.algorithms.integration.processor import job
from time import time
# Set the job index
job.index = self.index
# Get the start time
start_time = time()
# Check all reflections have same imageset and get it
exp_id = list(set(self.reflections["id"]))
imageset = self.experiments[exp_id[0]].imageset
for i in exp_id[1:]:
assert (
self.experiments[i].imageset == imageset
), "Task can only handle 1 imageset"
# Get the sub imageset
frame00, frame01 = self.frames
try:
frame10, frame11 = imageset.get_array_range()
except Exception:
frame10, frame11 = (0, len(imageset))
try:
assert frame00 < frame01
assert frame10 < frame11
assert frame00 >= frame10
assert frame01 <= frame11
index0 = frame00 - frame10
index1 = index0 + (frame01 - frame00)
assert index0 < index1
assert index0 >= 0
assert index1 <= len(imageset)
imageset = imageset[index0:index1]
except Exception:
raise RuntimeError("Programmer Error: bad array range")
try:
frame0, frame1 = imageset.get_array_range()
except Exception:
frame0, frame1 = (0, len(imageset))
# Initialise the dataset
image_volume = MultiPanelImageVolume()
for panel in self.experiments[0].detector:
image_volume.add(
ImageVolume(
frame0, frame1, panel.get_image_size()[1], panel.get_image_size()[0]
)
)
# Read all the images into a block of data
read_time = 0.0
for i in range(len(imageset)):
st = time()
image = imageset.get_corrected_data(i)
mask = imageset.get_mask(i)
if self.params.integration.lookup.mask is not None:
assert len(mask) == len(self.params.integration.lookup.mask), (
"Mask/Image are incorrect size %d %d"
% (len(mask), len(self.params.integration.lookup.mask))
)
mask = tuple(
m1 & m2 for m1, m2 in zip(self.params.integration.lookup.mask, mask)
)
image_volume.set_image(frame0 + i, make_image(image, mask))
read_time += time() - st
del image
del mask
# Process the data
st = time()
data = self.executor.process(image_volume, self.experiments, self.reflections)
process_time = time() - st
# Set the result values
result = Result(self.index, self.reflections)
result.read_time = read_time
result.process_time = process_time
result.total_time = time() - start_time
result.data = data
return result
class ManagerImage(object):
"""
A class to manage processing book-keeping
"""
def __init__(self, experiments, reflections, params):
"""
Initialise the manager.
:param experiments: The list of experiments
:param reflections: The list of reflections
:param params: The phil parameters
"""
# Initialise the callbacks
self.executor = None
# Save some data
self.experiments = experiments
self.reflections = reflections
# Save some parameters
self.params = params
# Set the finalized flag to False
self.finalized = False
# Initialise the timing information
self.time = TimingInfo()
def initialize(self):
"""
Initialise the processing
"""
from dials_algorithms_integration_integrator_ext import (
ReflectionManagerPerImage,
)
from time import time
# Get the start time
start_time = time()
# Ensure the reflections contain bounding boxes
assert "bbox" in self.reflections, "Reflections have no bbox"
# Split the reflections into partials
self._split_reflections()
# Create the reflection manager
frames = self.experiments[0].scan.get_array_range()
self.manager = ReflectionManagerPerImage(frames, self.reflections)
# Parallel reading of HDF5 from the same handle is not allowed. Python
# multiprocessing is a bit messed up and used fork on linux so need to
# close and reopen file.
for exp in self.experiments:
if exp.imageset.reader().is_single_file_reader():
exp.imageset.reader().nullify_format_instance()
# Set the initialization time
self.time.initialize = time() - start_time
def task(self, index):
"""
Get a task.
"""
return Task(
index=index,
frames=self.manager.frames(index),
reflections=self.manager.split(index),
experiments=self.experiments,
params=self.params,
executor=self.executor,
)
def tasks(self):
"""
Iterate through the tasks.
"""
for i in range(len(self)):
yield self.task(i)
def accumulate(self, result):
"""
Accumulate the results.
"""
self.manager.accumulate(result.index, result.reflections)
if result.data is not None:
self.executor.accumulate(result.index, result.data)
self.time.read += result.read_time
self.time.process += result.process_time
self.time.total += result.total_time
def finalize(self):
"""
Finalize the processing and finish.
"""
from time import time
# Get the start time
start_time = time()
# Check manager is finished
assert self.manager.finished(), "Manager is not finished"
# Update the time and finalized flag
self.time.finalize = time() - start_time
self.finalized = True
def result(self):
"""
Return the result.
:return: The result
"""
assert self.finalized, "Manager is not finalized"
return self.reflections
def finished(self):
"""
Return if all tasks have finished.
:return: True/False all tasks have finished
"""
return self.finalized and self.manager.finished()
def __len__(self):
"""
Return the number of tasks.
:return: the number of tasks
"""
return len(self.manager)
def summary(self):
return ""
def _split_reflections(self):
"""
Split the reflections into partials or over job boundaries
"""
# Optionally split the reflection table into partials, otherwise,
# split over job boundaries
num_full = len(self.reflections)
self.reflections.split_partials()
num_partial = len(self.reflections)
assert num_partial >= num_full, "Invalid number of partials"
if num_partial > num_full:
logger.info(
" Split %d reflections into %d partial reflections\n"
% (num_full, num_partial)
)
class ProcessorImage(ProcessorImageBase):
""" Top level processor for per image processing. """
def __init__(self, experiments, reflections, params):
""" Initialise the manager and the processor. """
# Create the processing manager
manager = ManagerImage(experiments, reflections, params)
# Initialise the processor
super(ProcessorImage, self).__init__(manager)
class InitializerRot(object):
"""
A pre-processing class for oscillation data.
"""
def __init__(self, experiments, params):
"""
Initialise the pre-processor.
"""
self.experiments = experiments
self.params = params
def __call__(self, reflections):
"""
Do some pre-processing.
"""
from dials.array_family import flex
# Compute some reflection properties
reflections.compute_zeta_multi(self.experiments)
reflections.compute_d(self.experiments)
reflections.compute_bbox(self.experiments)
# Filter the reflections by zeta
mask = flex.abs(reflections["zeta"]) < self.params.filter.min_zeta
reflections.set_flags(mask, reflections.flags.dont_integrate)
# Filter the reflections by powder ring
if self.params.filter.powder_filter is not None:
mask = self.params.filter.powder_filter(reflections["d"])
reflections.set_flags(mask, reflections.flags.in_powder_ring)
class FinalizerRot(object):
"""
A post-processing class for oscillation data.
"""
def __init__(self, experiments, params):
"""
Initialise the post processor.
"""
self.experiments = experiments
self.params = params
def __call__(self, reflections):
"""
Do some post processing.
"""
# Compute the corrections
reflections.compute_corrections(self.experiments)
class ImageIntegratorExecutor(object):
def __init__(self):
pass
def process(self, image_volume, experiments, reflections):
from dials.algorithms.integration.processor import frame_hist, job
# Compute the partiality
reflections.compute_partiality(experiments)
# Get some info
full_value = 0.997
fully_recorded = reflections["partiality"] > full_value
npart = fully_recorded.count(False)
nfull = fully_recorded.count(True)
nice = reflections.get_flags(reflections.flags.in_powder_ring).count(True)
nint = reflections.get_flags(reflections.flags.dont_integrate).count(False)
ntot = len(reflections)
# Write some output
logger.info("")
logger.info(" Beginning integration job %d" % job.index)
logger.info("")
logger.info(
" Frames: %d -> %d" % (image_volume.frame0(), image_volume.frame1())
)
logger.info("")
logger.info(" Number of reflections")
logger.info(" Partial: %d" % npart)
logger.info(" Full: %d" % nfull)
logger.info(" In ice ring: %d" % nice)
logger.info(" Integrate: %d" % nint)
logger.info(" Total: %d" % ntot)
logger.info("")
# Print a histogram of reflections on frames
if image_volume.frame1() - image_volume.frame0() > 1:
logger.info(
" The following histogram shows the number of reflections predicted"
)
logger.info(" to have all or part of their intensity on each frame.")
logger.info("")
logger.info(frame_hist(reflections["bbox"], prefix=" ", symbol="*"))
logger.info("")
# Compute the shoebox mask
reflections.compute_mask(experiments=experiments, image_volume=image_volume)
# Compute the background
reflections.compute_background(
experiments=experiments, image_volume=image_volume
)
# Compute the summed intensity
reflections.compute_summed_intensity(image_volume=image_volume)
# Compute the centroid
reflections.compute_centroid(experiments=experiments, image_volume=image_volume)
# Get some reflection info
image_volume.update_reflection_info(reflections)
# Print some info
fmt = " Integrated % 5d (sum) + % 5d (prf) / % 5d reflections"
nsum = reflections.get_flags(reflections.flags.integrated_sum).count(True)
nprf = reflections.get_flags(reflections.flags.integrated_prf).count(True)
ntot = len(reflections)
logger.info(fmt % (nsum, nprf, ntot))
class ImageIntegrator(object):
"""
A class that does integration directly on the image skipping the shoebox
creation step.
"""
def __init__(self, experiments, reflections, params):
"""
Initialize the integrator
:param experiments: The experiment list
:param reflections: The reflections to process
:param params: The parameters to use
"""
# Check all reflections have same imageset and get it
imageset = experiments[0].imageset
for expr in experiments:
assert expr.imageset == imageset, "All experiments must share an imageset"
# Save some stuff
self.experiments = experiments
self.reflections = reflections
# `Parameters` is not defined in this module; import it locally (assumption:
# the integrator module provides `Parameters.from_phil`).
from dials.algorithms.integration.integrator import Parameters
self.params = Parameters.from_phil(params.integration)
self.profile_model_report = None
self.integration_report = None
def integrate(self):
"""
Integrate the data
"""
from dials.algorithms.integration.report import IntegrationReport
from dials.util.command_line import heading
# Init the report
self.profile_model_report = None
self.integration_report = None
# Heading
logger.info("=" * 80)
logger.info("")
logger.info(heading("Processing reflections"))
logger.info("")
# Create summary format
fmt = (
" Processing the following experiments:\n"
"\n"
" Experiments: %d\n"
" Beams: %d\n"
" Detectors: %d\n"
" Goniometers: %d\n"
" Scans: %d\n"
" Crystals: %d\n"
" Imagesets: %d\n"
)
# Print the summary
logger.info(
fmt
% (
len(self.experiments),
len(self.experiments.beams()),
len(self.experiments.detectors()),
len(self.experiments.goniometers()),
len(self.experiments.scans()),
len(self.experiments.crystals()),
len(self.experiments.imagesets()),
)
)
# Print a heading
logger.info("=" * 80)
logger.info("")
logger.info(heading("Integrating reflections"))
logger.info("")
# Initialise the processing
initialize = InitializerRot(self.experiments, self.params)
initialize(self.reflections)
        # Construct the image integrator processor
processor = ProcessorImage(self.experiments, self.reflections, self.params)
processor.executor = ImageIntegratorExecutor()
# Do the processing
self.reflections, time_info = processor.process()
# Finalise the processing
finalize = FinalizerRot(self.experiments, self.params)
finalize(self.reflections)
# Create the integration report
self.integration_report = IntegrationReport(self.experiments, self.reflections)
logger.info("")
logger.info(self.integration_report.as_str(prefix=" "))
# Print the time info
logger.info(str(time_info))
logger.info("")
# Return the reflections
return self.reflections
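# Hedged usage sketch (not part of the original module): a minimal driver,
# assuming the caller has already built an experiment list, a reflection
# table and a phil `params` object that carries an `integration` scope.
def _example_run_image_integration(experiments, reflections, params):
    """Illustrative only: integrate reflections directly on the images."""
    integrator = ImageIntegrator(experiments, reflections, params)
    integrated = integrator.integrate()
    # integrate() populates integration_report as a side effect and already
    # logs it, so callers usually only need the returned reflection table.
    return integrated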
/chatnoir_pyterrier-2.0.8-py3-none-any.whl/chatnoir_pyterrier/retrieve.py
from dataclasses import dataclass, field
from functools import reduce
from itertools import islice
from typing import Set, Optional, Iterable, Union, Any, Dict
from chatnoir_api import Index, Result, Slop, MinimalResult, ExplainedResult, \
MinimalResultStaging, ResultStaging, ExplainedMinimalResult, \
ExplainedMinimalResultStaging, ExplainedResultStaging
from chatnoir_api.v1 import (
search, search_phrases
)
from chatnoir_api.v1.defaults import (
DEFAULT_INDEX, DEFAULT_SLOP, DEFAULT_RETRIES, DEFAULT_BACKOFF_SECONDS
)
from pandas import DataFrame
from pandas.core.groupby import DataFrameGroupBy
from pyterrier.batchretrieve import BatchRetrieveBase
from pyterrier.model import add_ranks
from tqdm import tqdm
from chatnoir_pyterrier.feature import Feature
@dataclass
class ChatNoirRetrieve(BatchRetrieveBase):
name = "ChatNoirRetrieve"
api_key: str
index: Union[Index, Set[Index]] = field(
default_factory=lambda: DEFAULT_INDEX,
)
phrases: bool = False
slop: Slop = DEFAULT_SLOP
features: Union[Feature, Set[Feature]] = Feature.NONE
filter_unknown: bool = False
num_results: Optional[int] = 10
staging: bool = False
page_size: int = 100
retries: int = DEFAULT_RETRIES
backoff_seconds: float = DEFAULT_BACKOFF_SECONDS
verbose: bool = False
def __post_init__(self):
super().__init__(verbose=self.verbose)
def _merge_result(
self,
row: Dict[str, Any],
result: Union[
MinimalResult, ExplainedMinimalResult,
Result, ExplainedResult,
MinimalResultStaging, ExplainedMinimalResultStaging,
ResultStaging, ExplainedResultStaging,
]
) -> Dict[str, Any]:
row = {
**row,
"docno": result.trec_id,
"score": result.score,
}
if Feature.UUID in self.features:
row["uuid"] = result.uuid
if Feature.TREC_ID in self.features:
row["trec_id"] = result.trec_id
if Feature.WARC_ID in self.features:
row["warc_id"] = result.warc_id
if Feature.INDEX in self.features:
row["index"] = result.index.value
if Feature.CRAWL_DATE in self.features:
row["crawl_date"] = result.crawl_date
if Feature.TARGET_HOSTNAME in self.features:
row["target_hostname"] = result.target_hostname
if Feature.TARGET_URI in self.features:
row["target_uri"] = result.target_uri
if Feature.CACHE_URI in self.features:
row["cache_uri"] = result.cache_uri
if Feature.PAGE_RANK in self.features:
row["page_rank"] = result.page_rank
if Feature.SPAM_RANK in self.features:
row["spam_rank"] = result.spam_rank
if Feature.TITLE_HIGHLIGHTED in self.features:
row["title_highlighted"] = result.title.html
if Feature.TITLE_TEXT in self.features:
row["title_text"] = result.title.text
if Feature.SNIPPET_HIGHLIGHTED in self.features:
row["snippet_highlighted"] = result.snippet.html
if Feature.SNIPPET_TEXT in self.features:
row["snippet_text"] = result.snippet.text
if Feature.EXPLANATION in self.features:
row["explanation"] = result.explanation
if Feature.CONTENT in self.features:
row["html"] = result.cache_contents(plain=False)
if Feature.CONTENT_PLAIN in self.features:
row["html_plain"] = result.cache_contents(plain=True)
        if Feature.CONTENT_TYPE in self.features:
            row["content_type"] = result.content_type
if Feature.LANGUAGE in self.features:
row["language"] = result.language
return row
def _transform_query(self, topic: DataFrame) -> DataFrame:
if len(topic.index) != 1:
raise RuntimeError("Can only transform one query at a time.")
row: Dict[str, Any] = topic.to_dict(orient="records")[0]
query: str = row["query"]
page_size: int
if self.num_results is not None:
page_size = min(self.page_size, self.num_results)
else:
page_size = self.page_size
features: Feature
if isinstance(self.features, Set):
features = reduce(
lambda feature_a, feature_b: feature_a | feature_b,
self.features
)
else:
features = self.features
explain: bool = Feature.EXPLANATION in features
results: Iterable[Union[
MinimalResult, ExplainedMinimalResult,
Result, ExplainedResult,
MinimalResultStaging, ExplainedMinimalResultStaging,
ResultStaging, ExplainedResultStaging,
]]
if not self.phrases:
results = search(
api_key=self.api_key,
query=query,
index=self.index,
minimal=False,
explain=explain,
staging=self.staging,
page_size=page_size,
retries=self.retries,
backoff_seconds=self.backoff_seconds,
).results
else:
results = search_phrases(
api_key=self.api_key,
query=query,
index=self.index,
minimal=False,
slop=self.slop,
explain=explain,
staging=self.staging,
page_size=page_size,
retries=self.retries,
backoff_seconds=self.backoff_seconds,
).results
if self.filter_unknown:
# Filter unknown results, i.e., when the TREC ID is missing.
results = (
result
for result in results
if result.trec_id is not None
)
if self.num_results is not None:
results = islice(results, self.num_results)
return DataFrame([
self._merge_result(row, result)
for result in results
])
def transform(self, topics: DataFrame) -> DataFrame:
if not isinstance(topics, DataFrame):
raise RuntimeError("Can only transform dataframes.")
if not {'qid', 'query'}.issubset(topics.columns):
raise RuntimeError("Needs qid and query columns.")
if len(topics) == 0:
return self._transform_query(topics)
topics_by_query: DataFrameGroupBy = topics.groupby(
by=["qid"],
as_index=False,
sort=False,
)
retrieved: DataFrame
if self.verbose:
            # Show progress while searching the queries.
tqdm.pandas(
desc="Searching with ChatNoir",
unit="query",
)
retrieved = topics_by_query.progress_apply(
self._transform_query
)
else:
retrieved = topics_by_query.apply(self._transform_query)
retrieved = retrieved.reset_index(drop=True)\
.sort_values(by=["score"], ascending=False)
retrieved = add_ranks(retrieved)
return retrieved
def __hash__(self):
return hash((
self.api_key,
(
tuple(sorted(self.index, key=lambda index: index.name))
if isinstance(self.index, Set)
else self.index
),
self.phrases,
self.slop,
(
                tuple(sorted(self.features))
if isinstance(self.features, Set)
else self.features
),
self.filter_unknown,
self.num_results,
self.page_size,
self.retries,
self.backoff_seconds,
self.verbose,
))
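# Hedged usage sketch (not part of the original module): building the
# retriever and transforming a single-query topics frame. The API key and
# query text below are placeholder assumptions.
def _example_chatnoir_search(api_key: str) -> DataFrame:
    """Illustrative only: run one query through ChatNoir via PyTerrier."""
    retriever = ChatNoirRetrieve(
        api_key=api_key,
        features={Feature.TITLE_TEXT, Feature.SNIPPET_TEXT},
        num_results=5,
    )
    topics = DataFrame([{"qid": "1", "query": "information retrieval"}])
    return retriever.transform(topics)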
/TOPSIS-Karanbir-101803235-0.1.tar.gz/TOPSIS-Karanbir-101803235-0.1/topsis_analysis/topsispackage.py
def topsis(inputname,w,im,outputname):
import pandas as pd
import sys
import os
def is_numeric1(t):
try:
t=float(t)
if (isinstance(t, int)==True or isinstance(t, float)==True):
return True
except:
print("Not a numeric value in columns 2nd and above!")
sys.exit(0)
return False
    if not os.path.exists(inputname):  ## checking if the file exists
print("No such file exists")
sys.exit(0)
a=[inputname,outputname]
for val1 in a: ##correct file type should be passed
nametemp=val1.split('.')
if nametemp[1]!="csv":
print("Only .csv files allowed")
sys.exit(0)
df1=pd.read_csv(inputname)
df=pd.read_csv(inputname) ##reading the data frame
if df.shape[1]<=3: ## every csv should have more than 3 columns
print(" No. columns should be greater than 3! ")
sys.exit(0)
    for i in range(1,df.shape[1]): ## checking that all columns from the 2nd onward hold numeric values
for j in range(df.shape[0]):
if(is_numeric1(df.iloc[j,i]))==False:
print(df.iloc[j,i])
print("All the values in 2nd column and further should be numeric")
sys.exit(0)
impact1=im
totalweight=0.00
weight1=w
    impacts=impact1.split(',') ## if separated by commas, the length equals the number of criteria columns
    weights=weight1.split(',') ## if separated by commas, the length equals the number of criteria columns
for i in range(len(weights)):
totalweight=totalweight+float(weights[i])
if df.shape[1]-1 != len(impacts) or df.shape[1]-1 != len(weights )or len(impacts)!= len(weights):
print("Either the impacts or weights are not equal to number of columns(starting from 2nd) or the impacts or weights are not separated by commas!")
sys.exit(0)
for i in impacts: ##Impacts must be either +ve or -ve.
if i not in ["+","-"]:
print("Impacts should be either + or -!")
sys.exit(0)
##vector normalization
xsquares=[0]*(df.shape[1])
for i in range(1,df.shape[1]):
for j in range(df.shape[0]):
xsquares[i]=xsquares[i]+(df.iloc[j,i])*(df.iloc[j,i])
for i in range(1,df.shape[1]):
xsquares[i]=(xsquares[i])**0.5
for i in range(1,df.shape[1]):
for j in range(df.shape[0]):
df.iloc[j,i]=(df.iloc[j,i])/xsquares[i]
##weight assignment
for i in range(1,df.shape[1]):
for j in range(df.shape[0]):
df.iloc[j,i]=(df.iloc[j,i])*(float(weights[i-1]))/totalweight
    # finding ideal best and ideal worst
##vjplus is ideal best and vjminus is ideal worst
vjplus=[0]*(df.shape[1])
vjminus=[0]*(df.shape[1])
for i in range(1,df.shape[1]):
if impacts[i-1]=="+":
vjplus[i]=max(df.iloc[:,i])
vjminus[i]=min(df.iloc[:,i])
elif impacts[i-1]=="-":
vjplus[i]=min(df.iloc[:,i])
vjminus[i]=max(df.iloc[:,i])
    ## calculating euclidean distances and performance scores
siplus=[0]*(df.shape[0])
siminus=[0]*(df.shape[0])
si=[0]*(df.shape[0])
pi=[0]*(df.shape[0])
for k in range(df.shape[0]):
for l in range(1,df.shape[1]):
siplus[k]=siplus[k]+(df.iloc[k,l]-vjplus[l])*(df.iloc[k,l]-vjplus[l])
siminus[k]=siminus[k]+(df.iloc[k,l]-vjminus[l])*(df.iloc[k,l]-vjminus[l])
for k in range(df.shape[0]):
siplus[k]=(siplus[k])**0.5
siminus[k]=(siminus[k])**0.5
si[k]=siplus[k]+siminus[k]
pi[k]=siminus[k]/si[k]
df=df1
##now adding the topsis score to dataframe
df["Topsis Score"]=pi
##now ranking according to topsis score
df["Rank"]=df["Topsis Score"].rank(ascending=False)
##making an output file
df.to_csv(outputname,index=False)
print("Output generated successfully")
print(df)
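# Hedged usage sketch (not part of the original package): weights and impacts
# are comma-separated strings with one entry per criteria column (every
# column after the first). The file names below are placeholder assumptions.
def example_run_topsis():
    """Illustrative only: rank alternatives from a 4-criteria CSV file."""
    # Assumes "decision_matrix.csv" has an identifier column followed by four
    # numeric criteria columns; rankings are written to "topsis_result.csv".
    topsis("decision_matrix.csv", "1,1,1,2", "+,+,-,+", "topsis_result.csv")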
/py-az-cli-0.1.8.tar.gz/py-az-cli-0.1.8/pyaz/logicapp/__init__.py
from .. pyaz_utils import _call_az
def delete(name, resource_group, slot=None, yes=None):
'''
Delete a logic app.
Required Parameters:
- name -- name of the logic app.
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
    - slot -- the name of the slot. Defaults to the production slot if not specified
- yes -- Do not prompt for confirmation.
'''
return _call_az("az logicapp delete", locals())
def stop(name, resource_group, slot=None):
'''
Stop a logic app.
Required Parameters:
- name -- name of the logic app
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
    - slot -- the name of the slot. Defaults to the production slot if not specified
'''
return _call_az("az logicapp stop", locals())
def start(name, resource_group, slot=None):
'''
Start a logic app.
Required Parameters:
- name -- name of the logic app
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
    - slot -- the name of the slot. Defaults to the production slot if not specified
'''
return _call_az("az logicapp start", locals())
def restart(name, resource_group, slot=None):
'''
Restart a logic app.
Required Parameters:
- name -- name of the logic app
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
    - slot -- the name of the slot. Defaults to the production slot if not specified
'''
return _call_az("az logicapp restart", locals())
def create(name, resource_group, storage_account, app_insights=None, app_insights_key=None, consumption_plan_location=None, deployment_container_image_name=None, deployment_local_git=None, deployment_source_branch=None, deployment_source_url=None, disable_app_insights=None, docker_registry_server_password=None, docker_registry_server_user=None, os_type=None, plan=None, tags=None):
'''
Create a logic app.
Required Parameters:
- name -- name of the new logic app
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- storage_account -- Provide a string value of a Storage Account in the provided Resource Group. Or Resource ID of a Storage Account in a different Resource Group
Optional Parameters:
- app_insights -- Name of the existing App Insights project to be added to the logic app. Must be in the same resource group.
- app_insights_key -- Instrumentation key of App Insights to be added.
- consumption_plan_location -- Geographic location where logic app will be hosted. Use `az logicapp list-consumption-locations` to view available locations.
- deployment_container_image_name -- Container image name from Docker Hub, e.g. publisher/image-name:tag
- deployment_local_git -- enable local git
- deployment_source_branch -- the branch to deploy
- deployment_source_url -- Git repository URL to link with manual integration
- disable_app_insights -- Disable creating application insights resource during logicapp create. No logs will be available.
- docker_registry_server_password -- The container registry server password. Required for private registries.
- docker_registry_server_user -- The container registry server username.
- os_type -- Set the OS type for the app to be created.
- plan -- name or resource id of the logicapp app service plan. Use 'appservice plan create' to get one. If using an App Service plan from a different resource group, the full resource id must be used and not the plan name.
- tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
'''
return _call_az("az logicapp create", locals())
def list(resource_group=None):
'''
List logic apps.
Optional Parameters:
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
'''
return _call_az("az logicapp list", locals())
def show(name, resource_group, slot=None):
'''
Get the details of a logic app.
Required Parameters:
- name -- name of the logic app.
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
    - slot -- the name of the slot. Defaults to the production slot if not specified
'''
return _call_az("az logicapp show", locals())
/tensorflow_cpu_aws-2.14.0rc1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/tensorflow/python/keras/utils/generic_utils.py
"""Python utilities required by Keras."""
import binascii
import codecs
import importlib
import marshal
import os
import re
import sys
import threading
import time
import types as python_types
import warnings
import weakref
import numpy as np
from tensorflow.python.keras.utils import tf_contextlib
from tensorflow.python.keras.utils import tf_inspect
from tensorflow.python.util import nest
from tensorflow.python.util import tf_decorator
_GLOBAL_CUSTOM_OBJECTS = {}
_GLOBAL_CUSTOM_NAMES = {}
# Flag that determines whether to skip the NotImplementedError when calling
# get_config in custom models and layers. This is only enabled when saving to
# SavedModel, when the config isn't required.
_SKIP_FAILED_SERIALIZATION = False
# If a layer does not have a defined config, then the returned config will be a
# dictionary with the below key.
_LAYER_UNDEFINED_CONFIG_KEY = 'layer was saved without config'
class CustomObjectScope(object):
"""Exposes custom classes/functions to Keras deserialization internals.
Under a scope `with custom_object_scope(objects_dict)`, Keras methods such
as `tf.keras.models.load_model` or `tf.keras.models.model_from_config`
will be able to deserialize any custom object referenced by a
saved config (e.g. a custom layer or metric).
Example:
Consider a custom regularizer `my_regularizer`:
```python
layer = Dense(3, kernel_regularizer=my_regularizer)
config = layer.get_config() # Config contains a reference to `my_regularizer`
...
# Later:
with custom_object_scope({'my_regularizer': my_regularizer}):
layer = Dense.from_config(config)
```
Args:
*args: Dictionary or dictionaries of `{name: object}` pairs.
"""
def __init__(self, *args):
self.custom_objects = args
self.backup = None
def __enter__(self):
self.backup = _GLOBAL_CUSTOM_OBJECTS.copy()
for objects in self.custom_objects:
_GLOBAL_CUSTOM_OBJECTS.update(objects)
return self
def __exit__(self, *args, **kwargs):
_GLOBAL_CUSTOM_OBJECTS.clear()
_GLOBAL_CUSTOM_OBJECTS.update(self.backup)
def get_custom_objects():
"""Retrieves a live reference to the global dictionary of custom objects.
Updating and clearing custom objects using `custom_object_scope`
is preferred, but `get_custom_objects` can
be used to directly access the current collection of custom objects.
Example:
```python
get_custom_objects().clear()
get_custom_objects()['MyObject'] = MyObject
```
Returns:
Global dictionary of names to classes (`_GLOBAL_CUSTOM_OBJECTS`).
"""
return _GLOBAL_CUSTOM_OBJECTS
# Store a unique, per-object ID for shared objects.
#
# We store a unique ID for each object so that we may, at loading time,
# re-create the network properly. Without this ID, we would have no way of
# determining whether a config is a description of a new object that
# should be created or is merely a reference to an already-created object.
SHARED_OBJECT_KEY = 'shared_object_id'
SHARED_OBJECT_DISABLED = threading.local()
SHARED_OBJECT_LOADING = threading.local()
SHARED_OBJECT_SAVING = threading.local()
# Attributes on the threadlocal variable must be set per-thread, thus we
# cannot initialize these globally. Instead, we have accessor functions with
# default values.
def _shared_object_disabled():
"""Get whether shared object handling is disabled in a threadsafe manner."""
return getattr(SHARED_OBJECT_DISABLED, 'disabled', False)
def _shared_object_loading_scope():
"""Get the current shared object saving scope in a threadsafe manner."""
return getattr(SHARED_OBJECT_LOADING, 'scope', NoopLoadingScope())
def _shared_object_saving_scope():
"""Get the current shared object saving scope in a threadsafe manner."""
return getattr(SHARED_OBJECT_SAVING, 'scope', None)
class DisableSharedObjectScope(object):
"""A context manager for disabling handling of shared objects.
Disables shared object handling for both saving and loading.
Created primarily for use with `clone_model`, which does extra surgery that
is incompatible with shared objects.
"""
def __enter__(self):
SHARED_OBJECT_DISABLED.disabled = True
self._orig_loading_scope = _shared_object_loading_scope()
self._orig_saving_scope = _shared_object_saving_scope()
def __exit__(self, *args, **kwargs):
SHARED_OBJECT_DISABLED.disabled = False
SHARED_OBJECT_LOADING.scope = self._orig_loading_scope
SHARED_OBJECT_SAVING.scope = self._orig_saving_scope
class NoopLoadingScope(object):
"""The default shared object loading scope. It does nothing.
Created to simplify serialization code that doesn't care about shared objects
(e.g. when serializing a single object).
"""
def get(self, unused_object_id):
return None
def set(self, object_id, obj):
pass
class SharedObjectLoadingScope(object):
"""A context manager for keeping track of loaded objects.
During the deserialization process, we may come across objects that are
shared across multiple layers. In order to accurately restore the network
structure to its original state, `SharedObjectLoadingScope` allows us to
re-use shared objects rather than cloning them.
"""
def __enter__(self):
if _shared_object_disabled():
return NoopLoadingScope()
global SHARED_OBJECT_LOADING
SHARED_OBJECT_LOADING.scope = self
self._obj_ids_to_obj = {}
return self
def get(self, object_id):
"""Given a shared object ID, returns a previously instantiated object.
Args:
object_id: shared object ID to use when attempting to find already-loaded
object.
Returns:
The object, if we've seen this ID before. Else, `None`.
"""
# Explicitly check for `None` internally to make external calling code a
# bit cleaner.
if object_id is None:
return
return self._obj_ids_to_obj.get(object_id)
def set(self, object_id, obj):
"""Stores an instantiated object for future lookup and sharing."""
if object_id is None:
return
self._obj_ids_to_obj[object_id] = obj
def __exit__(self, *args, **kwargs):
global SHARED_OBJECT_LOADING
SHARED_OBJECT_LOADING.scope = NoopLoadingScope()
class SharedObjectConfig(dict):
"""A configuration container that keeps track of references.
`SharedObjectConfig` will automatically attach a shared object ID to any
configs which are referenced more than once, allowing for proper shared
object reconstruction at load time.
In most cases, it would be more proper to subclass something like
`collections.UserDict` or `collections.Mapping` rather than `dict` directly.
Unfortunately, python's json encoder does not support `Mapping`s. This is
important functionality to retain, since we are dealing with serialization.
We should be safe to subclass `dict` here, since we aren't actually
overriding any core methods, only augmenting with a new one for reference
counting.
"""
def __init__(self, base_config, object_id, **kwargs):
self.ref_count = 1
self.object_id = object_id
super(SharedObjectConfig, self).__init__(base_config, **kwargs)
def increment_ref_count(self):
# As soon as we've seen the object more than once, we want to attach the
# shared object ID. This allows us to only attach the shared object ID when
# it's strictly necessary, making backwards compatibility breakage less
# likely.
if self.ref_count == 1:
self[SHARED_OBJECT_KEY] = self.object_id
self.ref_count += 1
class SharedObjectSavingScope(object):
"""Keeps track of shared object configs when serializing."""
def __enter__(self):
if _shared_object_disabled():
return None
global SHARED_OBJECT_SAVING
# Serialization can happen at a number of layers for a number of reasons.
# We may end up with a case where we're opening a saving scope within
# another saving scope. In that case, we'd like to use the outermost scope
# available and ignore inner scopes, since there is not (yet) a reasonable
# use case for having these nested and distinct.
if _shared_object_saving_scope() is not None:
self._passthrough = True
return _shared_object_saving_scope()
else:
self._passthrough = False
SHARED_OBJECT_SAVING.scope = self
self._shared_objects_config = weakref.WeakKeyDictionary()
self._next_id = 0
return self
def get_config(self, obj):
"""Gets a `SharedObjectConfig` if one has already been seen for `obj`.
Args:
obj: The object for which to retrieve the `SharedObjectConfig`.
Returns:
The SharedObjectConfig for a given object, if already seen. Else,
`None`.
"""
try:
shared_object_config = self._shared_objects_config[obj]
except (TypeError, KeyError):
# If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
# that has not overridden `__hash__`), a `TypeError` will be thrown.
# We'll just continue on without shared object support.
return None
shared_object_config.increment_ref_count()
return shared_object_config
def create_config(self, base_config, obj):
"""Create a new SharedObjectConfig for a given object."""
shared_object_config = SharedObjectConfig(base_config, self._next_id)
self._next_id += 1
try:
self._shared_objects_config[obj] = shared_object_config
except TypeError:
# If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
# that has not overridden `__hash__`), a `TypeError` will be thrown.
# We'll just continue on without shared object support.
pass
return shared_object_config
def __exit__(self, *args, **kwargs):
if not getattr(self, '_passthrough', False):
global SHARED_OBJECT_SAVING
SHARED_OBJECT_SAVING.scope = None
def serialize_keras_class_and_config(
cls_name, cls_config, obj=None, shared_object_id=None):
"""Returns the serialization of the class with the given config."""
base_config = {'class_name': cls_name, 'config': cls_config}
# We call `serialize_keras_class_and_config` for some branches of the load
# path. In that case, we may already have a shared object ID we'd like to
# retain.
if shared_object_id is not None:
base_config[SHARED_OBJECT_KEY] = shared_object_id
# If we have an active `SharedObjectSavingScope`, check whether we've already
# serialized this config. If so, just use that config. This will store an
# extra ID field in the config, allowing us to re-create the shared object
# relationship at load time.
if _shared_object_saving_scope() is not None and obj is not None:
shared_object_config = _shared_object_saving_scope().get_config(obj)
if shared_object_config is None:
return _shared_object_saving_scope().create_config(base_config, obj)
return shared_object_config
return base_config
def register_keras_serializable(package='Custom', name=None):
"""Registers an object with the Keras serialization framework.
This decorator injects the decorated class or function into the Keras custom
object dictionary, so that it can be serialized and deserialized without
needing an entry in the user-provided custom object dict. It also injects a
function that Keras will call to get the object's serializable string key.
Note that to be serialized and deserialized, classes must implement the
`get_config()` method. Functions do not have this requirement.
  The object will be registered under the key 'package>name', where `name`
defaults to the object name if not passed.
Args:
package: The package that this class belongs to.
name: The name to serialize this class under in this package. If None, the
class' name will be used.
Returns:
A decorator that registers the decorated class with the passed names.
"""
def decorator(arg):
"""Registers a class with the Keras serialization framework."""
class_name = name if name is not None else arg.__name__
registered_name = package + '>' + class_name
if tf_inspect.isclass(arg) and not hasattr(arg, 'get_config'):
raise ValueError(
'Cannot register a class that does not have a get_config() method.')
if registered_name in _GLOBAL_CUSTOM_OBJECTS:
raise ValueError(
'%s has already been registered to %s' %
(registered_name, _GLOBAL_CUSTOM_OBJECTS[registered_name]))
if arg in _GLOBAL_CUSTOM_NAMES:
raise ValueError('%s has already been registered to %s' %
(arg, _GLOBAL_CUSTOM_NAMES[arg]))
_GLOBAL_CUSTOM_OBJECTS[registered_name] = arg
_GLOBAL_CUSTOM_NAMES[arg] = registered_name
return arg
return decorator
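# Hedged usage sketch (not part of the original module): registering a custom
# class so it can be serialized without an explicit custom_objects dict. The
# package and class names below are illustrative assumptions.
def _example_register_custom_class():
  """Illustrative only: register a small custom class for serialization."""

  @register_keras_serializable(package='MyExamplePackage')
  class MyRegularizer(object):

    def __init__(self, strength=0.01):
      self.strength = strength

    def get_config(self):
      return {'strength': self.strength}

  # Note: the registration is global, so calling this helper a second time
  # would raise a ValueError for the already-registered name.
  return MyRegularizer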
def get_registered_name(obj):
"""Returns the name registered to an object within the Keras framework.
This function is part of the Keras serialization and deserialization
framework. It maps objects to the string names associated with those objects
for serialization/deserialization.
Args:
obj: The object to look up.
Returns:
The name associated with the object, or the default Python name if the
object is not registered.
"""
if obj in _GLOBAL_CUSTOM_NAMES:
return _GLOBAL_CUSTOM_NAMES[obj]
else:
return obj.__name__
@tf_contextlib.contextmanager
def skip_failed_serialization():
global _SKIP_FAILED_SERIALIZATION
prev = _SKIP_FAILED_SERIALIZATION
try:
_SKIP_FAILED_SERIALIZATION = True
yield
finally:
_SKIP_FAILED_SERIALIZATION = prev
def get_registered_object(name, custom_objects=None, module_objects=None):
"""Returns the class associated with `name` if it is registered with Keras.
This function is part of the Keras serialization and deserialization
framework. It maps strings to the objects associated with them for
serialization/deserialization.
Example:
```
def from_config(cls, config, custom_objects=None):
if 'my_custom_object_name' in config:
config['hidden_cls'] = tf.keras.utils.get_registered_object(
config['my_custom_object_name'], custom_objects=custom_objects)
```
Args:
name: The name to look up.
custom_objects: A dictionary of custom objects to look the name up in.
Generally, custom_objects is provided by the user.
module_objects: A dictionary of custom objects to look the name up in.
Generally, module_objects is provided by midlevel library implementers.
Returns:
An instantiable class associated with 'name', or None if no such class
exists.
"""
if name in _GLOBAL_CUSTOM_OBJECTS:
return _GLOBAL_CUSTOM_OBJECTS[name]
elif custom_objects and name in custom_objects:
return custom_objects[name]
elif module_objects and name in module_objects:
return module_objects[name]
return None
# pylint: disable=g-bad-exception-name
class CustomMaskWarning(Warning):
pass
# pylint: enable=g-bad-exception-name
def serialize_keras_object(instance):
"""Serialize a Keras object into a JSON-compatible representation.
Calls to `serialize_keras_object` while underneath the
`SharedObjectSavingScope` context manager will cause any objects re-used
across multiple layers to be saved with a special shared object ID. This
allows the network to be re-created properly during deserialization.
Args:
instance: The object to serialize.
Returns:
A dict-like, JSON-compatible representation of the object's config.
"""
_, instance = tf_decorator.unwrap(instance)
if instance is None:
return None
# pylint: disable=protected-access
#
# For v1 layers, checking supports_masking is not enough. We have to also
# check whether compute_mask has been overridden.
supports_masking = (getattr(instance, 'supports_masking', False)
or (hasattr(instance, 'compute_mask')
and not is_default(instance.compute_mask)))
if supports_masking and is_default(instance.get_config):
warnings.warn('Custom mask layers require a config and must override '
'get_config. When loading, the custom mask layer must be '
'passed to the custom_objects argument.',
category=CustomMaskWarning)
# pylint: enable=protected-access
if hasattr(instance, 'get_config'):
name = get_registered_name(instance.__class__)
try:
config = instance.get_config()
except NotImplementedError as e:
if _SKIP_FAILED_SERIALIZATION:
return serialize_keras_class_and_config(
name, {_LAYER_UNDEFINED_CONFIG_KEY: True})
raise e
serialization_config = {}
for key, item in config.items():
if isinstance(item, str):
serialization_config[key] = item
continue
# Any object of a different type needs to be converted to string or dict
# for serialization (e.g. custom functions, custom classes)
try:
serialized_item = serialize_keras_object(item)
if isinstance(serialized_item, dict) and not isinstance(item, dict):
serialized_item['__passive_serialization__'] = True
serialization_config[key] = serialized_item
except ValueError:
serialization_config[key] = item
name = get_registered_name(instance.__class__)
return serialize_keras_class_and_config(
name, serialization_config, instance)
if hasattr(instance, '__name__'):
return get_registered_name(instance)
raise ValueError('Cannot serialize', instance)
def get_custom_objects_by_name(item, custom_objects=None):
"""Returns the item if it is in either local or global custom objects."""
if item in _GLOBAL_CUSTOM_OBJECTS:
return _GLOBAL_CUSTOM_OBJECTS[item]
elif custom_objects and item in custom_objects:
return custom_objects[item]
return None
def class_and_config_for_serialized_keras_object(
config,
module_objects=None,
custom_objects=None,
printable_module_name='object'):
"""Returns the class name and config for a serialized keras object."""
if (not isinstance(config, dict)
or 'class_name' not in config
or 'config' not in config):
raise ValueError('Improper config format: ' + str(config))
class_name = config['class_name']
cls = get_registered_object(class_name, custom_objects, module_objects)
if cls is None:
raise ValueError(
'Unknown {}: {}. Please ensure this object is '
'passed to the `custom_objects` argument. See '
'https://www.tensorflow.org/guide/keras/save_and_serialize'
'#registering_the_custom_object for details.'
.format(printable_module_name, class_name))
cls_config = config['config']
# Check if `cls_config` is a list. If it is a list, return the class and the
# associated class configs for recursively deserialization. This case will
# happen on the old version of sequential model (e.g. `keras_version` ==
# "2.0.6"), which is serialized in a different structure, for example
# "{'class_name': 'Sequential',
# 'config': [{'class_name': 'Embedding', 'config': ...}, {}, ...]}".
if isinstance(cls_config, list):
return (cls, cls_config)
deserialized_objects = {}
for key, item in cls_config.items():
if key == 'name':
# Assume that the value of 'name' is a string that should not be
# deserialized as a function. This avoids the corner case where
# cls_config['name'] has an identical name to a custom function and
# gets converted into that function.
deserialized_objects[key] = item
elif isinstance(item, dict) and '__passive_serialization__' in item:
deserialized_objects[key] = deserialize_keras_object(
item,
module_objects=module_objects,
custom_objects=custom_objects,
printable_module_name='config_item')
# TODO(momernick): Should this also have 'module_objects'?
elif (isinstance(item, str) and
tf_inspect.isfunction(get_registered_object(item, custom_objects))):
# Handle custom functions here. When saving functions, we only save the
# function's name as a string. If we find a matching string in the custom
# objects during deserialization, we convert the string back to the
# original function.
# Note that a potential issue is that a string field could have a naming
# conflict with a custom function name, but this should be a rare case.
# This issue does not occur if a string field has a naming conflict with
# a custom object, since the config of an object will always be a dict.
deserialized_objects[key] = get_registered_object(item, custom_objects)
for key, item in deserialized_objects.items():
cls_config[key] = deserialized_objects[key]
return (cls, cls_config)
def deserialize_keras_object(identifier,
module_objects=None,
custom_objects=None,
printable_module_name='object'):
"""Turns the serialized form of a Keras object back into an actual object.
This function is for mid-level library implementers rather than end users.
Importantly, this utility requires you to provide the dict of `module_objects`
to use for looking up the object config; this is not populated by default.
If you need a deserialization utility that has preexisting knowledge of
built-in Keras objects, use e.g. `keras.layers.deserialize(config)`,
`keras.metrics.deserialize(config)`, etc.
Calling `deserialize_keras_object` while underneath the
`SharedObjectLoadingScope` context manager will cause any already-seen shared
objects to be returned as-is rather than creating a new object.
Args:
identifier: the serialized form of the object.
module_objects: A dictionary of built-in objects to look the name up in.
Generally, `module_objects` is provided by midlevel library implementers.
custom_objects: A dictionary of custom objects to look the name up in.
Generally, `custom_objects` is provided by the end user.
printable_module_name: A human-readable string representing the type of the
object. Printed in case of exception.
Returns:
The deserialized object.
Example:
A mid-level library implementer might want to implement a utility for
retrieving an object from its config, as such:
```python
def deserialize(config, custom_objects=None):
return deserialize_keras_object(
        config,
module_objects=globals(),
custom_objects=custom_objects,
name="MyObjectType",
)
```
This is how e.g. `keras.layers.deserialize()` is implemented.
"""
if identifier is None:
return None
if isinstance(identifier, dict):
# In this case we are dealing with a Keras config dictionary.
config = identifier
(cls, cls_config) = class_and_config_for_serialized_keras_object(
config, module_objects, custom_objects, printable_module_name)
# If this object has already been loaded (i.e. it's shared between multiple
# objects), return the already-loaded object.
shared_object_id = config.get(SHARED_OBJECT_KEY)
shared_object = _shared_object_loading_scope().get(shared_object_id) # pylint: disable=assignment-from-none
if shared_object is not None:
return shared_object
if hasattr(cls, 'from_config'):
arg_spec = tf_inspect.getfullargspec(cls.from_config)
custom_objects = custom_objects or {}
if 'custom_objects' in arg_spec.args:
deserialized_obj = cls.from_config(
cls_config,
custom_objects=dict(
list(_GLOBAL_CUSTOM_OBJECTS.items()) +
list(custom_objects.items())))
else:
with CustomObjectScope(custom_objects):
deserialized_obj = cls.from_config(cls_config)
else:
# Then `cls` may be a function returning a class.
# in this case by convention `config` holds
# the kwargs of the function.
custom_objects = custom_objects or {}
with CustomObjectScope(custom_objects):
deserialized_obj = cls(**cls_config)
# Add object to shared objects, in case we find it referenced again.
_shared_object_loading_scope().set(shared_object_id, deserialized_obj)
return deserialized_obj
elif isinstance(identifier, str):
object_name = identifier
if custom_objects and object_name in custom_objects:
obj = custom_objects.get(object_name)
elif object_name in _GLOBAL_CUSTOM_OBJECTS:
obj = _GLOBAL_CUSTOM_OBJECTS[object_name]
else:
obj = module_objects.get(object_name)
if obj is None:
raise ValueError(
'Unknown {}: {}. Please ensure this object is '
'passed to the `custom_objects` argument. See '
'https://www.tensorflow.org/guide/keras/save_and_serialize'
'#registering_the_custom_object for details.'
.format(printable_module_name, object_name))
# Classes passed by name are instantiated with no args, functions are
# returned as-is.
if tf_inspect.isclass(obj):
return obj()
return obj
elif tf_inspect.isfunction(identifier):
# If a function has already been deserialized, return as is.
return identifier
else:
raise ValueError('Could not interpret serialized %s: %s' %
(printable_module_name, identifier))
def func_dump(func):
"""Serializes a user defined function.
Args:
func: the function to serialize.
Returns:
A tuple `(code, defaults, closure)`.
"""
if os.name == 'nt':
raw_code = marshal.dumps(func.__code__).replace(b'\\', b'/')
code = codecs.encode(raw_code, 'base64').decode('ascii')
else:
raw_code = marshal.dumps(func.__code__)
code = codecs.encode(raw_code, 'base64').decode('ascii')
defaults = func.__defaults__
if func.__closure__:
closure = tuple(c.cell_contents for c in func.__closure__)
else:
closure = None
return code, defaults, closure
def func_load(code, defaults=None, closure=None, globs=None):
"""Deserializes a user defined function.
Args:
code: bytecode of the function.
defaults: defaults of the function.
closure: closure of the function.
globs: dictionary of global objects.
Returns:
A function object.
"""
if isinstance(code, (tuple, list)): # unpack previous dump
code, defaults, closure = code
if isinstance(defaults, list):
defaults = tuple(defaults)
def ensure_value_to_cell(value):
"""Ensures that a value is converted to a python cell object.
Args:
value: Any value that needs to be casted to the cell type
Returns:
A value wrapped as a cell object (see function "func_load")
"""
def dummy_fn():
# pylint: disable=pointless-statement
value # just access it so it gets captured in .__closure__
cell_value = dummy_fn.__closure__[0]
if not isinstance(value, type(cell_value)):
return cell_value
return value
if closure is not None:
closure = tuple(ensure_value_to_cell(_) for _ in closure)
try:
raw_code = codecs.decode(code.encode('ascii'), 'base64')
except (UnicodeEncodeError, binascii.Error):
raw_code = code.encode('raw_unicode_escape')
code = marshal.loads(raw_code)
if globs is None:
globs = globals()
return python_types.FunctionType(
code, globs, name=code.co_name, argdefs=defaults, closure=closure)
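# Hedged usage sketch (not part of the original module): round-tripping a
# small local function through func_dump/func_load. The function below is an
# illustrative assumption.
def _example_func_roundtrip():
  """Illustrative only: serialize a function to a tuple and restore it."""

  def add_one(x):
    return x + 1

  code, defaults, closure = func_dump(add_one)
  restored = func_load(code, defaults=defaults, closure=closure)
  return restored(41)  # returns 42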
def has_arg(fn, name, accept_all=False):
"""Checks if a callable accepts a given keyword argument.
Args:
fn: Callable to inspect.
name: Check if `fn` can be called with `name` as a keyword argument.
accept_all: What to return if there is no parameter called `name` but the
function accepts a `**kwargs` argument.
Returns:
bool, whether `fn` accepts a `name` keyword argument.
"""
arg_spec = tf_inspect.getfullargspec(fn)
if accept_all and arg_spec.varkw is not None:
return True
return name in arg_spec.args or name in arg_spec.kwonlyargs
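# Hedged usage sketch (not part of the original module): probing keyword
# acceptance of a local function, including the `accept_all` fallback for
# callables that take **kwargs.
def _example_has_arg():
  """Illustrative only: returns (True, False, True)."""

  def fn(x, y=1, **kwargs):
    return x + y

  return (has_arg(fn, 'y'),
          has_arg(fn, 'z'),
          has_arg(fn, 'z', accept_all=True))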
class Progbar(object):
"""Displays a progress bar.
Args:
target: Total number of steps expected, None if unknown.
width: Progress bar width on screen.
verbose: Verbosity mode, 0 (silent), 1 (verbose), 2 (semi-verbose)
stateful_metrics: Iterable of string names of metrics that should *not* be
averaged over time. Metrics in this list will be displayed as-is. All
others will be averaged by the progbar before display.
interval: Minimum visual progress update interval (in seconds).
unit_name: Display name for step counts (usually "step" or "sample").
"""
def __init__(self,
target,
width=30,
verbose=1,
interval=0.05,
stateful_metrics=None,
unit_name='step'):
self.target = target
self.width = width
self.verbose = verbose
self.interval = interval
self.unit_name = unit_name
if stateful_metrics:
self.stateful_metrics = set(stateful_metrics)
else:
self.stateful_metrics = set()
self._dynamic_display = ((hasattr(sys.stdout, 'isatty') and
sys.stdout.isatty()) or
'ipykernel' in sys.modules or
'posix' in sys.modules or
'PYCHARM_HOSTED' in os.environ)
self._total_width = 0
self._seen_so_far = 0
# We use a dict + list to avoid garbage collection
# issues found in OrderedDict
self._values = {}
self._values_order = []
self._start = time.time()
self._last_update = 0
self._time_after_first_step = None
def update(self, current, values=None, finalize=None):
"""Updates the progress bar.
Args:
current: Index of current step.
values: List of tuples: `(name, value_for_last_step)`. If `name` is in
`stateful_metrics`, `value_for_last_step` will be displayed as-is.
Else, an average of the metric over time will be displayed.
finalize: Whether this is the last update for the progress bar. If
`None`, defaults to `current >= self.target`.
"""
if finalize is None:
if self.target is None:
finalize = False
else:
finalize = current >= self.target
values = values or []
for k, v in values:
if k not in self._values_order:
self._values_order.append(k)
if k not in self.stateful_metrics:
# In the case that progress bar doesn't have a target value in the first
# epoch, both on_batch_end and on_epoch_end will be called, which will
# cause 'current' and 'self._seen_so_far' to have the same value. Force
# the minimal value to 1 here, otherwise stateful_metric will be 0s.
value_base = max(current - self._seen_so_far, 1)
if k not in self._values:
self._values[k] = [v * value_base, value_base]
else:
self._values[k][0] += v * value_base
self._values[k][1] += value_base
else:
# Stateful metrics output a numeric value. This representation
# means "take an average from a single value" but keeps the
# numeric formatting.
self._values[k] = [v, 1]
self._seen_so_far = current
now = time.time()
info = ' - %.0fs' % (now - self._start)
if self.verbose == 1:
if now - self._last_update < self.interval and not finalize:
return
prev_total_width = self._total_width
if self._dynamic_display:
sys.stdout.write('\b' * prev_total_width)
sys.stdout.write('\r')
else:
sys.stdout.write('\n')
if self.target is not None:
numdigits = int(np.log10(self.target)) + 1
bar = ('%' + str(numdigits) + 'd/%d [') % (current, self.target)
prog = float(current) / self.target
prog_width = int(self.width * prog)
if prog_width > 0:
bar += ('=' * (prog_width - 1))
if current < self.target:
bar += '>'
else:
bar += '='
bar += ('.' * (self.width - prog_width))
bar += ']'
else:
bar = '%7d/Unknown' % current
self._total_width = len(bar)
sys.stdout.write(bar)
time_per_unit = self._estimate_step_duration(current, now)
if self.target is None or finalize:
if time_per_unit >= 1 or time_per_unit == 0:
info += ' %.0fs/%s' % (time_per_unit, self.unit_name)
elif time_per_unit >= 1e-3:
info += ' %.0fms/%s' % (time_per_unit * 1e3, self.unit_name)
else:
info += ' %.0fus/%s' % (time_per_unit * 1e6, self.unit_name)
else:
eta = time_per_unit * (self.target - current)
if eta > 3600:
eta_format = '%d:%02d:%02d' % (eta // 3600,
(eta % 3600) // 60, eta % 60)
elif eta > 60:
eta_format = '%d:%02d' % (eta // 60, eta % 60)
else:
eta_format = '%ds' % eta
info = ' - ETA: %s' % eta_format
for k in self._values_order:
info += ' - %s:' % k
if isinstance(self._values[k], list):
avg = np.mean(self._values[k][0] / max(1, self._values[k][1]))
if abs(avg) > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
else:
info += ' %s' % self._values[k]
self._total_width += len(info)
if prev_total_width > self._total_width:
info += (' ' * (prev_total_width - self._total_width))
if finalize:
info += '\n'
sys.stdout.write(info)
sys.stdout.flush()
elif self.verbose == 2:
if finalize:
numdigits = int(np.log10(self.target)) + 1
count = ('%' + str(numdigits) + 'd/%d') % (current, self.target)
info = count + info
for k in self._values_order:
info += ' - %s:' % k
avg = np.mean(self._values[k][0] / max(1, self._values[k][1]))
if avg > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
info += '\n'
sys.stdout.write(info)
sys.stdout.flush()
self._last_update = now
def add(self, n, values=None):
self.update(self._seen_so_far + n, values)
def _estimate_step_duration(self, current, now):
"""Estimate the duration of a single step.
Given the step number `current` and the corresponding time `now`
this function returns an estimate for how long a single step
takes. If this is called before one step has been completed
(i.e. `current == 0`) then zero is given as an estimate. The duration
estimate ignores the duration of the (assumed to be non-representative)
first step for estimates when more steps are available (i.e. `current>1`).
Args:
current: Index of current step.
now: The current time.
Returns: Estimate of the duration of a single step.
"""
if current:
# there are a few special scenarios here:
# 1) somebody is calling the progress bar without ever supplying step 1
      # 2) somebody is calling the progress bar and supplies step one multiple
# times, e.g. as part of a finalizing call
# in these cases, we just fall back to the simple calculation
if self._time_after_first_step is not None and current > 1:
time_per_unit = (now - self._time_after_first_step) / (current - 1)
else:
time_per_unit = (now - self._start) / current
if current == 1:
self._time_after_first_step = now
return time_per_unit
else:
return 0
def _update_stateful_metrics(self, stateful_metrics):
self.stateful_metrics = self.stateful_metrics.union(stateful_metrics)
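# Hedged usage sketch (not part of the original module): driving the progress
# bar over a known number of steps with one averaged metric. The step count
# and metric name are illustrative assumptions.
def _example_progbar(num_steps=10):
  """Illustrative only: update a Progbar with a running loss value."""
  bar = Progbar(target=num_steps, unit_name='step')
  for step in range(1, num_steps + 1):
    loss = 1.0 / step
    bar.update(step, values=[('loss', loss)])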
def make_batches(size, batch_size):
"""Returns a list of batch indices (tuples of indices).
Args:
size: Integer, total size of the data to slice into batches.
batch_size: Integer, batch size.
Returns:
A list of tuples of array indices.
"""
num_batches = int(np.ceil(size / float(batch_size)))
return [(i * batch_size, min(size, (i + 1) * batch_size))
for i in range(0, num_batches)]
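# Hedged usage sketch (not part of the original module): the returned tuples
# are half-open (start, stop) index pairs; the last batch may be shorter.
def _example_make_batches():
  """Illustrative only: returns [(0, 4), (4, 8), (8, 10)]."""
  return make_batches(10, 4)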
def slice_arrays(arrays, start=None, stop=None):
"""Slice an array or list of arrays.
This takes an array-like, or a list of
array-likes, and outputs:
- arrays[start:stop] if `arrays` is an array-like
- [x[start:stop] for x in arrays] if `arrays` is a list
Can also work on list/array of indices: `slice_arrays(x, indices)`
Args:
arrays: Single array or list of arrays.
start: can be an integer index (start index) or a list/array of indices
stop: integer (stop index); should be None if `start` was a list.
Returns:
A slice of the array(s).
Raises:
ValueError: If the value of start is a list and stop is not None.
"""
if arrays is None:
return [None]
if isinstance(start, list) and stop is not None:
raise ValueError('The stop argument has to be None if the value of start '
'is a list.')
elif isinstance(arrays, list):
if hasattr(start, '__len__'):
# hdf5 datasets only support list objects as indices
if hasattr(start, 'shape'):
start = start.tolist()
return [None if x is None else x[start] for x in arrays]
return [
None if x is None else
None if not hasattr(x, '__getitem__') else x[start:stop] for x in arrays
]
else:
if hasattr(start, '__len__'):
if hasattr(start, 'shape'):
start = start.tolist()
return arrays[start]
if hasattr(start, '__getitem__'):
return arrays[start:stop]
return [None]
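# Hedged usage sketch (not part of the original module): slicing a list of
# arrays either by a contiguous range or by an explicit list of indices.
def _example_slice_arrays():
  """Illustrative only: range slice and index slice over two arrays."""
  a = np.arange(10)
  b = np.arange(10, 20)
  ranged = slice_arrays([a, b], start=2, stop=5)   # [a[2:5], b[2:5]]
  indexed = slice_arrays([a, b], start=[0, 3, 7])  # [a[[0, 3, 7]], b[[0, 3, 7]]]
  return ranged, indexed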
def to_list(x):
"""Normalizes a list/tensor into a list.
If a tensor is passed, we return
a list of size 1 containing the tensor.
Args:
x: target object to be normalized.
Returns:
A list.
"""
if isinstance(x, list):
return x
return [x]
def to_snake_case(name):
intermediate = re.sub('(.)([A-Z][a-z0-9]+)', r'\1_\2', name)
insecure = re.sub('([a-z])([A-Z])', r'\1_\2', intermediate).lower()
# If the class is private the name starts with "_" which is not secure
# for creating scopes. We prefix the name with "private" in this case.
if insecure[0] != '_':
return insecure
return 'private' + insecure
def is_all_none(structure):
iterable = nest.flatten(structure)
# We cannot use Python's `any` because the iterable may return Tensors.
for element in iterable:
if element is not None:
return False
return True
def check_for_unexpected_keys(name, input_dict, expected_values):
unknown = set(input_dict.keys()).difference(expected_values)
if unknown:
raise ValueError('Unknown entries in {} dictionary: {}. Only expected '
'following keys: {}'.format(name, list(unknown),
expected_values))
def validate_kwargs(kwargs,
allowed_kwargs,
error_message='Keyword argument not understood:'):
"""Checks that all keyword arguments are in the set of allowed keys."""
for kwarg in kwargs:
if kwarg not in allowed_kwargs:
raise TypeError(error_message, kwarg)
def validate_config(config):
"""Determines whether config appears to be a valid layer config."""
return isinstance(config, dict) and _LAYER_UNDEFINED_CONFIG_KEY not in config
def default(method):
"""Decorates a method to detect overrides in subclasses."""
method._is_default = True # pylint: disable=protected-access
return method
def is_default(method):
"""Check if a method is decorated with the `default` wrapper."""
return getattr(method, '_is_default', False)
def populate_dict_with_module_objects(target_dict, modules, obj_filter):
for module in modules:
for name in dir(module):
obj = getattr(module, name)
if obj_filter(obj):
target_dict[name] = obj
class LazyLoader(python_types.ModuleType):
"""Lazily import a module, mainly to avoid pulling in large dependencies."""
def __init__(self, local_name, parent_module_globals, name):
self._local_name = local_name
self._parent_module_globals = parent_module_globals
super(LazyLoader, self).__init__(name)
def _load(self):
"""Load the module and insert it into the parent's globals."""
# Import the target module and insert it into the parent's namespace
module = importlib.import_module(self.__name__)
self._parent_module_globals[self._local_name] = module
# Update this object's dict so that if someone keeps a reference to the
# LazyLoader, lookups are efficient (__getattr__ is only called on lookups
# that fail).
self.__dict__.update(module.__dict__)
return module
def __getattr__(self, item):
module = self._load()
return getattr(module, item)
# Aliases
custom_object_scope = CustomObjectScope # pylint: disable=invalid-name
/boto3_type_annotations_with_docs-0.3.1.tar.gz/boto3_type_annotations_with_docs-0.3.1/boto3_type_annotations/cloudtrail/paginator.py
from typing import Dict
from typing import List
from datetime import datetime
from botocore.paginate import Paginator
class ListPublicKeys(Paginator):
def paginate(self, StartTime: datetime = None, EndTime: datetime = None, PaginationConfig: Dict = None) -> Dict:
"""
Creates an iterator that will paginate through responses from :py:meth:`CloudTrail.Client.list_public_keys`.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cloudtrail-2013-11-01/ListPublicKeys>`_
**Request Syntax**
::
response_iterator = paginator.paginate(
StartTime=datetime(2015, 1, 1),
EndTime=datetime(2015, 1, 1),
PaginationConfig={
'MaxItems': 123,
'PageSize': 123,
'StartingToken': 'string'
}
)
**Response Syntax**
::
{
'PublicKeyList': [
{
'Value': b'bytes',
'ValidityStartTime': datetime(2015, 1, 1),
'ValidityEndTime': datetime(2015, 1, 1),
'Fingerprint': 'string'
},
],
}
**Response Structure**
- *(dict) --*
Returns the objects or data listed below if successful. Otherwise, returns an error.
- **PublicKeyList** *(list) --*
Contains an array of PublicKey objects.
.. note::
The returned public keys may have validity time ranges that overlap.
- *(dict) --*
Contains information about a returned public key.
- **Value** *(bytes) --*
The DER encoded public key value in PKCS#1 format.
- **ValidityStartTime** *(datetime) --*
The starting time of validity of the public key.
- **ValidityEndTime** *(datetime) --*
The ending time of validity of the public key.
- **Fingerprint** *(string) --*
The fingerprint of the public key.
:type StartTime: datetime
:param StartTime:
Optionally specifies, in UTC, the start of the time range to look up public keys for CloudTrail digest files. If not specified, the current time is used, and the current public key is returned.
:type EndTime: datetime
:param EndTime:
Optionally specifies, in UTC, the end of the time range to look up public keys for CloudTrail digest files. If not specified, the current time is used.
:type PaginationConfig: dict
:param PaginationConfig:
A dictionary that provides parameters to control pagination.
- **MaxItems** *(integer) --*
The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
- **PageSize** *(integer) --*
The size of each page.
- **StartingToken** *(string) --*
A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
:rtype: dict
:returns:
"""
pass
class ListTags(Paginator):
def paginate(self, ResourceIdList: List, PaginationConfig: Dict = None) -> Dict:
"""
Creates an iterator that will paginate through responses from :py:meth:`CloudTrail.Client.list_tags`.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cloudtrail-2013-11-01/ListTags>`_
**Request Syntax**
::
response_iterator = paginator.paginate(
ResourceIdList=[
'string',
],
PaginationConfig={
'MaxItems': 123,
'PageSize': 123,
'StartingToken': 'string'
}
)
**Response Syntax**
::
{
'ResourceTagList': [
{
'ResourceId': 'string',
'TagsList': [
{
'Key': 'string',
'Value': 'string'
},
]
},
],
}
**Response Structure**
- *(dict) --*
Returns the objects or data listed below if successful. Otherwise, returns an error.
- **ResourceTagList** *(list) --*
A list of resource tags.
- *(dict) --*
A resource tag.
- **ResourceId** *(string) --*
Specifies the ARN of the resource.
- **TagsList** *(list) --*
A list of tags.
- *(dict) --*
A custom key-value pair associated with a resource such as a CloudTrail trail.
- **Key** *(string) --*
                    The key in a key-value pair. The key must be no longer than 128 Unicode characters. The key must be unique for the resource to which it applies.
- **Value** *(string) --*
The value in a key-value pair of a tag. The value must be no longer than 256 Unicode characters.
:type ResourceIdList: list
:param ResourceIdList: **[REQUIRED]**
Specifies a list of trail ARNs whose tags will be listed. The list has a limit of 20 ARNs. The format of a trail ARN is:
``arn:aws:cloudtrail:us-east-2:123456789012:trail/MyTrail``
- *(string) --*
:type PaginationConfig: dict
:param PaginationConfig:
A dictionary that provides parameters to control pagination.
- **MaxItems** *(integer) --*
The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
- **PageSize** *(integer) --*
The size of each page.
- **StartingToken** *(string) --*
A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
:rtype: dict
:returns:
"""
pass
class LookupEvents(Paginator):
def paginate(self, LookupAttributes: List = None, StartTime: datetime = None, EndTime: datetime = None, PaginationConfig: Dict = None) -> Dict:
"""
Creates an iterator that will paginate through responses from :py:meth:`CloudTrail.Client.lookup_events`.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cloudtrail-2013-11-01/LookupEvents>`_
**Request Syntax**
::
response_iterator = paginator.paginate(
LookupAttributes=[
{
'AttributeKey': 'EventId'|'EventName'|'ReadOnly'|'Username'|'ResourceType'|'ResourceName'|'EventSource'|'AccessKeyId',
'AttributeValue': 'string'
},
],
StartTime=datetime(2015, 1, 1),
EndTime=datetime(2015, 1, 1),
PaginationConfig={
'MaxItems': 123,
'PageSize': 123,
'StartingToken': 'string'
}
)
**Response Syntax**
::
{
'Events': [
{
'EventId': 'string',
'EventName': 'string',
'ReadOnly': 'string',
'AccessKeyId': 'string',
'EventTime': datetime(2015, 1, 1),
'EventSource': 'string',
'Username': 'string',
'Resources': [
{
'ResourceType': 'string',
'ResourceName': 'string'
},
],
'CloudTrailEvent': 'string'
},
],
}
**Response Structure**
- *(dict) --*
Contains a response to a LookupEvents action.
- **Events** *(list) --*
A list of events returned based on the lookup attributes specified and the CloudTrail event. The events list is sorted by time. The most recent event is listed first.
- *(dict) --*
Contains information about an event that was returned by a lookup request. The result includes a representation of a CloudTrail event.
- **EventId** *(string) --*
The CloudTrail ID of the event returned.
- **EventName** *(string) --*
The name of the event returned.
- **ReadOnly** *(string) --*
Information about whether the event is a write event or a read event.
- **AccessKeyId** *(string) --*
The AWS access key ID that was used to sign the request. If the request was made with temporary security credentials, this is the access key ID of the temporary credentials.
- **EventTime** *(datetime) --*
The date and time of the event returned.
- **EventSource** *(string) --*
The AWS service that the request was made to.
- **Username** *(string) --*
A user name or role name of the requester that called the API in the event returned.
- **Resources** *(list) --*
A list of resources referenced by the event returned.
- *(dict) --*
Specifies the type and name of a resource referenced by an event.
- **ResourceType** *(string) --*
The type of a resource referenced by the event returned. When the resource type cannot be determined, null is returned. Some examples of resource types are: **Instance** for EC2, **Trail** for CloudTrail, **DBInstance** for RDS, and **AccessKey** for IAM. For a list of resource types supported for event lookup, see `Resource Types Supported for Event Lookup <http://docs.aws.amazon.com/awscloudtrail/latest/userguide/lookup_supported_resourcetypes.html>`__ .
- **ResourceName** *(string) --*
The name of the resource referenced by the event returned. These are user-created names whose values will depend on the environment. For example, the resource name might be "auto-scaling-test-group" for an Auto Scaling Group or "i-1234567" for an EC2 Instance.
- **CloudTrailEvent** *(string) --*
A JSON string that contains a representation of the event returned.
:type LookupAttributes: list
:param LookupAttributes:
Contains a list of lookup attributes. Currently the list can contain only one item.
- *(dict) --*
Specifies an attribute and value that filter the events returned.
- **AttributeKey** *(string) --* **[REQUIRED]**
Specifies an attribute on which to filter the events returned.
- **AttributeValue** *(string) --* **[REQUIRED]**
Specifies a value for the specified AttributeKey.
:type StartTime: datetime
:param StartTime:
Specifies that only events that occur after or at the specified time are returned. If the specified start time is after the specified end time, an error is returned.
:type EndTime: datetime
:param EndTime:
Specifies that only events that occur before or at the specified time are returned. If the specified end time is before the specified start time, an error is returned.
:type PaginationConfig: dict
:param PaginationConfig:
A dictionary that provides parameters to control pagination.
- **MaxItems** *(integer) --*
The total number of items to return. If the total number of items available is more than the value specified in max-items then a ``NextToken`` will be provided in the output that you can use to resume pagination.
- **PageSize** *(integer) --*
The size of each page.
- **StartingToken** *(string) --*
A token to specify where to start paginating. This is the ``NextToken`` from a previous response.
:rtype: dict
:returns:
"""
pass
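# Illustrative usage sketch (not part of the generated stubs): drives the
# LookupEvents paginator through boto3. Assumes AWS credentials and region are
# already configured; the attribute values and page sizes below are arbitrary
# examples, not required settings.
if __name__ == '__main__':
    import boto3
    client = boto3.client('cloudtrail')
    paginator = client.get_paginator('lookup_events')
    pages = paginator.paginate(
        LookupAttributes=[{'AttributeKey': 'EventName', 'AttributeValue': 'ConsoleLogin'}],
        PaginationConfig={'MaxItems': 50, 'PageSize': 10})
    for page in pages:
        for event in page.get('Events', []):
            print(event['EventId'], event['EventTime'])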
/limnoria-2023.8.10.tar.gz/limnoria-2023.8.10/src/httpserver.py
import os
import cgi
import socket
from threading import Thread
import supybot.log as log
import supybot.conf as conf
import supybot.world as world
import supybot.utils.minisix as minisix
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization()
if minisix.PY2:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
else:
from http.server import HTTPServer, BaseHTTPRequestHandler
configGroup = conf.supybot.servers.http
class RequestNotHandled(Exception):
pass
DEFAULT_TEMPLATES = {
'index.html': """\
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>""" + _('Supybot Web server index') + """</title>
<link rel="stylesheet" type="text/css" href="/default.css" media="screen" />
</head>
<body class="purelisting">
<h1>Supybot web server index</h1>
<p>""" + _('Here is a list of the plugins that have a Web interface:') +\
"""
</p>
%(list)s
</body>
</html>""",
'generic/error.html': """\
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>%(title)s</title>
<link rel="stylesheet" href="/default.css" />
</head>
<body class="error">
<h1>Error</h1>
<p>%(error)s</p>
</body>
</html>""",
'default.css': """\
body {
background-color: #F0F0F0;
}
/************************************
* Classes that plugins should use. *
************************************/
/* Error pages */
body.error {
text-align: center;
}
body.error p {
background-color: #FFE0E0;
border: 1px #FFA0A0 solid;
}
/* Pages that only contain a list. */
.purelisting {
text-align: center;
}
.purelisting ul {
margin: 0;
padding: 0;
}
.purelisting ul li {
margin: 0;
padding: 0;
list-style-type: none;
}
/* Pages that only contain a table. */
.puretable {
text-align: center;
}
.puretable table
{
width: 100%;
border-collapse: collapse;
text-align: center;
}
.puretable table th
{
/*color: #039;*/
padding: 10px 8px;
border-bottom: 2px solid #6678b1;
}
.puretable table td
{
padding: 9px 8px 0px 8px;
border-bottom: 1px solid #ccc;
}
""",
'robots.txt': """""",
}
def set_default_templates(defaults):
for filename, content in defaults.items():
path = conf.supybot.directories.data.web.dirize(filename)
if os.path.isfile(path + '.example'):
os.unlink(path + '.example')
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
with open(path + '.example', 'a') as fd:
fd.write(content)
set_default_templates(DEFAULT_TEMPLATES)
def get_template(filename):
path = conf.supybot.directories.data.web.dirize(filename)
if os.path.isfile(path):
with open(path, 'r') as fd:
return fd.read()
else:
assert os.path.isfile(path + '.example'), path + '.example'
with open(path + '.example', 'r') as fd:
return fd.read()
class SupyHTTPRequestHandler(BaseHTTPRequestHandler):
def do_X(self, callbackMethod, *args, **kwargs):
if self.path == '/':
callback = SupyIndex()
elif self.path in ('/robots.txt',):
callback = Static('text/plain; charset=utf-8')
elif self.path in ('/default.css',):
callback = Static('text/css')
elif self.path == '/favicon.ico':
callback = Favicon()
else:
subdir = self.path.split('/')[1]
try:
callback = self.server.callbacks[subdir]
except KeyError:
callback = Supy404()
# Some shortcuts
for name in ('send_response', 'send_header', 'end_headers', 'rfile',
'wfile', 'headers'):
setattr(callback, name, getattr(self, name))
# We call doX, because this is more supybotic than do_X.
path = self.path
if not callback.fullpath:
path = '/' + path.split('/', 2)[-1]
getattr(callback, callbackMethod)(self, path,
*args, **kwargs)
def do_GET(self):
self.do_X('doGet')
def do_POST(self):
if 'Content-Type' not in self.headers:
self.headers['Content-Type'] = 'application/x-www-form-urlencoded'
if self.headers['Content-Type'] == 'application/x-www-form-urlencoded':
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={'REQUEST_METHOD':'POST',
'CONTENT_TYPE':self.headers['Content-Type'],
})
else:
content_length = int(self.headers.get('Content-Length', '0'))
form = self.rfile.read(content_length)
self.do_X('doPost', form=form)
def do_HEAD(self):
self.do_X('doHead')
def address_string(self):
s = BaseHTTPRequestHandler.address_string(self)
# Strip IPv4-mapped IPv6 addresses such as ::ffff:127.0.0.1
prefix = '::ffff:'
if s.startswith(prefix):
s = s[len(prefix):]
return s
def log_message(self, format, *args):
log.info('HTTP request: %s - %s' %
(self.address_string(), format % args))
class SupyHTTPServerCallback(log.Firewalled):
"""This is a base class that should be overriden by any plugin that want
to have a Web interface."""
__firewalled__ = {'doGet': None,
'doPost': None,
'doHead': None,
'doPut': None,
'doDelete': None,
}
fullpath = False
name = "Unnamed plugin"
public = True
"""Whether the callback should be listed in the root index."""
defaultResponse = _("""
This is a default response of the Supybot HTTP server. If you see this
message, it probably means you are developing a plugin, and you have
neither overridden this message nor defined a handler for this query.""")
if minisix.PY3:
def write(self, b):
if isinstance(b, str):
b = b.encode()
self.wfile.write(b)
else:
def write(self, s):
self.wfile.write(s)
def doGetOrHead(self, handler, path, write_content):
response = self.defaultResponse.encode()
handler.send_response(405)
self.send_header('Content-Type', 'text/plain; charset=utf-8')
self.send_header('Content-Length', len(response))
self.end_headers()
if write_content:
self.wfile.write(response)
def doGet(self, handler, path):
self.doGetOrHead(handler, path, write_content=True)
def doHead(self, handler, path):
self.doGetOrHead(handler, path, write_content=False)
def doPost(self, handler, path, form=None):
self.doGetOrHead(handler, path, write_content=True)
def doWellKnown(self, handler, path):
"""Handles GET request to /.well-known/"""
return None
def doHook(self, handler, subdir):
"""Method called when hooking this callback."""
pass
def doUnhook(self, handler):
"""Method called when unhooking this callback."""
pass
class Supy404(SupyHTTPServerCallback):
"""A 404 Not Found error."""
name = "Error 404"
fullpath = True
response = _("""
I am a pretty clever IRC bot, but I suck at serving Web pages, particularly
if I don't know what to serve.
What I'm saying is you just triggered a 404 Not Found, and I am not
trained to help you in such a case.""")
def doGetOrHead(self, handler, path, write_content):
response = self.response
if minisix.PY3:
response = response.encode()
handler.send_response(404)
self.send_header('Content-Type', 'text/plain; charset=utf-8')
self.send_header('Content-Length', len(response))
self.end_headers()
if write_content:
self.wfile.write(response)
class SupyIndex(SupyHTTPServerCallback):
"""Displays the index of available plugins."""
name = "index"
defaultResponse = _("Request not handled.")
def doGetOrHead(self, handler, path, write_content):
plugins = [
(name, cb)
for (name, cb) in handler.server.callbacks.items()
if cb.public]
if plugins == []:
plugins = _('No plugins available.')
else:
plugins = '<ul class="plugins"><li>%s</li></ul>' % '</li><li>'.join(
['<a href="/%s/">%s</a>' % (x,y.name) for x,y in plugins])
response = get_template('index.html') % {'list': plugins}
if minisix.PY3:
response = response.encode()
handler.send_response(200)
self.send_header('Content-Type', 'text/html; charset=utf-8')
self.send_header('Content-Length', len(response))
self.end_headers()
if write_content:
self.wfile.write(response)
class Static(SupyHTTPServerCallback):
"""Serves static files."""
fullpath = True
name = 'static'
defaultResponse = _('Request not handled')
def __init__(self, mimetype='text/plain; charset=utf-8'):
super(Static, self).__init__()
self._mimetype = mimetype
def doGetOrHead(self, handler, path, write_content):
response = get_template(path)
if minisix.PY3:
response = response.encode()
handler.send_response(200)
self.send_header('Content-type', self._mimetype)
self.send_header('Content-Length', len(response))
self.end_headers()
if write_content:
self.wfile.write(response)
class Favicon(SupyHTTPServerCallback):
"""Services the favicon.ico file to browsers."""
name = 'favicon'
defaultResponse = _('Request not handled')
def doGetOrHead(self, handler, path, write_content):
response = None
file_path = conf.supybot.servers.http.favicon()
if file_path:
try:
with open(file_path, 'rb') as icon:
response = icon.read()
except IOError:
pass
if response is not None:
# I have no idea why, but these headers are already sent.
# filename = file_path.rsplit(os.sep, 1)[1]
# if '.' in filename:
# ext = filename.rsplit('.', 1)[1]
# else:
# ext = 'ico'
# self.send_header('Content-Length', len(response))
# self.send_header('Content-type', 'image/' + ext)
# self.end_headers()
if write_content:
self.wfile.write(response)
else:
response = _('No favicon set.')
if minisix.PY3:
response = response.encode()
handler.send_response(404)
self.send_header('Content-type', 'text/plain; charset=utf-8')
self.send_header('Content-Length', len(response))
self.end_headers()
if write_content:
self.wfile.write(response)
class SupyWellKnown(SupyHTTPServerCallback):
"""Serves /.well-known/ resources."""
name = 'well-known'
defaultResponse = _('Request not handled')
public = False
def doGetOrHead(self, handler, path, write_content):
for callback in handler.server.callbacks.values():
resp = callback.doWellKnown(handler, path)
if resp:
(status, headers, content) = resp
handler.send_response(status)
for header in headers.items():
self.send_header(*header)
self.end_headers()
if write_content:
self.wfile.write(content)
return
handler.send_response(404)
self.end_headers()
self.wfile.write(b"Error 404. There is nothing to see here.")
DEFAULT_CALLBACKS = {'.well-known': SupyWellKnown()}
class RealSupyHTTPServer(HTTPServer):
# TODO: make this configurable
timeout = 0.5
running = False
def __init__(self, address, protocol, callback):
self.protocol = protocol
if protocol == 4:
self.address_family = socket.AF_INET
elif protocol == 6:
self.address_family = socket.AF_INET6
else:
raise AssertionError(protocol)
HTTPServer.__init__(self, address, callback)
self.callbacks = DEFAULT_CALLBACKS.copy()
def server_bind(self):
if self.protocol == 6:
v = conf.supybot.servers.http.singleStack()
self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, v)
HTTPServer.server_bind(self)
def hook(self, subdir, callback):
if subdir in self.callbacks:
log.warning(('The HTTP subdirectory `%s` was already hooked, but '
'has now been claimed by another plugin (or maybe you '
'reloaded the plugin and it didn\'t properly unhook). '
'Forcing unhook.') % subdir)
self.callbacks[subdir] = callback
callback.doHook(self, subdir)
def unhook(self, subdir):
callback = self.callbacks.pop(subdir, None)
if callback:
callback.doUnhook(self)
return callback
def __str__(self):
return 'server at %s %i' % self.server_address[0:2]
class TestSupyHTTPServer(RealSupyHTTPServer):
def __init__(self, *args, **kwargs):
self.callbacks = {}
self.server_address = ("0.0.0.0", 0)
def serve_forever(self, *args, **kwargs):
pass
def shutdown(self, *args, **kwargs):
pass
if world.testing or world.documenting:
SupyHTTPServer = TestSupyHTTPServer
else:
SupyHTTPServer = RealSupyHTTPServer
http_servers = []
def startServer():
"""Starts the HTTP server. Shouldn't be called from other modules.
The callback should be an instance of a child of SupyHTTPServerCallback."""
global http_servers
addresses4 = [(4, (x, configGroup.port()))
for x in configGroup.hosts4() if x != '']
addresses6 = [(6, (x, configGroup.port()))
for x in configGroup.hosts6() if x != '']
http_servers = []
for protocol, address in (addresses4 + addresses6):
try:
server = SupyHTTPServer(address, protocol, SupyHTTPRequestHandler)
except OSError as e:
log.error(
'Failed to start HTTP server with protocol %s at address %s: %s',
protocol, address, e)
if e.args[0] == 98:
log.error(
'This means the port (and address) is already in use by '
'another process. Either find the process using the port '
'and stop it, or change the port configured in '
'supybot.servers.http.port.')
continue
except:
log.exception(
"Failed to start HTTP server with protocol %s at address",
protocol, address)
continue
Thread(target=server.serve_forever, name='HTTP Server').start()
http_servers.append(server)
log.info('Starting HTTP server: %s' % str(server))
def stopServer():
"""Stops the HTTP server. Should be run only from this module or from
when the bot is dying (ie. from supybot.world)"""
global http_servers
for server in http_servers:
log.info('Stopping HTTP server: %s' % str(server))
server.shutdown()
server = None
if configGroup.keepAlive():
startServer()
def hook(subdir, callback):
"""Sets a callback for a given subdir."""
if not http_servers:
startServer()
assert isinstance(http_servers, list)
for server in http_servers:
server.hook(subdir, callback)
def unhook(subdir):
"""Unsets the callback assigned to the given subdir, and return it."""
global http_servers
assert isinstance(http_servers, list)
for server in list(http_servers):
server.unhook(subdir)
if len(set(server.callbacks) - set(DEFAULT_CALLBACKS)) <= 0 \
and not configGroup.keepAlive():
server.shutdown()
http_servers.remove(server)
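# Illustrative sketch (not part of the original module): how a plugin might
# expose a page through this server. The subdirectory name 'hello' and the
# page text are hypothetical; the pattern follows SupyHTTPServerCallback and
# hook() as defined above, and mirrors how SupyIndex sends its response.
class _HelloCallback(SupyHTTPServerCallback):
    name = 'Hello example'
    def doGet(self, handler, path):
        response = b'Hello from a Supybot plugin!'
        handler.send_response(200)
        self.send_header('Content-Type', 'text/plain; charset=utf-8')
        self.send_header('Content-Length', len(response))
        self.end_headers()
        self.wfile.write(response)
# hook('hello', _HelloCallback())   # would serve the page at /hello/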
/criteo_api_retailmedia_sdk-2023.7.0.230831-py3-none-any.whl/criteo_api_retailmedia_v2023_07/model/common_line_item_response.py
import re # noqa: F401
import sys # noqa: F401
from criteo_api_retailmedia_v2023_07.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from criteo_api_retailmedia_v2023_07.exceptions import ApiAttributeError
def lazy_import():
from criteo_api_retailmedia_v2023_07.model.problem_details import ProblemDetails
from criteo_api_retailmedia_v2023_07.model.resource_of_common_line_item import ResourceOfCommonLineItem
globals()['ProblemDetails'] = ProblemDetails
globals()['ResourceOfCommonLineItem'] = ResourceOfCommonLineItem
class CommonLineItemResponse(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute;
for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute;
for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self; this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self; this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'data': (ResourceOfCommonLineItem,), # noqa: E501
'warnings': ([ProblemDetails],), # noqa: E501
'errors': ([ProblemDetails],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'data': 'data', # noqa: E501
'warnings': 'warnings', # noqa: E501
'errors': 'errors', # noqa: E501
}
read_only_vars = {
'warnings', # noqa: E501
'errors', # noqa: E501
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""CommonLineItemResponse - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
data (ResourceOfCommonLineItem): [optional] # noqa: E501
warnings ([ProblemDetails]): [optional] # noqa: E501
errors ([ProblemDetails]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""CommonLineItemResponse - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
data (ResourceOfCommonLineItem): [optional] # noqa: E501
warnings ([ProblemDetails]): [optional] # noqa: E501
errors ([ProblemDetails]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
/scs_core-2.8.9-py3-none-any.whl/scs_core/interface/interface_conf.py
from collections import OrderedDict
from scs_core.data.json import PersistentJSONable
# --------------------------------------------------------------------------------------------------------------------
class InterfaceConf(PersistentJSONable):
"""
Interface board configuration (model name only), persisted as interface_conf.json in the conf directory.
"""
__FILENAME = "interface_conf.json"
@classmethod
def persistence_location(cls):
return cls.conf_dir(), cls.__FILENAME
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct_from_jdict(cls, jdict, skeleton=False):
if not jdict:
return None
model = jdict.get('model')
return cls(model)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, model):
"""
Constructor
"""
super().__init__()
self.__model = model # string
def __eq__(self, other):
try:
return self.model == other.model
except (TypeError, AttributeError):
return False
# ----------------------------------------------------------------------------------------------------------------
# noinspection PyMethodMayBeStatic
def interface(self):
return None
# ----------------------------------------------------------------------------------------------------------------
@property
def model(self):
return self.__model
# ----------------------------------------------------------------------------------------------------------------
def as_json(self):
jdict = OrderedDict()
jdict['model'] = self.model
return jdict
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "InterfaceConf(core):{model:%s}" % self.model
/iceberg_penguins.search-0.3.1.tar.gz/iceberg_penguins.search-0.3.1/src/iceberg_penguins/search/data/png_dataset.py
import torch
import os.path
from scipy import misc
from .base_dataset import BaseDataset, get_transform
from .image_folder import make_dataset
from PIL import Image
from PIL import ImageFilter
from pdb import set_trace as st
import random
import numpy as np
import time
class PngDataset(BaseDataset):
def initialize(self, opt):
self.opt = opt
self.root = opt.dataroot
self.GTroot = opt.dataroot
self.A_dir = opt.dataroot + '/A/'
self.B_dir = opt.dataroot + '/B/'
self.imname = []
self.imname_pos = []
for root,_,fnames in sorted(os.walk(self.A_dir)):
for fname in fnames:
if fname.endswith('.png'):
path = os.path.join(root,fname)
self.imname.append(fname)
for root,_,fnames in sorted(os.walk(self.B_dir)):
for fname in fnames:
if fname.endswith('.png'):
path = os.path.join(root,fname)
self.imname_pos.append(fname)
self.nim = len(self.imname)
def __len__(self):
return 5000
#return self.nim
def name(self):
return 'PNGDATASET'
def getpatch(self,idx,i,j):
A_img = self.tifimg[:,i*256:(i+1)*256,j*256:(j+1)*256]
B_img = self.GTmask[:,i*256:(i+1)*256,j*256:(j+1)*256]
A_img = torch.from_numpy(A_img).float().div(255)
B_img = torch.from_numpy(B_img).float().div(255)
A_img = torch.unsqueeze(A_img,0)
B_img = torch.unsqueeze(B_img,0)
return {'A': A_img, 'B': B_img,'imname':self.imname[0]}
def get_number_of_patches(self,idx):
return self.nx,self.ny
def __getitem__(self,index):
if self.opt.randomSize:
self.opt.loadSize = np.random.randint(257,300,1)[0]
if random.random() < self.opt.biased_sampling:
r_index = index % len(self.imname_pos)
imname = self.imname_pos[r_index]
A_img = Image.open(os.path.join(self.A_dir,imname))
B_img = Image.open(os.path.join(self.B_dir,imname))
else:
r_index = index % len(self.imname)
imname = self.imname[r_index]
A_img = Image.open(os.path.join(self.A_dir,imname))
if imname in self.imname_pos:
B_img = Image.open(os.path.join(self.B_dir,imname))
else:
t = A_img.size
B_img = Image.fromarray(np.zeros((A_img.size[0],A_img.size[1])))
ow = A_img.size[0]
oh = A_img.size[1]
w = float(A_img.size[0])
h = float(A_img.size[1])
if self.opt.keep_ratio:
if w > h:
ratio = float(self.opt.loadSize)/float(h)
neww = int(w*ratio)
newh = self.opt.loadSize
else:
ratio = float(self.opt.loadSize)/float(w)
neww = self.opt.loadSize
newh = int(h*ratio)
else:
neww = self.opt.loadSize
newh = self.opt.loadSize
if self.opt.tsize:
neww = self.opt.tw
newh = self.opt.th
t = [Image.FLIP_LEFT_RIGHT, Image.ROTATE_90]
for i in range(0,2):
c = np.random.randint(0,3,1,dtype=int)[0]
if c==2: continue
A_img=A_img.transpose(t[c])
B_img=B_img.transpose(t[c])
degree=np.random.randint(-10,10,1)[0]
A_img=A_img.rotate(degree)
B_img=B_img.rotate(degree)
A_img = A_img.resize((neww, newh),Image.NEAREST)
B_img = B_img.resize((neww, newh),Image.NEAREST)
A_img = np.asarray(A_img)
B_img = np.asarray(B_img)
A_img = A_img[:,:,0:3]
B_img.setflags(write=1)
B_img[B_img==2] = 255
B_img[B_img!=255] = 0
A_img = np.transpose(A_img,(2,0,1))
B_img = np.expand_dims(B_img, axis=0)
z,w,h = A_img.shape
w_offset = random.randint(0,max(0,w-self.opt.fineSize-1))
h_offset = random.randint(0,max(0,h-self.opt.fineSize-1))
A_img = A_img[:, w_offset:w_offset + self.opt.fineSize, h_offset:h_offset + self.opt.fineSize]
B_img = B_img[:,w_offset:w_offset + self.opt.fineSize, h_offset:h_offset + self.opt.fineSize]
A_img = torch.from_numpy(A_img).float().div(255)
B_img = torch.from_numpy(B_img).float().div(255)
A_img = A_img - 0.5
A_img = A_img * 2
counts = torch.mean(B_img.view(-1,1))
B_img = B_img - 0.5
B_img = B_img * 2
count_ids = 1
return {'A': A_img, 'B': B_img,'imname':imname,'counts':counts, 'count_ids':count_ids}
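# Illustrative instantiation sketch (not part of the original module). The opt
# namespace below only mirrors the attributes this dataset actually reads
# (dataroot, randomSize, biased_sampling, keep_ratio, tsize, loadSize,
# fineSize); the path and numbers are hypothetical, BaseDataset is assumed to
# take no constructor arguments, and indexing requires matching PNGs under
# <dataroot>/A and <dataroot>/B.
if __name__ == '__main__':
    from types import SimpleNamespace
    opt = SimpleNamespace(dataroot='/data/penguins', randomSize=False,
                          biased_sampling=0.5, keep_ratio=True, tsize=False,
                          loadSize=286, fineSize=256)
    ds = PngDataset()
    ds.initialize(opt)
    sample = ds[0]   # dict with keys 'A', 'B', 'imname', 'counts', 'count_ids'
    print(sample['A'].shape, sample['B'].shape)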
/lbrlabs_pulumi_harness-0.0.6a1688229249.tar.gz/lbrlabs_pulumi_harness-0.0.6a1688229249/lbrlabs_pulumi_harness/platform/sumologic_connector.py
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SumologicConnectorArgs', 'SumologicConnector']
@pulumi.input_type
class SumologicConnectorArgs:
def __init__(__self__, *,
access_id_ref: pulumi.Input[str],
access_key_ref: pulumi.Input[str],
identifier: pulumi.Input[str],
url: pulumi.Input[str],
delegate_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a SumologicConnector resource.
:param pulumi.Input[str] access_id_ref: Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[str] access_key_ref: Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[str] identifier: Unique identifier of the resource.
:param pulumi.Input[str] url: URL of the SumoLogic server.
:param pulumi.Input[Sequence[pulumi.Input[str]]] delegate_selectors: Tags to filter delegates for connection.
:param pulumi.Input[str] description: Description of the resource.
:param pulumi.Input[str] name: Name of the resource.
:param pulumi.Input[str] org_id: Unique identifier of the organization.
:param pulumi.Input[str] project_id: Unique identifier of the project.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags to associate with the resource.
"""
pulumi.set(__self__, "access_id_ref", access_id_ref)
pulumi.set(__self__, "access_key_ref", access_key_ref)
pulumi.set(__self__, "identifier", identifier)
pulumi.set(__self__, "url", url)
if delegate_selectors is not None:
pulumi.set(__self__, "delegate_selectors", delegate_selectors)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if org_id is not None:
pulumi.set(__self__, "org_id", org_id)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="accessIdRef")
def access_id_ref(self) -> pulumi.Input[str]:
"""
Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_id_ref")
@access_id_ref.setter
def access_id_ref(self, value: pulumi.Input[str]):
pulumi.set(self, "access_id_ref", value)
@property
@pulumi.getter(name="accessKeyRef")
def access_key_ref(self) -> pulumi.Input[str]:
"""
Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_key_ref")
@access_key_ref.setter
def access_key_ref(self, value: pulumi.Input[str]):
pulumi.set(self, "access_key_ref", value)
@property
@pulumi.getter
def identifier(self) -> pulumi.Input[str]:
"""
Unique identifier of the resource.
"""
return pulumi.get(self, "identifier")
@identifier.setter
def identifier(self, value: pulumi.Input[str]):
pulumi.set(self, "identifier", value)
@property
@pulumi.getter
def url(self) -> pulumi.Input[str]:
"""
URL of the SumoLogic server.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: pulumi.Input[str]):
pulumi.set(self, "url", value)
@property
@pulumi.getter(name="delegateSelectors")
def delegate_selectors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags to filter delegates for connection.
"""
return pulumi.get(self, "delegate_selectors")
@delegate_selectors.setter
def delegate_selectors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "delegate_selectors", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="orgId")
def org_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier of the organization.
"""
return pulumi.get(self, "org_id")
@org_id.setter
def org_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "org_id", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier of the project.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags to associate with the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _SumologicConnectorState:
def __init__(__self__, *,
access_id_ref: Optional[pulumi.Input[str]] = None,
access_key_ref: Optional[pulumi.Input[str]] = None,
delegate_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering SumologicConnector resources.
:param pulumi.Input[str] access_id_ref: Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[str] access_key_ref: Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[Sequence[pulumi.Input[str]]] delegate_selectors: Tags to filter delegates for connection.
:param pulumi.Input[str] description: Description of the resource.
:param pulumi.Input[str] identifier: Unique identifier of the resource.
:param pulumi.Input[str] name: Name of the resource.
:param pulumi.Input[str] org_id: Unique identifier of the organization.
:param pulumi.Input[str] project_id: Unique identifier of the project.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags to associate with the resource.
:param pulumi.Input[str] url: URL of the SumoLogic server.
"""
if access_id_ref is not None:
pulumi.set(__self__, "access_id_ref", access_id_ref)
if access_key_ref is not None:
pulumi.set(__self__, "access_key_ref", access_key_ref)
if delegate_selectors is not None:
pulumi.set(__self__, "delegate_selectors", delegate_selectors)
if description is not None:
pulumi.set(__self__, "description", description)
if identifier is not None:
pulumi.set(__self__, "identifier", identifier)
if name is not None:
pulumi.set(__self__, "name", name)
if org_id is not None:
pulumi.set(__self__, "org_id", org_id)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if url is not None:
pulumi.set(__self__, "url", url)
@property
@pulumi.getter(name="accessIdRef")
def access_id_ref(self) -> Optional[pulumi.Input[str]]:
"""
Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_id_ref")
@access_id_ref.setter
def access_id_ref(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access_id_ref", value)
@property
@pulumi.getter(name="accessKeyRef")
def access_key_ref(self) -> Optional[pulumi.Input[str]]:
"""
Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_key_ref")
@access_key_ref.setter
def access_key_ref(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access_key_ref", value)
@property
@pulumi.getter(name="delegateSelectors")
def delegate_selectors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags to filter delegates for connection.
"""
return pulumi.get(self, "delegate_selectors")
@delegate_selectors.setter
def delegate_selectors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "delegate_selectors", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def identifier(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier of the resource.
"""
return pulumi.get(self, "identifier")
@identifier.setter
def identifier(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "identifier", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="orgId")
def org_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier of the organization.
"""
return pulumi.get(self, "org_id")
@org_id.setter
def org_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "org_id", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier of the project.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags to associate with the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
"""
URL of the SumoLogic server.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url", value)
class SumologicConnector(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_id_ref: Optional[pulumi.Input[str]] = None,
access_key_ref: Optional[pulumi.Input[str]] = None,
delegate_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Resource for creating a Sumologic connector.
## Example Usage
```python
import pulumi
import lbrlabs_pulumi_harness as harness
test = harness.platform.SumologicConnector("test",
access_id_ref="account.secret_id",
access_key_ref="account.secret_id",
delegate_selectors=["harness-delegate"],
description="test",
identifier="identifier",
tags=["foo:bar"],
url="https://api.us2.sumologic.com/")
```
## Import
Import account level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <connector_id>
```
Import org level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <org_id>/<connector_id>
```
Import project level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <org_id>/<project_id>/<connector_id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access_id_ref: Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[str] access_key_ref: Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[Sequence[pulumi.Input[str]]] delegate_selectors: Tags to filter delegates for connection.
:param pulumi.Input[str] description: Description of the resource.
:param pulumi.Input[str] identifier: Unique identifier of the resource.
:param pulumi.Input[str] name: Name of the resource.
:param pulumi.Input[str] org_id: Unique identifier of the organization.
:param pulumi.Input[str] project_id: Unique identifier of the project.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags to associate with the resource.
:param pulumi.Input[str] url: URL of the SumoLogic server.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SumologicConnectorArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Resource for creating a Sumologic connector.
## Example Usage
```python
import pulumi
import lbrlabs_pulumi_harness as harness
test = harness.platform.SumologicConnector("test",
access_id_ref="account.secret_id",
access_key_ref="account.secret_id",
delegate_selectors=["harness-delegate"],
description="test",
identifier="identifier",
tags=["foo:bar"],
url="https://api.us2.sumologic.com/")
```
## Import
Import account level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <connector_id>
```
Import org level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <org_id>/<connector_id>
```
Import project level sumologic connector
```sh
$ pulumi import harness:platform/sumologicConnector:SumologicConnector example <org_id>/<project_id>/<connector_id>
```
:param str resource_name: The name of the resource.
:param SumologicConnectorArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SumologicConnectorArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_id_ref: Optional[pulumi.Input[str]] = None,
access_key_ref: Optional[pulumi.Input[str]] = None,
delegate_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SumologicConnectorArgs.__new__(SumologicConnectorArgs)
if access_id_ref is None and not opts.urn:
raise TypeError("Missing required property 'access_id_ref'")
__props__.__dict__["access_id_ref"] = access_id_ref
if access_key_ref is None and not opts.urn:
raise TypeError("Missing required property 'access_key_ref'")
__props__.__dict__["access_key_ref"] = access_key_ref
__props__.__dict__["delegate_selectors"] = delegate_selectors
__props__.__dict__["description"] = description
if identifier is None and not opts.urn:
raise TypeError("Missing required property 'identifier'")
__props__.__dict__["identifier"] = identifier
__props__.__dict__["name"] = name
__props__.__dict__["org_id"] = org_id
__props__.__dict__["project_id"] = project_id
__props__.__dict__["tags"] = tags
if url is None and not opts.urn:
raise TypeError("Missing required property 'url'")
__props__.__dict__["url"] = url
super(SumologicConnector, __self__).__init__(
'harness:platform/sumologicConnector:SumologicConnector',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access_id_ref: Optional[pulumi.Input[str]] = None,
access_key_ref: Optional[pulumi.Input[str]] = None,
delegate_selectors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
identifier: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
url: Optional[pulumi.Input[str]] = None) -> 'SumologicConnector':
"""
Get an existing SumologicConnector resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access_id_ref: Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[str] access_key_ref: Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
:param pulumi.Input[Sequence[pulumi.Input[str]]] delegate_selectors: Tags to filter delegates for connection.
:param pulumi.Input[str] description: Description of the resource.
:param pulumi.Input[str] identifier: Unique identifier of the resource.
:param pulumi.Input[str] name: Name of the resource.
:param pulumi.Input[str] org_id: Unique identifier of the organization.
:param pulumi.Input[str] project_id: Unique identifier of the project.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags to associate with the resource.
:param pulumi.Input[str] url: URL of the SumoLogic server.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _SumologicConnectorState.__new__(_SumologicConnectorState)
__props__.__dict__["access_id_ref"] = access_id_ref
__props__.__dict__["access_key_ref"] = access_key_ref
__props__.__dict__["delegate_selectors"] = delegate_selectors
__props__.__dict__["description"] = description
__props__.__dict__["identifier"] = identifier
__props__.__dict__["name"] = name
__props__.__dict__["org_id"] = org_id
__props__.__dict__["project_id"] = project_id
__props__.__dict__["tags"] = tags
__props__.__dict__["url"] = url
return SumologicConnector(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accessIdRef")
def access_id_ref(self) -> pulumi.Output[str]:
"""
Reference to the Harness secret containing the access id. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_id_ref")
@property
@pulumi.getter(name="accessKeyRef")
def access_key_ref(self) -> pulumi.Output[str]:
"""
Reference to the Harness secret containing the access key. To reference a secret at the organization scope, prefix 'org' to the expression: org.{identifier}. To reference a secret at the account scope, prefix 'account' to the expression: account.{identifier}.
"""
return pulumi.get(self, "access_key_ref")
@property
@pulumi.getter(name="delegateSelectors")
def delegate_selectors(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Tags to filter delegates for connection.
"""
return pulumi.get(self, "delegate_selectors")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def identifier(self) -> pulumi.Output[str]:
"""
Unique identifier of the resource.
"""
return pulumi.get(self, "identifier")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="orgId")
def org_id(self) -> pulumi.Output[Optional[str]]:
"""
Unique identifier of the organization.
"""
return pulumi.get(self, "org_id")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[Optional[str]]:
"""
Unique identifier of the project.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Tags to associate with the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def url(self) -> pulumi.Output[str]:
"""
URL of the SumoLogic server.
"""
return pulumi.get(self, "url")
/tgapp-turbopress-0.0.5.tar.gz/tgapp-turbopress-0.0.5/turbopress/public/ckeditor/lang/de.js
/*
Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved.